Codebase list libdbix-class-perl / upstream/0.08120
[svn-upgrade] Integrating new upstream version, libdbix-class-perl (0.08120) Jonathan Yu 14 years ago
86 changed file(s) with 1052 addition(s) and 508 deletion(s). Raw diff Collapse all Expand all
00 Revision history for DBIx::Class
1
2 0.08120 2010-02-24 08:58:00 (UTC)
3 - Make sure possibly overwritten deployment_statements methods in
4 schemas get called on $schema->deploy
5 - Fix count() with group_by aliased-function resultsets
6 - with_deferred_fk_checks() Oracle support
7 - Massive refactor and cleanup of primary key handling
8 - Fixed regression losing custom result_class (really this time)
9 (RT#54697)
10 - Fixed regression in DBIC SQLT::Parser failing with a classname
11 (as opposed to a schema object)
12 - Changes to Storage::DBI::Oracle to accommodate changes in latest
13 SQL::Translator (quote handling)
14 - Make sure deployment_statements is per-storage overridable
15 - Fix dbicadmin's (lack of) POD
116
217 0.08119 2010-02-15 09:36:00 (UTC)
318 - Add $rs->is_ordered to test for existing order_by on a resultset
2020 lib/DBIx/Class.pm
2121 lib/DBIx/Class/AccessorGroup.pm
2222 lib/DBIx/Class/Admin.pm
23 lib/DBIx/Class/Admin/Descriptive.pm
2324 lib/DBIx/Class/Admin/Types.pm
25 lib/DBIx/Class/Admin/Usage.pm
2426 lib/DBIx/Class/CDBICompat.pm
2527 lib/DBIx/Class/CDBICompat/AbstractSearch.pm
2628 lib/DBIx/Class/CDBICompat/AccessorMapping.pm
113115 lib/DBIx/Class/Storage/DBI.pm
114116 lib/DBIx/Class/Storage/DBI/ADO.pm
115117 lib/DBIx/Class/Storage/DBI/ADO/Microsoft_SQL_Server.pm
116 lib/DBIx/Class/Storage/DBI/AmbiguousGlob.pm
117118 lib/DBIx/Class/Storage/DBI/AutoCast.pm
118119 lib/DBIx/Class/Storage/DBI/Cursor.pm
119120 lib/DBIx/Class/Storage/DBI/DB2.pm
310311 t/cdbi/testlib/Thing.pm
311312 t/count/count_rs.t
312313 t/count/distinct.t
314 t/count/group_by_func.t
313315 t/count/grouped_pager.t
314316 t/count/in_subquery.t
315317 t/count/joined.t
55 DBD::SQLite: 1.25
66 File::Temp: 0.22
77 Test::Builder: 0.33
8 Test::Deep: 0
98 Test::Exception: 0
109 Test::More: 0.92
1110 Test::Warn: 0.21
2221 directory:
2322 - examples
2423 - inc
24 - lib/DBIx/Class/Admin
25 - lib/DBIx/Class/CDBICompat
2526 - lib/DBIx/Class/PK/Auto
2627 - lib/DBIx/Class/SQLAHacks
2728 - t
2829 package:
29 - DBIx::Class::Storage::DBI::AmbiguousGlob
3030 - DBIx::Class::SQLAHacks
3131 - DBIx::Class::Storage::DBIHacks
3232 requires:
3434 Class::Accessor::Grouped: 0.09002
3535 Class::C3::Componentised: 1.0005
3636 Class::Inspector: 1.24
37 Context::Preserve: 0.01
3738 DBI: 1.609
3839 Data::Dumper::Concise: 1.000
3940 Data::Page: 2.00
40 List::Util: 0
4141 MRO::Compat: 0.09
4242 Module::Find: 0.06
4343 Path::Class: 0.18
4444 SQL::Abstract: 1.61
4545 SQL::Abstract::Limit: 0.13
46 Scalar::Util: 0
4746 Scope::Guard: 0.03
48 Storable: 0
4947 Sub::Name: 0.04
5048 perl: 5.8.1
5149 resources:
5351 MailingList: http://lists.scsys.co.uk/cgi-bin/mailman/listinfo/dbix-class
5452 license: http://dev.perl.org/licenses/
5553 repository: http://dev.catalyst.perl.org/repos/bast/DBIx-Class/
56 version: 0.08119
54 version: 0.08120
66
77 use FindBin;
88 use lib "$FindBin::Bin/lib";
9
10 # adjust ENV for $AUTHOR system() calls
11 use Config;
12 $ENV{PERL5LIB} = join ($Config{path_sep}, @INC);
13
914
1015 ###
1116 ### DO NOT ADD OPTIONAL DEPENDENCIES HERE, EVEN AS recommends()
2429 my $test_requires = {
2530 'File::Temp' => '0.22',
2631 'Test::Builder' => '0.33',
27 'Test::Deep' => '0',
2832 'Test::Exception' => '0',
2933 'Test::More' => '0.92',
3034 'Test::Warn' => '0.21',
3135 };
3236
3337 my $runtime_requires = {
34 # Core
35 'List::Util' => '0',
36 'Scalar::Util' => '0',
37 'Storable' => '0',
38
39 # Dependencies
4038 'Carp::Clan' => '6.0',
4139 'Class::Accessor::Grouped' => '0.09002',
4240 'Class::C3::Componentised' => '1.0005',
4644 'MRO::Compat' => '0.09',
4745 'Module::Find' => '0.06',
4846 'Path::Class' => '0.18',
49 'Scope::Guard' => '0.03',
5047 'SQL::Abstract' => '1.61',
5148 'SQL::Abstract::Limit' => '0.13',
5249 'Sub::Name' => '0.04',
5350 'Data::Dumper::Concise' => '1.000',
51 'Scope::Guard' => '0.03',
52 'Context::Preserve' => '0.01',
5453 };
5554
5655 # this is so we can order requires alphabetically
6160 test_requires => { %$test_requires },
6261 };
6362
64 # re-build README and require extra modules for testing if we're in a checkout
63
64 # require extra modules for testing if we're in a checkout
6565 if ($Module::Install::AUTHOR) {
66
67 print "Regenerating README\n";
68 system('pod2text lib/DBIx/Class.pm > README');
69
70 if (-f 'MANIFEST') {
71 print "Removing MANIFEST\n";
72 unlink 'MANIFEST';
73 }
74
75 print "Regenerating Optional/Dependencies.pod\n";
76 require DBIx::Class::Optional::Dependencies;
77 DBIx::Class::Optional::Dependencies->_gen_pod;
78
79 # FIXME Disabled due to unsolved issues, ask theorbtwo
80 # require Module::Install::Pod::Inherit;
81 # PodInherit();
82
8366 warn <<'EOW';
8467 ******************************************************************************
8568 ******************************************************************************
9174
9275 EOW
9376
77 require DBIx::Class::Optional::Dependencies;
9478 $reqs->{test_requires} = {
9579 %{$reqs->{test_requires}},
9680 %{DBIx::Class::Optional::Dependencies->_all_optional_requirements},
118102 $rtype->($mod, $ver);
119103 }
120104
105 auto_install();
106
107 # re-create various autogenerated documentation bits
108 if ($Module::Install::AUTHOR) {
109
110 print "Regenerating README\n";
111 system('pod2text lib/DBIx/Class.pm > README');
112
113 if (-f 'MANIFEST') {
114 print "Removing MANIFEST\n";
115 unlink 'MANIFEST';
116 }
117
118 print "Regenerating Optional/Dependencies.pod\n";
119 require DBIx::Class::Optional::Dependencies;
120 DBIx::Class::Optional::Dependencies->_gen_pod;
121
122 # FIXME Disabled due to unsolved issues, ask theorbtwo
123 # require Module::Install::Pod::Inherit;
124 # PodInherit();
125 }
126
127 tests_recursive (qw|
128 t
129 |);
130
121131 install_script (qw|
122132 script/dbicadmin
123133 |);
124134
125 tests_recursive (qw|
126 t
127 |);
135
136 ### Mangle makefile - read the comments for more info
137 #
138 postamble <<"EOP";
139
140 # This will add an extra dep-spec for the distdir target,
141 # which `make` will fold together in a first-come first-serve
142 # fashion. What we do here is essentially adding extra
143 # commands to execute once the distdir is assembled (via
144 # create_distdir), but before control is returned to a higher
145 # calling rule.
146 distdir : dbicadmin_pod_inject
147
148 # The pod self-injection code is in fact a hidden option in
149 # dbicadmin itself
150 dbicadmin_pod_inject :
151 \tcd \$(DISTVNAME) && \$(ABSPERL) -Ilib script/dbicadmin --selfinject-pod
152
153 # Regenerate manifest before running create_distdir.
154 create_distdir : manifest
155
156 EOP
157
158
128159
129160 resources 'IRC' => 'irc://irc.perl.org/#dbix-class';
130161 resources 'license' => 'http://dev.perl.org/licenses/';
133164
134165 # Deprecated/internal modules need no exposure
135166 no_index directory => $_ for (qw|
167 lib/DBIx/Class/Admin
136168 lib/DBIx/Class/SQLAHacks
137169 lib/DBIx/Class/PK/Auto
170 lib/DBIx/Class/CDBICompat
138171 |);
139172 no_index package => $_ for (qw/
140 DBIx::Class::Storage::DBI::AmbiguousGlob
141173 DBIx::Class::SQLAHacks DBIx::Class::Storage::DBIHacks
142174 /);
143175
144
145 auto_install();
146176
147177 WriteAll();
148178
33 GETTING HELP/SUPPORT
44 The community can be found via:
55
6 Mailing list: http://lists.scsys.co.uk/mailman/listinfo/dbix-class/
7
8 SVN: http://dev.catalyst.perl.org/repos/bast/DBIx-Class/
9
10 SVNWeb: http://dev.catalyst.perl.org/svnweb/bast/browse/DBIx-Class/
11
12 IRC: irc.perl.org#dbix-class
6 * IRC: <irc.perl.org#dbix-class (click for instant chatroom login) >
7
8 * Mailing list: <http://lists.scsys.co.uk/mailman/listinfo/dbix-class>
9
10 * RT Bug Tracker:
11 <https://rt.cpan.org/Dist/Display.html?Queue=DBIx-Class>
12
13 * SVNWeb:
14 <http://dev.catalyst.perl.org/svnweb/bast/browse/DBIx-Class/0.08>
15
16 * SVN: <http://dev.catalyst.perl.org/repos/bast/DBIx-Class/0.08>
1317
1418 SYNOPSIS
1519 Create a schema class called MyDB/Schema.pm:
247251
248252 norbi: Norbert Buchmuller <norbi@nix.hu>
249253
254 nuba: Nuba Princigalli <nuba@cpan.org>
255
250256 Numa: Dan Sully <daniel@cpan.org>
251257
252258 ovid: Curtis "Ovid" Poe <ovid@cpan.org>
314320 zamolxes: Bogdan Lucaciu <bogdan@wiz.ro>
315321
316322 COPYRIGHT
317 Copyright (c) 2005 - 2009 the DBIx::Class "AUTHOR" and "CONTRIBUTORS" as
323 Copyright (c) 2005 - 2010 the DBIx::Class "AUTHOR" and "CONTRIBUTORS" as
318324 listed above.
319325
320326 LICENSE
0 package # hide from PAUSE
1 DBIx::Class::Admin::Descriptive;
2
3 use DBIx::Class::Admin::Usage;
4
5 use base 'Getopt::Long::Descriptive';
6
7 sub usage_class { 'DBIx::Class::Admin::Usage'; }
8
9 1;
0 package # hide from PAUSE
1 DBIx::Class::Admin::Usage;
2
3
4 use base 'Getopt::Long::Descriptive::Usage';
5
6 use base 'Class::Accessor::Grouped';
7
8 use Class::C3;
9
10 __PACKAGE__->mk_group_accessors('simple', 'synopsis', 'short_description');
11
12 sub prog_name {
13 Getopt::Long::Descriptive::prog_name();
14 }
15
16 sub set_simple {
17 my ($self,$field, $value) = @_;
18 my $prog_name = prog_name();
19 $value =~ s/%c/$prog_name/g;
20 $self->next::method($field, $value);
21 }
22
23
24
25 # This returns the usage formatted as a POD document
26 sub pod {
27 my ($self) = @_;
28 return join qq{\n}, $self->pod_leader_text, $self->pod_option_text, $self->pod_authorlic_text;
29 }
30
31 sub pod_leader_text {
32 my ($self) = @_;
33
34 return qq{=head1 NAME\n\n}.prog_name()." - ".$self->short_description().qq{\n\n}.
35 qq{=head1 SYNOPSIS\n\n}.$self->leader_text().qq{\n}.$self->synopsis().qq{\n\n};
36
37 }
38
39 sub pod_authorlic_text {
40
41 return join ("\n\n",
42 '=head1 AUTHORS',
43 'See L<DBIx::Class/CONTRIBUTORS>',
44 '=head1 LICENSE',
45 'You may distribute this code under the same terms as Perl itself',
46 '=cut',
47 );
48 }
49
50
51 sub pod_option_text {
52 my ($self) = @_;
53 my @options = @{ $self->{options} || [] };
54 my $string = q{};
55 return $string unless @options;
56
57 $string .= "=head1 OPTIONS\n\n=over\n\n";
58
59 foreach my $opt (@options) {
60 my $spec = $opt->{spec};
61 my $desc = $opt->{desc};
62 if ($desc eq 'spacer') {
63 $string .= "=back\n\n=head2 $spec\n\n=cut\n\n=over\n\n";
64 next;
65 }
66
67 $spec = Getopt::Long::Descriptive->_strip_assignment($spec);
68 $string .= "=item " . join " or ", map { length > 1 ? "B<--$_>" : "B<-$_>" }
69 split /\|/, $spec;
70 $string .= "\n\n$desc\n\n=cut\n\n";
71
72 }
73 $string .= "=back\n\n";
74 return $string;
75 }
76
77 1;
99
1010 =head1 SYNOPSIS
1111
12 See DBIx::Class::CDBICompat for directions for use.
12 See DBIx::Class::CDBICompat for usage directions.
1313
1414 =head1 DESCRIPTION
1515
1010
1111 =head1 SYNOPSIS
1212
13 See DBIx::Class::CDBICompat for directions for use.
13 See DBIx::Class::CDBICompat for usage directions.
1414
1515 =head1 DESCRIPTION
1616
1111
1212 =head1 SYNOPSIS
1313
14 See DBIx::Class::CDBICompat for directions for use.
14 See DBIx::Class::CDBICompat for usage directions.
1515
1616 =head1 DESCRIPTION
1717
99
1010 =head1 SYNOPSIS
1111
12 See DBIx::Class::CDBICompat for directions for use.
12 See DBIx::Class::CDBICompat for usage directions.
1313
1414 =head1 DESCRIPTION
1515
9090
9191 =head2 Choosing Features
9292
93 In fact, this class is just a receipe containing all the features emulated.
93 In fact, this class is just a recipe containing all the features emulated.
9494 If you like, you can choose which features to emulate by building your
9595 own class and loading it like this:
9696
144144
145145 =item Relationships
146146
147 Relationships between tables (has_a, has_many...) must be delcared after all tables in the relationship have been declared. Thus the usual CDBI idiom of declaring columns and relationships for each class together will not work. They must instead be done like so:
147 Relationships between tables (has_a, has_many...) must be declared after all tables in the relationship have been declared. Thus the usual CDBI idiom of declaring columns and relationships for each class together will not work. They must instead be done like so:
148148
149149 package Foo;
150150 use base qw(Class::DBI);
175175
176176 =head2 _file_column_callback ($file,$ret,$target)
177177
178 method made to be overridden for callback purposes.
178 Method made to be overridden for callback purposes.
179179
180180 =cut
181181
2525
2626 It can be used, for example, to automatically convert to and from
2727 L<DateTime> objects for your date and time fields. There's a
28 conveniece component to actually do that though, try
28 convenience component to actually do that though, try
2929 L<DBIx::Class::InflateColumn::DateTime>.
3030
3131 It will handle all types of references except scalar references. It
113113
114114 Fetch a column value in its inflated state. This is directly
115115 analogous to L<DBIx::Class::Row/get_column> in that it only fetches a
116 column already retreived from the database, and then inflates it.
116 column already retrieved from the database, and then inflates it.
117117 Throws an exception if the column requested is not an inflated column.
118118
119119 =cut
105105
106106 =head2 Experimental
107107
108 These components are under development, there interfaces may
108 These components are under development, their interfaces may
109109 change, they may not work, etc. So, use them if you want, but
110110 be warned.
111111
140140 );
141141
142142 ... and you'll get back a perfect L<DBIx::Class::ResultSet> (except, of course,
143 that you cannot modify the rows it contains, ie. cannot call L</update>,
143 that you cannot modify the rows it contains, e.g. cannot call L</update>,
144144 L</delete>, ... on it).
145145
146146 Note that you cannot have bind parameters unless is_virtual is set to true.
200200 # SELECT name name, LENGTH( name )
201201 # FROM artist
202202
203 Note that the C<as> attribute B<has absolutely nothing to do> with the sql
203 Note that the C<as> attribute B<has absolutely nothing to do> with the SQL
204204 syntax C< SELECT foo AS bar > (see the documentation in
205205 L<DBIx::Class::ResultSet/ATTRIBUTES>). You can control the C<AS> part of the
206206 generated SQL via the C<-as> field attribute as follows:
328328 artist_id => { 'IN' => $inside_rs->get_column('id')->as_query },
329329 });
330330
331 The usual operators ( =, !=, IN, NOT IN, etc) are supported.
331 The usual operators ( =, !=, IN, NOT IN, etc.) are supported.
332332
333333 B<NOTE>: You have to explicitly use '=' when doing an equality comparison.
334334 The following will B<not> work:
410410
411411 Using SQL functions on the left hand side of a comparison is generally not a
412412 good idea since it requires a scan of the entire table. (Unless your RDBMS
413 supports indexes on expressions - including return values of functions -, and
413 supports indexes on expressions - including return values of functions - and
414414 you create an index on the return value of the function in question.) However,
415415 it can be accomplished with C<DBIx::Class> when necessary.
416416
770770
771771 package My::App::Schema;
772772
773 use base DBIx::Class::Schema;
773 use base 'DBIx::Class::Schema';
774774
775775 # load subclassed classes from My::App::Schema::Result/ResultSet
776776 __PACKAGE__->load_namespaces;
790790
791791 use strict;
792792 use warnings;
793 use base My::Shared::Model::Result::Baz;
793 use base 'My::Shared::Model::Result::Baz';
794794
795795 # WARNING: Make sure you call table() again in your subclass,
796796 # otherwise DBIx::Class::ResultSourceProxy::Table will not be called
16201620 Add the L<DBIx::Class::Schema::Versioned> schema component to your
16211621 Schema class. This will add a new table to your database called
16221622 C<dbix_class_schema_vesion> which will keep track of which version is installed
1623 and warn if the user trys to run a newer schema version than the
1623 and warn if the user tries to run a newer schema version than the
16241624 database thinks it has.
16251625
1626 Alternatively, you can send the conversion sql scripts to your
1626 Alternatively, you can send the conversion SQL scripts to your
16271627 customers as above.
16281628
16291629 =head2 Setting quoting for the generated SQL
17051705 }
17061706 );
17071707
1708 In conditions (eg. C<\%cond> in the L<DBIx::Class::ResultSet/search> family of
1708 In conditions (e.g. C<\%cond> in the L<DBIx::Class::ResultSet/search> family of
17091709 methods) you cannot directly use array references (since this is interpreted as
17101710 a list of values to be C<OR>ed), but you can use the following syntax to force
17111711 passing them as bind values:
5757 title TEXT NOT NULL
5858 );
5959
60 and create the sqlite database file:
60 and create the SQLite database file:
6161
6262 sqlite3 example.db < example.sql
6363
197197 use strict;
198198
199199 my $schema = MyDatabase::Main->connect('dbi:SQLite:db/example.db');
200 # for other DSNs, e.g. MySql, see the perldoc for the relevant dbd
200 # for other DSNs, e.g. MySQL, see the perldoc for the relevant dbd
201201 # driver, e.g perldoc L<DBD::mysql>.
202202
203203 get_tracks_by_cd('Bad');
344344
345345 =head1 Notes
346346
347 A reference implentation of the database and scripts in this example
347 A reference implementation of the database and scripts in this example
348348 are available in the main distribution for DBIx::Class under the
349349 directory F<t/examples/Schema>.
350350
224224 Note that L<DBIx::Class::Schema> does not cache connections for you. If you use
225225 multiple connections, you need to do this manually.
226226
227 To execute some sql statements on every connect you can add them as an option in
227 To execute some SQL statements on every connect you can add them as an option in
228228 a special fifth argument to connect:
229229
230230 my $another_schema = My::Schema->connect(
112112
113113 =head2 Whole related objects
114114
115 To fetch entire related objects, eg CDs and all Track data, use the
115 To fetch entire related objects, e.g. CDs and all Track data, use the
116116 'prefetch' attribute:
117117
118118 $schema->resultset('CD')->search(
128128 SELECT cd.ID, cd.Title, cd.Year, tracks.id, tracks.Name, tracks.Artist FROM CD JOIN Tracks ON CD.ID = tracks.CDID WHERE cd.Title = 'Funky CD' ORDER BY 'tracks.id';
129129
130130 The syntax of 'prefetch' is the same as 'join' and implies the
131 joining, so no need to use both together.
131 joining, so there is no need to use both together.
132132
133133 =head2 Subset of related fields
134134
231231
232232 To perform joins using relations of the tables you are joining to, use
233233 a hashref to indicate the join depth. This can theoretically go as
234 deep as you like (warning, contrived examples!):
234 deep as you like (warning: contrived examples!):
235235
236236 join => { room => { table => 'leg' } }
237237
1616 Methods should be documented in the files which also contain the code
1717 for the method, or that file should be hidden from PAUSE completely,
1818 in which case the methods are documented in the file which loads
19 it. Methods may also be documented and refered to in files
19 it. Methods may also be documented and referred to in files
2020 representing the major objects or components on which they can be
2121 called.
2222
2323 For example, L<DBIx::Class::Relationship> documents the methods
2424 actually coded in the helper relationship classes like
2525 DBIx::Class::Relationship::BelongsTo. The BelongsTo file itself is
26 hidden from pause as it has no documentation. The accessors created by
26 hidden from PAUSE as it has no documentation. The accessors created by
2727 relationships should be mentioned in L<DBIx::Class::Row>, the major
2828 object that they will be called on.
2929
144144 =item *
145145
146146 The argument list is followed by some examples of how to use the
147 method, using it's various types of arguments.
147 method, using its various types of arguments.
148148
149149 The examples can also include ways to use the results if
150 applicable. For instance if the documentation is for a relationship
150 applicable. For instance, if the documentation is for a relationship
151151 type, the examples can include how to call the resulting relation
152152 accessor, how to use the relation name in a search and so on.
153153
2222
2323 $schema->storage->debugfh(IO::File->new('/tmp/trace.out', 'w');
2424
25 Alternatively you can do this with the environment variable too:-
25 Alternatively you can do this with the environment variable, too:-
2626
2727 export DBIC_TRACE="1=/tmp/trace.out"
2828
5050
5151 There's likely a syntax error in the table class referred to elsewhere
5252 in this error message. In particular make sure that the package
53 declaration is correct, so for a schema C< MySchema > you need to
54 specify a fully qualified namespace: C< package MySchema::MyTable; >
55 for example.
53 declaration is correct. For example, for a schema C< MySchema >
54 you need to specify a fully qualified namespace: C< package MySchema::MyTable; >.
5655
5756 =head2 syntax error at or near "<something>" ...
5857
102101 =head2 column "foo DESC" does not exist ...
103102
104103 This can happen if you are still using the obsolete order hack, and also
105 happen to turn on sql-quoting.
104 happen to turn on SQL-quoting.
106105
107106 $rs->search( {}, { order_by => [ 'name DESC' ] } );
108107
132131 Fedora 8 - perl-5.8.8-41.fc8
133132 RHEL5 - perl-5.8.8-15.el5_2.1
134133
135 The issue is due to perl doing an exhaustive search of blessed objects
134 This issue is due to perl doing an exhaustive search of blessed objects
136135 under certain circumstances. The problem shows up as performance
137 degredation exponential to the number of L<DBIx::Class> row objects in
138 memory, so can be unoticeable with certain data sets, but with huge
136 degradation exponential to the number of L<DBIx::Class> row objects in
137 memory, so can be unnoticeable with certain data sets, but with huge
139138 performance impacts on other datasets.
140139
141 A pair of tests for susceptability to the issue, and performance effects
140 A pair of tests for susceptibility to the issue and performance effects
142141 of the bless/overload problem can be found in the L<DBIx::Class> test
143 suite in the file C<t/99rh_perl_perf_bug.t>
142 suite, in the C<t/99rh_perl_perf_bug.t> file.
144143
145144 Further information on this issue can be found in
146145 L<https://bugzilla.redhat.com/show_bug.cgi?id=379791>,
149148
150149 =head2 Excessive Memory Allocation with TEXT/BLOB/etc. Columns and Large LongReadLen
151150
152 It has been observed, using L<DBD::ODBC>, that a creating a L<DBIx::Class::Row>
151 It has been observed, using L<DBD::ODBC>, that creating a L<DBIx::Class::Row>
153152 object which includes a column of data type TEXT/BLOB/etc. will allocate
154153 LongReadLen bytes. This allocation does not leak, but if LongReadLen
155154 is large in size, and many such row objects are created, e.g. as the
3232 req => {
3333 %$moose_basic,
3434 'namespace::clean' => '0.11',
35 'Hash::Merge' => '0.11',
35 'Hash::Merge' => '0.12',
3636 },
3737 pod => {
3838 title => 'Storage::Replicated',
6565
6666 deploy => {
6767 req => {
68 'SQL::Translator' => '0.11002',
68 'SQL::Translator' => '0.11005',
6969 },
7070 pod => {
7171 title => 'Storage::DBI::deploy()',
7373 },
7474 },
7575
76 author => {
77 req => {
78 'Test::Pod' => '1.26',
76
77 test_pod => {
78 req => {
79 'Test::Pod' => '1.41',
80 },
81 },
82
83 test_podcoverage => {
84 req => {
7985 'Test::Pod::Coverage' => '1.08',
8086 'Pod::Coverage' => '0.20',
87 },
88 },
89
90 test_notabs => {
91 req => {
8192 #'Test::NoTabs' => '0.9',
93 },
94 },
95
96 test_eol => {
97 req => {
8298 #'Test::EOL' => '0.6',
8399 },
84100 },
85101
86 core => {
87 req => {
88 # t/52cycle.t
102 test_cycle => {
103 req => {
89104 'Test::Memory::Cycle' => '0',
90105 'Devel::Cycle' => '1.10',
91
106 },
107 },
108
109 test_dtrelated => {
110 req => {
92111 # t/36datetime.t
93112 # t/60core.t
94113 'DateTime::Format::SQLite' => '0',
95114
96115 # t/96_is_deteministic_value.t
97116 'DateTime::Format::Strptime'=> '0',
117
118 # t/inflate/datetime_mysql.t
119 # (doesn't need MySQL itself)
120 'DateTime::Format::MySQL' => '0',
121
122 # t/inflate/datetime_pg.t
123 # (doesn't need PG itself)
124 'DateTime::Format::Pg' => '0',
98125 },
99126 },
100127
115142 ? (
116143 'Sys::SigAction' => '0',
117144 'DBD::Pg' => '2.009002',
118 'DateTime::Format::Pg' => '0',
119145 ) : ()
120146 },
121147 },
124150 req => {
125151 $ENV{DBICTEST_MYSQL_DSN}
126152 ? (
127 'DateTime::Format::MySQL' => '0',
128153 'DBD::mysql' => '0',
129154 ) : ()
130155 },
150175
151176 rdbms_asa => {
152177 req => {
153 grep $_, @ENV{qw/DBICTEST_SYBASE_ASA_DSN DBICTEST_SYBASE_ASA_ODBC_DSN/}
178 (scalar grep $_, @ENV{qw/DBICTEST_SYBASE_ASA_DSN DBICTEST_SYBASE_ASA_ODBC_DSN/})
154179 ? (
155180 'DateTime::Format::Strptime' => 0,
156181 ) : ()
157182 },
158183 },
184
185 rdbms_db2 => {
186 req => {
187 $ENV{DBICTEST_DB2_DSN}
188 ? (
189 'DBD::DB2' => 0,
190 ) : ()
191 },
192 },
193
159194 };
160195
161196
1919
2020 ...
2121
22 configure_requires 'DBIx::Class' => '0.08119';
22 configure_requires 'DBIx::Class' => '0.08120';
2323
2424 require DBIx::Class::Optional::Dependencies;
2525
108108
109109 =over
110110
111 =item * SQL::Translator >= 0.11002
111 =item * SQL::Translator >= 0.11005
112112
113113 =back
114114
120120
121121 =over
122122
123 =item * Hash::Merge >= 0.11
123 =item * Hash::Merge >= 0.12
124124
125125 =item * Moose >= 0.98
126126
126126 This method specifies a value of L</position_column> which B<would
127127 never be assigned to a row> during normal operation. When
128128 a row is moved, its position is set to this value temporarily, so
129 that any unique constrainst can not be violated. This value defaults
129 that any unique constraints can not be violated. This value defaults
130130 to 0, which should work for all cases except when your positions do
131131 indeed start from 0.
132132
796796
797797 if (grep { $_ eq $position_column } ( map { @$_ } (values %{{ $rsrc->unique_constraints }} ) ) ) {
798798
799 my @pcols = $rsrc->primary_columns;
799 my @pcols = $rsrc->_pri_cols;
800800 my $cursor = $shift_rs->search ({}, { order_by => { "-$ord", $position_column }, columns => \@pcols } )->cursor;
801801 my $rs = $self->result_source->resultset;
802802
803 while (my @pks = $cursor->next ) {
804
803 my @all_pks = $cursor->all;
804 while (my $pks = shift @all_pks) {
805805 my $cond;
806806 for my $i (0.. $#pcols) {
807 $cond->{$pcols[$i]} = $pks[$i];
807 $cond->{$pcols[$i]} = $pks->[$i];
808808 }
809809
810810 $rs->search($cond)->update ({ $position_column => \ "$position_column $op 1" } );
920920 triggering any of the positioning integrity code).
921921
922922 Some day you might get confronted by datasets that have ambiguous
923 positioning data (i.e. duplicate position values within the same group,
923 positioning data (e.g. duplicate position values within the same group,
924924 in a table without unique constraints). When manually fixing such data
925925 keep in mind that you can not invoke L<DBIx::Class::Row/update> like
926926 you normally would, as it will get confused by the wrong data before
955955
956956 =head2 Multiple Moves
957957
958 Be careful when issueing move_* methods to multiple objects. If
958 Be careful when issuing move_* methods to multiple objects. If
959959 you've pre-loaded the objects then when you move one of the objects
960960 the position of the other object will not reflect their new value
961961 until you reload them from the database - see
962962 L<DBIx::Class::Row/discard_changes>.
963963
964964 There are times when you will want to move objects as groups, such
965 as changeing the parent of several objects at once - this directly
965 as changing the parent of several objects at once - this directly
966966 conflicts with this problem. One solution is for us to write a
967967 ResultSet class that supports a parent() method, for example. Another
968968 solution is to somehow automagically modify the objects that exist
3030 my ($self) = @_;
3131 $self->throw_exception( "Can't call id() as a class method" )
3232 unless ref $self;
33 my @pk = $self->_ident_values;
34 return (wantarray ? @pk : $pk[0]);
33 my @id_vals = $self->_ident_values;
34 return (wantarray ? @id_vals : $id_vals[0]);
3535 }
3636
3737 sub _ident_values {
3838 my ($self) = @_;
39 return (map { $self->{_column_data}{$_} } $self->primary_columns);
39 my (@ids, @missing);
40
41 for ($self->_pri_cols) {
42 push @ids, $self->get_column($_);
43 push @missing, $_ if (! defined $ids[-1] and ! $self->has_column_loaded ($_) );
44 }
45
46 if (@missing && $self->in_storage) {
47 $self->throw_exception (
48 'Unable to uniquely identify row object with missing PK columns: '
49 . join (', ', @missing )
50 );
51 }
52
53 return @ids;
4054 }
4155
4256 =head2 ID
6377 $self->throw_exception( "Can't call ID() as a class method" )
6478 unless ref $self;
6579 return undef unless $self->in_storage;
66 return $self->_create_ID(map { $_ => $self->{_column_data}{$_} }
67 $self->primary_columns);
80 return $self->_create_ID(%{$self->ident_condition});
6881 }
6982
7083 sub _create_ID {
71 my ($self,%vals) = @_;
84 my ($self, %vals) = @_;
7285 return undef unless 0 == grep { !defined } values %vals;
7386 return join '|', ref $self || $self, $self->result_source->name,
7487 map { $_ . '=' . $vals{$_} } sort keys %vals;
8699
87100 sub ident_condition {
88101 my ($self, $alias) = @_;
89 my %cond;
102
103 my @pks = $self->_pri_cols;
104 my @vals = $self->_ident_values;
105
106 my (%cond, @undef);
90107 my $prefix = defined $alias ? $alias.'.' : '';
91 $cond{$prefix.$_} = $self->get_column($_) for $self->primary_columns;
108 for my $col (@pks) {
109 if (! defined ($cond{$prefix.$col} = shift @vals) ) {
110 push @undef, $col;
111 }
112 }
113
114 if (@undef && $self->in_storage) {
115 $self->throw_exception (
116 'Unable to construct row object identity condition due to NULL PK columns: '
117 . join (', ', @undef)
118 );
119 }
120
92121 return \%cond;
93122 }
94123
33 use strict;
44 use warnings;
55 use Sub::Name ();
6 use Class::Inspector ();
76
87 our %_pod_inherit_config =
98 (
199199 my $query = ((@_ > 1) ? {@_} : shift);
200200
201201 my $source = $self->result_source;
202 my $cond = $source->_resolve_condition(
203 $rel_info->{cond}, $rel, $self
204 );
202
203 # condition resolution may fail if an incomplete master-object prefetch
204 # is encountered
205 my $cond =
206 eval { $source->_resolve_condition( $rel_info->{cond}, $rel, $self ) }
207 ||
208 $DBIx::Class::ResultSource::UNRESOLVABLE_CONDITION
209 ;
210
205211 if ($cond eq $DBIx::Class::ResultSource::UNRESOLVABLE_CONDITION) {
206212 my $reverse = $source->reverse_relationship_info($rel);
207213 foreach my $rev_rel (keys %$reverse) {
259265 ( $objects_rs ) = $rs->search_related_rs('relname', $cond, $attrs);
260266
261267 This method works exactly the same as search_related, except that
262 it guarantees a restultset, even in list context.
268 it guarantees a resultset, even in list context.
263269
264270 =cut
265271
391397 call set_from_related on the book.
392398
393399 This is called internally when you pass existing objects as values to
394 L<DBIx::Class::ResultSet/create>, or pass an object to a belongs_to acessor.
400 L<DBIx::Class::ResultSet/create>, or pass an object to a belongs_to accessor.
395401
396402 The columns are only set in the local copy of the object, call L</update> to
397403 set them in the storage.
2323 # no join condition or just a column name
2424 if (!ref $cond) {
2525 $class->ensure_class_loaded($f_class);
26 my %f_primaries = map { $_ => 1 } eval { $f_class->primary_columns };
26 my %f_primaries = map { $_ => 1 } eval { $f_class->_pri_cols };
2727 $class->throw_exception(
28 "Can't infer join condition for ${rel} on ${class}; ".
29 "unable to load ${f_class}: $@"
28 "Can't infer join condition for ${rel} on ${class}: $@"
3029 ) if $@;
3130
3231 my ($pri, $too_many) = keys %f_primaries;
33 $class->throw_exception(
34 "Can't infer join condition for ${rel} on ${class}; ".
35 "${f_class} has no primary keys"
36 ) unless defined $pri;
3732 $class->throw_exception(
3833 "Can't infer join condition for ${rel} on ${class}; ".
3934 "${f_class} has multiple primary keys"
1313
1414 unless (ref $cond) {
1515 $class->ensure_class_loaded($f_class);
16 my ($pri, $too_many) = $class->primary_columns;
16 my ($pri, $too_many) = eval { $class->_pri_cols };
17 $class->throw_exception(
18 "Can't infer join condition for ${rel} on ${class}: $@"
19 ) if $@;
1720
1821 $class->throw_exception(
1922 "has_many can only infer join for a single primary key; ".
2323 $class->ensure_class_loaded($f_class);
2424
2525 my $pri = $class->_get_primary_key;
26
26
2727 $class->throw_exception(
2828 "might_have/has_one needs a primary key to infer a join; ".
2929 "${class} has none"
5959 sub _get_primary_key {
6060 my ( $class, $target_class ) = @_;
6161 $target_class ||= $class;
62 my ($pri, $too_many) = $target_class->primary_columns;
62 my ($pri, $too_many) = eval { $target_class->_pri_cols };
63 $class->throw_exception(
64 "Can't infer join condition on ${target_class}: $@"
65 ) if $@;
66
6367 $class->throw_exception(
6468 "might_have/has_one can only infer join for a single primary key; ".
6569 "${class} has more"
110110 you want to use the default value for it, but still want to set C<\%attrs>.
111111
112112 See L<DBIx::Class::Relationship::Base> for documentation on the
113 attrubutes that are allowed in the C<\%attrs> argument.
113 attributes that are allowed in the C<\%attrs> argument.
114114
115115
116116 =head2 belongs_to
233233
234234 Creates a one-to-many relationship where the foreign class refers to
235235 this class's primary key. This relationship refers to zero or more
236 records in the foreign table (ie, a C<LEFT JOIN>). This relationship
236 records in the foreign table (e.g. a C<LEFT JOIN>). This relationship
237237 defaults to using the end of this classes namespace as the foreign key
238238 in C<$related_class> to resolve the join, unless C<$their_fk_column>
239239 specifies the foreign key column in C<$related_class> or C<cond>
140140 =head1 OVERLOADING
141141
142142 If a resultset is used in a numeric context it returns the L</count>.
143 However, if it is used in a booleand context it is always true. So if
143 However, if it is used in a boolean context it is always true. So if
144144 you want to check if a resultset has any results use C<if $rs != 0>.
145145 C<if $rs> will always be true.
146146
523523 # in ::Relationship::Base::search_related (the row method), and furthermore
524524 # the relationship is of the 'single' type. This means that the condition
525525 # provided by the relationship (already attached to $self) is sufficient,
526 # as there can be only one row in the databse that would satisfy the
526 # as there can be only one row in the database that would satisfy the
527527 # relationship
528528 }
529529 else {
534534 }
535535
536536 # Run the query
537 my $rs = $self->search ($query, {result_class => $self->result_class, %$attrs});
537 my $rs = $self->search ($query, $attrs);
538538 if (keys %{$rs->_resolved_attrs->{collapse}}) {
539539 my $row = $rs->next;
540540 carp "Query returned more than one row" if $rs->next;
638638 =head2 search_related_rs
639639
640640 This method works exactly the same as search_related, except that
641 it guarantees a restultset, even in list context.
641 it guarantees a resultset, even in list context.
642642
643643 =cut
644644
11351135 if ($result_class) {
11361136 $self->ensure_class_loaded($result_class);
11371137 $self->_result_class($result_class);
1138 $self->{attrs}{result_class} = $result_class if ref $self;
11381139 }
11391140 $self->_result_class;
11401141 }
12601261 # if we multi-prefetch we group_by primary keys only as this is what we would
12611262 # get out of the rs via ->next/->all. We *DO WANT* to clobber old group_by regardless
12621263 if ( keys %{$attrs->{collapse}} ) {
1263 $sub_attrs->{group_by} = [ map { "$attrs->{alias}.$_" } ($rsrc->primary_columns) ]
1264 }
1265
1266 $sub_attrs->{select} = $rsrc->storage->_subq_count_select ($rsrc, $sub_attrs);
1264 $sub_attrs->{group_by} = [ map { "$attrs->{alias}.$_" } ($rsrc->_pri_cols) ]
1265 }
1266
1267 $sub_attrs->{select} = $rsrc->storage->_subq_count_select ($rsrc, $attrs);
12671268
12681269 # this is so that the query can be simplified e.g.
12691270 # * ordering can be thrown away in things like Top limit
14191420 my $attrs = $self->_resolved_attrs_copy;
14201421
14211422 delete $attrs->{$_} for qw/collapse select as/;
1422 $attrs->{columns} = [ map { "$attrs->{alias}.$_" } ($self->result_source->primary_columns) ];
1423 $attrs->{columns} = [ map { "$attrs->{alias}.$_" } ($self->result_source->_pri_cols) ];
14231424
14241425 if ($needs_group_by_subq) {
14251426 # make sure no group_by was supplied, or if there is one - make sure it matches
15961597 ],
15971598 },
15981599 { artistid => 5, name => 'Angsty-Whiny Girl', cds => [
1599 { title => 'My parents sold me to a record company' ,year => 2005 },
1600 { title => 'My parents sold me to a record company', year => 2005 },
16001601 { title => 'Why Am I So Ugly?', year => 2006 },
16011602 { title => 'I Got Surgery and am now Popular', year => 2007 }
16021603 ],
16241625 [qw/artistid name/],
16251626 [100, 'A Formally Unknown Singer'],
16261627 [101, 'A singer that jumped the shark two albums ago'],
1627 [102, 'An actually cool singer.'],
1628 [102, 'An actually cool singer'],
16281629 ]);
16291630
16301631 Please note an important effect on your data when choosing between void and
21312132 B<keyed on the relationship name>. If the relationship is of type C<multi>
21322133 (L<DBIx::Class::Relationship/has_many>) - pass an arrayref of hashrefs.
21332134 The process will correctly identify columns holding foreign keys, and will
2134 transparrently populate them from the keys of the corresponding relation.
2135 transparently populate them from the keys of the corresponding relation.
21352136 This can be applied recursively, and will work correctly for a structure
21362137 with an arbitrary depth and width, as long as the relationships actually
21372138 exists and the correct column data has been supplied.
33283329 will fail miserably.
33293330
33303331 To get around this limitation, you can supply literal SQL to your
3331 C<select> attibute that contains the C<AS alias> text, eg:
3332 C<select> attribute that contains the C<AS alias> text, e.g.
33323333
33333334 select => [\'myfield AS alias']
33343335
34393440 C<prefetch> can be used with the following relationship types: C<belongs_to>,
34403441 C<has_one> (or if you're using C<add_relationship>, any relationship declared
34413442 with an accessor type of 'single' or 'filter'). A more complex example that
3442 prefetches an artists cds, the tracks on those cds, and the tags associted
3443 prefetches an artists cds, the tracks on those cds, and the tags associated
34433444 with that artist is given below (assuming many-to-many from artists to tags):
34443445
34453446 my $rs = $schema->resultset('Artist')->search(
35183519
35193520 =back
35203521
3521 Specifes the maximum number of rows for direct retrieval or the number of
3522 Specifies the maximum number of rows for direct retrieval or the number of
35223523 rows per page if the page attribute or method is used.
35233524
35243525 =head2 offset
1515
1616 =head1 DESCRIPTION
1717
18 Table object that inherits from L<DBIx::Class::ResultSource>
18 Table object that inherits from L<DBIx::Class::ResultSource>.
1919
2020 =head1 METHODS
2121
500500
501501 sub primary_columns {
502502 return @{shift->_primaries||[]};
503 }
504
505 sub _pri_cols {
506 my $self = shift;
507 my @pcols = $self->primary_columns
508 or $self->throw_exception (sprintf(
509 'Operation requires a primary key to be declared on %s via set_primary_key',
510 ref $self,
511 ));
512 return @pcols;
503513 }
504514
505515 =head2 add_unique_constraint
8686 =head2 STORABLE_thaw
8787
8888 Thaws frozen handle. Resets the internal schema reference to the package
89 variable C<$thaw_schema>. The recomened way of setting this is to use
89 variable C<$thaw_schema>. The recommended way of setting this is to use
9090 C<< $schema->thaw($ice) >> which handles this for you.
9191
9292 =cut
7474 shift->result_source_instance->primary_columns(@_);
7575 }
7676
77 sub _pri_cols {
78 shift->result_source_instance->_pri_cols(@_);
79 }
80
7781 sub add_unique_constraint {
7882 shift->result_source_instance->add_unique_constraint(@_);
7983 }
449449 to C<update>, e.g. ( { %{ $href } } )
450450
451451 If the values passed or any of the column values set on the object
452 contain scalar references, eg:
452 contain scalar references, e.g.:
453453
454454 $row->last_modified(\'NOW()');
455455 # OR
951951 the new object.
952952
953953 Relationships will be followed by the copy procedure B<only> if the
954 relationship specifes a true value for its
954 relationship specifies a true value for its
955955 L<cascade_copy|DBIx::Class::Relationship::Base> attribute. C<cascade_copy>
956956 is set by default on C<has_many> relationships and unset on all others.
957957
974974 $new->insert;
975975
976976 # Its possible we'll have 2 relations to the same Source. We need to make
977 # sure we don't try to insert the same row twice esle we'll violate unique
977 # sure we don't try to insert the same row twice else we'll violate unique
978978 # constraints
979979 my $rels_copied = {};
980980
9595
9696 This module was originally written to support Oracle < 9i where ANSI joins
9797 weren't supported at all, but became the module for Oracle >= 8 because
98 Oracle's optimising of ANSI joins is horrible. (See:
99 http://scsys.co.uk:8001/7495)
98 Oracle's optimising of ANSI joins is horrible.
10099
101100 =head1 SYNOPSIS
102101
302302 then it is assumed you can do the upgrade as a single step). It
303303 then iterates through the list of versions between the current db
304304 version and the schema version applying one update at a time until
305 all relvant updates are applied.
305 all relevant updates are applied.
306306
307307 The individual update steps are performed by using
308308 L</upgrade_single_step>, which will apply the update and also
543543 compatibility between the old versions table (SchemaVersions) and the new one
544544 (dbix_class_schema_versions).
545545
546 To avoid the checks on connect, set the env var DBIC_NO_VERSION_CHECK or alternatively you can set the ignore_version attr in the forth argument like so:
546 To avoid the checks on connect, set the environment var DBIC_NO_VERSION_CHECK or alternatively you can set the ignore_version attr in the fourth argument like so:
547547
548548 my $schema = MyApp::Schema->connect(
549549 $dsn,
8181
8282 With no arguments, this method uses L<Module::Find> to load all your
8383 Result classes from a sub-namespace F<Result> under your Schema class'
84 namespace. Eg. With a Schema of I<MyDB::Schema> all files in
84 namespace, i.e. with a Schema of I<MyDB::Schema> all files in
8585 I<MyDB::Schema::Result> are assumed to be Result classes.
8686
8787 It also finds all ResultSet classes in the namespace F<ResultSet> and
747747 L<DBIx::Class::ResultSet/create>, and a arrayref of the resulting row
748748 objects is returned.
749749
750 i.e.,
750 e.g.
751751
752752 $schema->populate('Artist', [
753753 [ qw/artistid name/ ],
850850
851851 It also attaches a corresponding L<DBIx::Class::ResultSource> object to the
852852 new $schema object. If C<$additional_base_class> is given, the new composed
853 classes will inherit from first the corresponding classe from the current
853 classes will inherit from first the corresponding class from the current
854854 schema then the base class.
855855
856856 For example, for a schema with My::Schema::CD and My::Schema::Artist classes,
11541154
11551155 Provided as the recommended way of thawing schema objects. You can call
11561156 C<Storable::thaw> directly if you wish, but the thawed objects will not have a
1157 reference to any schema, so are rather useless
1157 reference to any schema, so are rather useless.
11581158
11591159 =cut
11601160
11661166
11671167 =head2 freeze
11681168
1169 This doesn't actualy do anything more than call L<Storable/freeze>, it is just
1170 provided here for symetry.
1169 This doesn't actually do anything more than call L<Storable/freeze>, it is just
1170 provided here for symmetry.
11711171
11721172 =cut
11731173
1616 triggers, incorrectly flagging those versions of perl to be buggy. A
1717 more comprehensive check has been moved into the test suite in
1818 C<t/99rh_perl_perf_bug.t> and further information about the bug has been
19 put in L<DBIx::Class::Manual::Troubleshooting>
19 put in L<DBIx::Class::Manual::Troubleshooting>.
2020
2121 Other checks may be added from time to time.
2222
+0
-47
lib/DBIx/Class/Storage/DBI/AmbiguousGlob.pm less more
0 package DBIx::Class::Storage::DBI::AmbiguousGlob;
1
2 use strict;
3 use warnings;
4
5 use base 'DBIx::Class::Storage::DBI';
6 use mro 'c3';
7
8 =head1 NAME
9
10 DBIx::Class::Storage::DBI::AmbiguousGlob - Storage component for RDBMS choking on count(*)
11
12 =head1 DESCRIPTION
13
14 Some servers choke on things like:
15
16 COUNT(*) FROM (SELECT tab1.col, tab2.col FROM tab1 JOIN tab2 ... )
17
18 claiming that col is a duplicate column (it loses the table specifiers by
19 the time it gets to the *). Thus for any subquery count we select only the
20 primary keys of the main table in the inner query. This hopefully still
21 hits the indexes and keeps the server happy.
22
23 At this point the only overridden method is C<_subq_count_select()>
24
25 =cut
26
27 sub _subq_count_select {
28 my ($self, $source, $rs_attrs) = @_;
29
30 return $rs_attrs->{group_by} if $rs_attrs->{group_by};
31
32 my @pcols = map { join '.', $rs_attrs->{alias}, $_ } ($source->primary_columns);
33 return @pcols ? \@pcols : [ 1 ];
34 }
35
36 =head1 AUTHORS
37
38 See L<DBIx::Class/CONTRIBUTORS>
39
40 =head1 LICENSE
41
42 You may distribute this code under the same terms as Perl itself.
43
44 =cut
45
46 1;
2222 throw implicit type conversion errors.
2323
2424 As long as a column L<data_type|DBIx::Class::ResultSource/add_columns> is
25 defined, and it resolves to a base RDBMS native type via L</_native_data_type> as
25 defined and resolves to a base RDBMS native type via L</_native_data_type> as
2626 defined in your Storage driver, the placeholder for this column will be
2727 converted to:
2828
22 use strict;
33 use warnings;
44
5 use base qw/DBIx::Class::Storage::DBI::AmbiguousGlob DBIx::Class::Storage::DBI/;
5 use base qw/DBIx::Class::Storage::DBI/;
66 use mro 'c3';
77
88 use List::Util();
340340 Thus compromise between usability and perfection is the MSSQL-specific
341341 L<resultset attribute|DBIx::Class::ResultSet/ATTRIBUTES> C<unsafe_subselect_ok>.
342342 It is deliberately not possible to set this on the Storage level, as the user
343 should inspect (and preferrably regression-test) the return of every such
343 should inspect (and preferably regression-test) the return of every such
344344 ResultSet individually. The example above would work if written like:
345345
346346 $rs->search ({}, {
353353 If it is possible to rewrite the search() in a way that will avoid the need
354354 for this flag - you are urged to do so. If DBIC internals insist that an
355355 ordered subselect is necessary for an operation, and you believe there is a
356 differnt/better way to get the same result - please file a bugreport.
356 different/better way to get the same result - please file a bugreport.
357357
358358 =head1 AUTHOR
359359
2525 my ($rs, $op, $values) = @_;
2626
2727 my $rsrc = $rs->result_source;
28 my @pcols = $rsrc->primary_columns;
28 my @pcols = $rsrc->_pri_cols;
2929 my $attrs = $rs->_resolved_attrs;
3030
3131 # naive check - this is an internal method after all, we should know what we are doing
7878
7979 =head1 IMPLEMENTATION NOTES
8080
81 MS Access supports the @@IDENTITY function for retriving the id of the latest inserted row.
81 MS Access supports the @@IDENTITY function for retrieving the id of the latest inserted row.
8282 @@IDENTITY is global to the connection, so to support the possibility of getting the last inserted
8383 id for different tables, the insert() function stores the inserted id on a per table basis.
8484 last_insert_id() then just returns the stored value.
11
22 use strict;
33 use warnings;
4 use Scope::Guard ();
5 use Context::Preserve ();
46
57 =head1 NAME
68
2628
2729 use base qw/DBIx::Class::Storage::DBI/;
2830 use mro 'c3';
31
32 sub deployment_statements {
33 my $self = shift;;
34 my ($schema, $type, $version, $dir, $sqltargs, @rest) = @_;
35
36 $sqltargs ||= {};
37 my $quote_char = $self->schema->storage->sql_maker->quote_char;
38 $sqltargs->{quote_table_names} = $quote_char ? 1 : 0;
39 $sqltargs->{quote_field_names} = $quote_char ? 1 : 0;
40
41 my $oracle_version = eval { $self->_get_dbh->get_info(18) };
42
43 $sqltargs->{producer_args}{oracle_version} = $oracle_version;
44
45 $self->next::method($schema, $type, $version, $dir, $sqltargs, @rest);
46 }
2947
3048 sub _dbh_last_insert_id {
3149 my ($self, $dbh, $source, @columns) = @_;
4159 sub _dbh_get_autoinc_seq {
4260 my ($self, $dbh, $source, $col) = @_;
4361
44 # look up the correct sequence automatically
45 my $sql = q{
46 SELECT trigger_body FROM ALL_TRIGGERS t
47 WHERE t.table_name = ?
48 AND t.triggering_event = 'INSERT'
49 AND t.status = 'ENABLED'
50 };
62 my $sql_maker = $self->sql_maker;
63
64 my $source_name;
65 if ( ref $source->name eq 'SCALAR' ) {
66 $source_name = ${$source->name};
67 }
68 else {
69 $source_name = $source->name;
70 }
71 $source_name = uc($source_name) unless $sql_maker->quote_char;
5172
5273 # trigger_body is a LONG
5374 local $dbh->{LongReadLen} = 64 * 1024 if ($dbh->{LongReadLen} < 64 * 1024);
5475
55 my $sth;
56
57 my $source_name;
58 if ( ref $source->name ne 'SCALAR' ) {
59 $source_name = $source->name;
60 }
61 else {
62 $source_name = ${$source->name};
63 }
64
65 # check for fully-qualified name (eg. SCHEMA.TABLENAME)
66 if ( my ( $schema, $table ) = $source_name =~ /(\w+)\.(\w+)/ ) {
67 $sql = q{
68 SELECT trigger_body FROM ALL_TRIGGERS t
69 WHERE t.owner = ? AND t.table_name = ?
70 AND t.triggering_event = 'INSERT'
71 AND t.status = 'ENABLED'
72 };
73 $sth = $dbh->prepare($sql);
74 $sth->execute( uc($schema), uc($table) );
75 }
76 else {
77 $sth = $dbh->prepare($sql);
78 $sth->execute( uc( $source_name ) );
79 }
76 # disable default bindtype
77 local $sql_maker->{bindtype} = 'normal';
78
79 # look up the correct sequence automatically
80 my ( $schema, $table ) = $source_name =~ /(\w+)\.(\w+)/;
81 my ($sql, @bind) = $sql_maker->select (
82 'ALL_TRIGGERS',
83 ['trigger_body'],
84 {
85 $schema ? (owner => $schema) : (),
86 table_name => $table || $source_name,
87 triggering_event => 'INSERT',
88 status => 'ENABLED',
89 },
90 );
91 my $sth = $dbh->prepare($sql);
92 $sth->execute (@bind);
93
8094 while (my ($insert_trigger) = $sth->fetchrow_array) {
81 return uc($1) if $insert_trigger =~ m!(\w+)\.nextval!i; # col name goes here???
82 }
83 $self->throw_exception("Unable to find a sequence INSERT trigger on table '" . $source->name . "'.");
95 return $1 if $insert_trigger =~ m!("?\w+"?)\.nextval!i; # col name goes here???
96 }
97 $self->throw_exception("Unable to find a sequence INSERT trigger on table '$source_name'.");
8498 }
8599
86100 sub _sequence_fetch {
159173 sub columns_info_for {
160174 my ($self, $table) = @_;
161175
162 $self->next::method(uc($table));
176 $self->next::method($table);
163177 }
164178
165179 =head2 datetime_parser_type
325339 return $new_alias;
326340 }
327341
342 =head2 with_deferred_fk_checks
343
344 Runs a coderef between:
345
346 alter session set constraints = deferred
347 ...
348 alter session set constraints = immediate
349
350 to defer foreign key checks.
351
352 Constraints must be declared C<DEFERRABLE> for this to work.
353
354 =cut
355
356 sub with_deferred_fk_checks {
357 my ($self, $sub) = @_;
358
359 my $txn_scope_guard = $self->txn_scope_guard;
360
361 $self->_do_query('alter session set constraints = deferred');
362
363 my $sg = Scope::Guard->new(sub {
364 $self->_do_query('alter session set constraints = immediate');
365 });
366
367 return Context::Preserve::preserve_context(sub { $sub->() },
368 after => sub { $txn_scope_guard->commit });
369 }
370
328371 =head1 AUTHOR
329372
330373 See L<DBIx::Class/CONTRIBUTORS>.
2222
2323 This module was originally written to support Oracle < 9i where ANSI joins
2424 weren't supported at all, but became the module for Oracle >= 8 because
25 Oracle's optimising of ANSI joins is horrible. (See:
26 http://scsys.co.uk:8001/7495)
25 Oracle's optimising of ANSI joins is horrible.
2726
2827 =head1 SYNOPSIS
2928
4342 It should properly support left joins, and right joins. Full outer joins are
4443 not possible due to the fact that Oracle requires the entire query be written
4544 to union the results of a left and right join, and by the time this module is
46 called to create the where query and table definition part of the sql query,
45 called to create the where query and table definition part of the SQL query,
4746 it's already too late.
4847
4948 =head1 METHODS
88 use DBD::Pg qw(:pg_types);
99
1010 # Ask for a DBD::Pg with array support
11 warn "DBD::Pg 2.9.2 or greater is strongly recommended\n"
11 warn __PACKAGE__.": DBD::Pg 2.9.2 or greater is strongly recommended\n"
1212 if ($DBD::Pg::VERSION < 2.009002); # pg uses (used?) version::qv()
1313
1414 sub with_deferred_fk_checks {
1818 database's (L<DBIx::Class::Storage::DBI::Replicated::Replicant>), defines a
1919 method by which query load can be spread out across each replicant in the pool.
2020
21 This Balancer just get's whatever is the first replicant in the pool
21 This Balancer just gets whichever is the first replicant in the pool.
2222
2323 =head1 ATTRIBUTES
2424
109109 This method should be defined in the class which consumes this role.
110110
111111 Given a pool object, return the next replicant that will serve queries. The
112 default behavior is to grap the first replicant it finds but you can write
112 default behavior is to grab the first replicant it finds but you can write
113113 your own subclasses of L<DBIx::Class::Storage::DBI::Replicated::Balancer> to
114114 support other balance systems.
115115
88 This is an introductory document for L<DBIx::Class::Storage::Replication>.
99
1010 This document is not an overview of what replication is or why you should be
11 using it. It is not a document explaing how to setup MySQL native replication
12 either. Copious external resources are avialable for both. This document
11 using it. It is not a document explaining how to setup MySQL native replication
12 either. Copious external resources are available for both. This document
1313 presumes you have the basics down.
1414
1515 =head1 DESCRIPTION
3232 For an easy way to start playing with MySQL native replication, see:
3333 L<MySQL::Sandbox>.
3434
35 If you are using this with a L<Catalyst> based appplication, you may also wish
35 If you are using this with a L<Catalyst> based application, you may also want
3636 to see more recent updates to L<Catalyst::Model::DBIC::Schema>, which has
3737 support for replication configuration options as well.
3838
4747 and let L<DBIx::Class> do its thing.
4848
4949 If you want to use replication, you will override this setting so that the
50 replicated storage engine will 'wrap' your underlying storages and present to
51 the end programmer a unified interface. This wrapper storage class will
50 replicated storage engine will 'wrap' your underlying storages and present
51 a unified interface to the end programmer. This wrapper storage class will
5252 delegate method calls to either a master database or one or more replicated
5353 databases based on if they are read only (by default sent to the replicants)
5454 or write (reserved for the master). Additionally, the Replicated storage
7171 storage itself (L<DBIx::Class::Storage::DBI::Replicated>). A replicated storage
7272 takes a pool of replicants (L<DBIx::Class::Storage::DBI::Replicated::Pool>)
7373 and a software balancer (L<DBIx::Class::Storage::DBI::Replicated::Pool>). The
74 balancer does the job of splitting up all the read traffic amongst each
75 replicant in the Pool. Currently there are two types of balancers, a Random one
74 balancer does the job of splitting up all the read traffic amongst the
75 replicants in the Pool. Currently there are two types of balancers, a Random one
7676 which chooses a Replicant in the Pool using a naive randomizer algorithm, and a
7777 First replicant, which just uses the first one in the Pool (and obviously is
7878 only of value when you have a single replicant).
144144 This object (L<DBIx::Class::Storage::DBI::Replicated::Pool>) manages all the
145145 declared replicants. 'maximum_lag' is the number of seconds a replicant is
146146 allowed to lag behind the master before being temporarily removed from the pool.
147 Keep in mind that the Balancer option 'auto_validate_every' determins how often
147 Keep in mind that the Balancer option 'auto_validate_every' determines how often
148148 a replicant is tested against this condition, so the true possible lag can be
149149 higher than the number you set. The default is zero.
150150
151151 No matter how low you set the maximum_lag or the auto_validate_every settings,
152152 there is always the chance that your replicants will lag a bit behind the
153153 master for the supported replication system built into MySQL. You can ensure
154 reliabily reads by using a transaction, which will force both read and write
154 reliable reads by using a transaction, which will force both read and write
155155 activity to the master, however this will increase the load on your master
156156 database.
157157
2222 =head1 DESCRIPTION
2323
2424 In a replicated storage type, there is at least one replicant to handle the
25 read only traffic. The Pool class manages this replicant, or list of
25 read-only traffic. The Pool class manages this replicant, or list of
2626 replicants, and gives some methods for querying information about their status.
2727
2828 =head1 ATTRIBUTES
5252
5353 This is an integer representing a time since the last time the replicants were
5454 validated. It's nothing fancy, just an integer provided via the perl L<time|perlfunc/time>
55 builtin.
55 built-in.
5656
5757 =cut
5858
8686 =head2 replicants
8787
8888 A hashref of replicant, with the key being the dsn and the value returning the
89 actual replicant storage. For example if the $dsn element is something like:
89 actual replicant storage. For example, if the $dsn element is something like:
9090
9191 "dbi:SQLite:dbname=dbfile"
9292
116116
117117 =item delete_replicant ($key)
118118
119 removes the replicant under $key from the pool
119 Removes the replicant under $key from the pool
120120
121121 =back
122122
267267 connect. For the master database this is desirable, but since replicants are
268268 allowed to fail, this behavior is not desirable. This method wraps the call
269269 to ensure_connected in an eval in order to catch any generated errors. That
270 way a slave can go completely offline (ie, the box itself can die) without
270 way a slave can go completely offline (e.g. the box itself can die) without
271271 bringing down your entire pool of databases.
272272
273273 =cut
364364 inactive, and thus removed from the replication pool.
365365
366366 This tests L<all_replicants>, since a replicant that has been previous marked
367 as inactive can be reactived should it start to pass the validation tests again.
367 as inactive can be reactivated should it start to pass the validation tests again.
368368
369369 See L<DBIx::Class::Storage::DBI> for more about checking if a replicating
370370 connection is not following a master or is lagging.
3232 =head2 active
3333
3434 This is a boolean which allows you to programmatically activate or deactivate a
35 replicant from the pool. This way to you do stuff like disallow a replicant
36 when it get's too far behind the master, if it stops replicating, etc.
35 replicant from the pool. This way you can do stuff like disallow a replicant
36 when it gets too far behind the master, if it stops replicating, etc.
3737
3838 This attribute DOES NOT reflect a replicant's internal status, i.e. if it is
3939 properly replicating from a master and has not fallen too many seconds behind a
4040 reliability threshold. For that, use L</is_replicating> and L</lag_behind_master>.
4141 Since the implementation of those functions database specific (and not all DBIC
42 supported DB's support replication) you should refer your database specific
42 supported DBs support replication) you should refer your database-specific
4343 storage driver for more information.
4444
4545 =cut
1313 use DBIx::Class::Storage::DBI::Replicated::Types qw/BalancerClassNamePart DBICSchema DBICStorageDBI/;
1414 use MooseX::Types::Moose qw/ClassName HashRef Object/;
1515 use Scalar::Util 'reftype';
16 use Hash::Merge 'merge';
16 use Hash::Merge;
1717 use List::Util qw/min max/;
1818
1919 use namespace::clean -except => 'meta';
2525 =head1 SYNOPSIS
2626
2727 The Following example shows how to change an existing $schema to a replicated
28 storage type, add some replicated (readonly) databases, and perform reporting
28 storage type, add some replicated (read-only) databases, and perform reporting
2929 tasks.
3030
3131 You should set the 'storage_type attribute to a replicated type. You should
7474 Warning: This class is marked BETA. This has been running a production
7575 website using MySQL native replication as its backend and we have some decent
7676 test coverage but the code hasn't yet been stressed by a variety of databases.
77 Individual DB's may have quirks we are not aware of. Please use this in first
77 Individual DBs may have quirks we are not aware of. Please use this in first
7878 development and pass along your experiences/bug fixes.
7979
8080 This class implements replicated data store for DBI. Currently you can define
8888 to all existing storages. This way our storage class is a drop in replacement
8989 for L<DBIx::Class::Storage::DBI>.
9090
91 Read traffic is spread across the replicants (slaves) occuring to a user
91 Read traffic is spread across the replicants (slaves) according to a user
9292 selected algorithm. The default algorithm is random weighted.
9393
9494 =head1 NOTES
9595
96 The consistency betweeen master and replicants is database specific. The Pool
96 The consistency between master and replicants is database specific. The Pool
9797 gives you a method to validate its replicants, removing and replacing them
9898 when they fail/pass predefined criteria. Please make careful use of the ways
9999 to force a query to run against Master when needed.
372372
373373 =head2 around: connect_info
374374
375 Preserve master's C<connect_info> options (for merging with replicants.)
376 Also set any Replicated related options from connect_info, such as
375 Preserves master's C<connect_info> options (for merging with replicants.)
376 Also sets any Replicated-related options from connect_info, such as
377377 C<pool_type>, C<pool_args>, C<balancer_type> and C<balancer_args>.
378378
379379 =cut
382382 my ($next, $self, $info, @extra) = @_;
383383
384384 my $wantarray = wantarray;
385
386 my $merge = Hash::Merge->new('LEFT_PRECEDENT');
385387
386388 my %opts;
387389 for my $arg (@$info) {
388390 next unless (reftype($arg)||'') eq 'HASH';
389 %opts = %{ merge($arg, \%opts) };
391 %opts = %{ $merge->merge($arg, \%opts) };
390392 }
391393 delete $opts{dsn};
392394
395397 if $opts{pool_type};
396398
397399 $self->pool_args(
398 merge((delete $opts{pool_args} || {}), $self->pool_args)
400 $merge->merge((delete $opts{pool_args} || {}), $self->pool_args)
399401 );
400402
401403 $self->pool($self->_build_pool)
407409 if $opts{balancer_type};
408410
409411 $self->balancer_args(
410 merge((delete $opts{balancer_args} || {}), $self->balancer_args)
412 $merge->merge((delete $opts{balancer_args} || {}), $self->balancer_args)
411413 );
412414
413415 $self->balancer($self->_build_balancer)
552554 $self->throw_exception('too many hashrefs in connect_info')
553555 if @hashes > 2;
554556
555 my %opts = %{ merge(reverse @hashes) };
557 my $merge = Hash::Merge->new('LEFT_PRECEDENT');
558 my %opts = %{ $merge->merge(reverse @hashes) };
556559
557560 # delete them
558561 splice @$r, $i+1, ($#{$r} - $i), ();
565568 delete $master_opts{dbh_maker};
566569
567570 # merge with master
568 %opts = %{ merge(\%opts, \%master_opts) };
571 %opts = %{ $merge->merge(\%opts, \%master_opts) };
569572
570573 # update
571574 $r->[$i] = \%opts;
593596 =head2 execute_reliably ($coderef, ?@args)
594597
595598 Given a coderef, saves the current state of the L</read_handler>, forces it to
596 use reliable storage (ie sets it to the master), executes a coderef and then
599 use reliable storage (e.g. sets it to the master), executes a coderef and then
597600 restores the original state.
598601
599602 Example:
673676 =head2 set_balanced_storage
674677
675678 Sets the current $schema to be use the </balancer> for all reads, while all
676 writea are sent to the master only
679 writes are sent to the master only
677680
678681 =cut
679682
1818
1919 This package defines the following attributes.
2020
21 head2 _query_count
21 =head2 _query_count
2222
2323 Is the attribute holding the current query count. It defines a public reader
2424 called 'query_count' which you can use to access the total number of queries
4141
4242 =head2 _query_start
4343
44 override on the method so that we count the queries.
44 Override on the method so that we count the queries.
4545
4646 =cut
4747
2424
2525 /opt/sqlanywhere11/sdk/perl
2626
27 Recommended L<DBIx::Class::Storage::DBI/connect_info> settings:
27 Recommended L<connect_info|DBIx::Class::Storage::DBI/connect_info> settings:
2828
2929 on_connect_call => 'datetime_setup'
3030
9595
9696 on_connect_call => 'datetime_setup'
9797
98 In L<DBIx::Class::Storage::DBI/connect_info> to set the date and timestamp
99 formats (as temporary options for the session) for use with
98 In L<connect_info|DBIx::Class::Storage::DBI/connect_info> to set the date and
99 timestamp formats (as temporary options for the session) for use with
100100 L<DBIx::Class::InflateColumn::DateTime>.
101101
102102 The C<TIMESTAMP> data type supports up to 6 digits after the decimal point for
140140
141141 =head1 MAXIMUM CURSORS
142142
143 A L<DBIx::Class>> application can use a lot of cursors, due to the usage of
144 L<DBI/prepare_cached>.
143 A L<DBIx::Class> application can use a lot of cursors, due to the usage of
144 L<prepare_cached|DBI/prepare_cached>.
145145
146146 The default cursor maximum is C<50>, which can be a bit too low. This limit can
147147 be turned off (or increased) by the DBA by executing:
6767
6868 =head1 DESCRIPTION
6969
70 If you're using this driver than your version of Sybase, or the libraries you
71 use to connect to it, do not support placeholders.
70 If you're using this driver then your version of Sybase or the libraries you
71 use to connect to it do not support placeholders.
7272
7373 You can also enable this driver explicitly using:
7474
8080 $sth->execute >> for details on the pros and cons of using placeholders.
8181
8282 One advantage of not using placeholders is that C<select @@identity> will work
83 for obtainging the last insert id of an C<IDENTITY> column, instead of having to
83 for obtaining the last insert id of an C<IDENTITY> column, instead of having to
8484 do C<select max(col)> in a transaction as the base Sybase driver does.
8585
8686 When using this driver, bind variables will be interpolated (properly quoted of
727727 sub _update_blobs {
728728 my ($self, $source, $blob_cols, $where) = @_;
729729
730 my (@primary_cols) = $source->primary_columns;
731
732 $self->throw_exception('Cannot update TEXT/IMAGE column(s) without a primary key')
733 unless @primary_cols;
730 my @primary_cols = eval { $source->_pri_cols };
731 $self->throw_exception("Cannot update TEXT/IMAGE column(s): $@")
732 if $@;
734733
735734 # check if we're updating a single row by PK
736735 my $pk_cols_in_where = 0;
762761 my $table = $source->name;
763762
764763 my %row = %$row;
765 my (@primary_cols) = $source->primary_columns;
766
767 $self->throw_exception('Cannot update TEXT/IMAGE column(s) without a primary key')
768 unless @primary_cols;
764 my @primary_cols = eval { $source->_pri_cols} ;
765 $self->throw_exception("Cannot update TEXT/IMAGE column(s): $@")
766 if $@;
769767
770768 $self->throw_exception('Cannot update TEXT/IMAGE column(s) without primary key values')
771769 if ((grep { defined $row{$_} } @primary_cols) != @primary_cols);
976974 definitions in your Result classes, and are mapped to a Sybase type (if it isn't
977975 already) using a mapping based on L<SQL::Translator>.
978976
979 In other configurations, placeholers will work just as they do with the Sybase
977 In other configurations, placeholders will work just as they do with the Sybase
980978 Open Client libraries.
981979
982980 Inserts or updates of TEXT/IMAGE columns will B<NOT> work with FreeTDS.
997995
998996 =head1 TRANSACTIONS
999997
1000 Due to limitations of the TDS protocol, L<DBD::Sybase>, or both; you cannot
1001 begin a transaction while there are active cursors; nor can you use multiple
998 Due to limitations of the TDS protocol, L<DBD::Sybase>, or both, you cannot
999 begin a transaction while there are active cursors, nor can you use multiple
10021000 active cursors within a transaction. An active cursor is, for example, a
10031001 L<ResultSet|DBIx::Class::ResultSet> that has been executed using C<next> or
10041002 C<first> but has not been exhausted or L<reset|DBIx::Class::ResultSet/reset>.
44
55 use base qw/
66 DBIx::Class::Storage::DBI::MultiColumnIn
7 DBIx::Class::Storage::DBI::AmbiguousGlob
87 DBIx::Class::Storage::DBI
98 /;
109 use mro 'c3';
3939 # Each of these methods need _determine_driver called before itself
4040 # in order to function reliably. This is a purely DRY optimization
4141 my @rdbms_specific_methods = qw/
42 deployment_statements
4243 sqlt_type
4344 build_datetime_parser
4445 datetime_parser_type
341342 =item name_sep
342343
343344 This only needs to be used in conjunction with C<quote_char>, and is used to
344 specify the charecter that seperates elements (schemas, tables, columns) from
345 specify the character that separates elements (schemas, tables, columns) from
345346 each other. In most cases this is simply a C<.>.
346347
347348 The consequences of not supplying this value is that L<SQL::Abstract>
777778
778779 =back
779780
780 Verifies that the the current database handle is active and ready to execute
781 an SQL statement (i.e. the connection did not get stale, server is still
781 Verifies that the current database handle is active and ready to execute
782 an SQL statement (e.g. the connection did not get stale, server is still
782783 answering, etc.) This method is used internally by L</dbh>.
783784
784785 =cut
16001601 my $rsrc = $rs->result_source;
16011602
16021603 # quick check if we got a sane rs on our hands
1603 my @pcols = $rsrc->primary_columns;
1604 unless (@pcols) {
1605 $self->throw_exception (
1606 sprintf (
1607 "You must declare primary key(s) on source '%s' (via set_primary_key) in order to update or delete complex resultsets",
1608 $rsrc->source_name || $rsrc->from
1609 )
1610 );
1611 }
1604 my @pcols = $rsrc->_pri_cols;
16121605
16131606 my $sel = $rs->_resolved_attrs->{select};
16141607 $sel = [ $sel ] unless ref $sel eq 'ARRAY';
16611654 my ($rs, $op, $values) = @_;
16621655
16631656 my $rsrc = $rs->result_source;
1664 my @pcols = $rsrc->primary_columns;
1657 my @pcols = $rsrc->_pri_cols;
16651658
16661659 my $guard = $self->txn_scope_guard;
16671660
18951888 #
18961889 sub _subq_count_select {
18971890 my ($self, $source, $rs_attrs) = @_;
1898 return $rs_attrs->{group_by} if $rs_attrs->{group_by};
1891
1892 if (my $groupby = $rs_attrs->{group_by}) {
1893
1894 my $avail_columns = $self->_resolve_column_info ($rs_attrs->{from});
1895
1896 my $sel_index;
1897 for my $sel (@{$rs_attrs->{select}}) {
1898 if (ref $sel eq 'HASH' and $sel->{-as}) {
1899 $sel_index->{$sel->{-as}} = $sel;
1900 }
1901 }
1902
1903 my @selection;
1904 for my $g_part (@$groupby) {
1905 if (ref $g_part or $avail_columns->{$g_part}) {
1906 push @selection, $g_part;
1907 }
1908 elsif ($sel_index->{$g_part}) {
1909 push @selection, $sel_index->{$g_part};
1910 }
1911 else {
1912 $self->throw_exception ("group_by criteria '$g_part' not contained within current resultset source(s)");
1913 }
1914 }
1915
1916 return \@selection;
1917 }
18991918
19001919 my @pcols = map { join '.', $rs_attrs->{alias}, $_ } ($source->primary_columns);
19011920 return @pcols ? \@pcols : [ 1 ];
24412460 }
24422461 $self->_query_end($line);
24432462 };
2444 my @statements = $self->deployment_statements($schema, $type, undef, $dir, { %{ $sqltargs || {} }, no_comments => 1 } );
2463 my @statements = $schema->deployment_statements($type, undef, $dir, { %{ $sqltargs || {} }, no_comments => 1 } );
24452464 if (@statements > 1) {
24462465 foreach my $statement (@statements) {
24472466 $deploy->( $statement );
9696 }
9797
9898 # construct the inner $from for the subquery
99 # we need to prune first, because this will determine if we need a group_bu below
99 # we need to prune first, because this will determine if we need a group_by below
100100 my $inner_from = $self->_prune_unused_joins ($from, $inner_select, $where, $inner_attrs);
101101
102102 # if a multi-type join was needed in the subquery - add a group_by to simulate the
8888 =head2 commit
8989
9090 Commit the transaction, and stop guarding the scope. If this method is not
91 called and this object goes out of scope (i.e. an exception is thrown) then
91 called and this object goes out of scope (e.g. an exception is thrown) then
9292 the transaction is rolled back, via L<DBIx::Class::Storage/txn_rollback>
9393
9494 =cut
101101
102102 Ash Berlin, 2008.
103103
104 Insipred by L<Scope::Guard> by chocolateboy.
104 Inspired by L<Scope::Guard> by chocolateboy.
105105
106106 This module is free software. It may be used, redistributed and/or modified
107107 under the same terms as Perl itself.
352352 =head2 debugfh
353353
354354 Set or retrieve the filehandle used for trace/debug output. This should be
355 an IO::Handle compatible ojbect (only the C<print> method is used. Initially
355 an IO::Handle compatible object (only the C<print> method is used. Initially
356356 set to be STDERR - although see information on the
357357 L<DBIC_TRACE> environment variable.
358358
2626 # Always remember to do all digits for the version even if they're 0
2727 # i.e. first release of 0.XX *must* be 0.XX000. This avoids fBSD ports
2828 # brain damage and presumably various other packaging systems too
29 $VERSION = '0.08119';
30
31 $VERSION = eval $VERSION; # numify for warning-free dev releases
29 $VERSION = '0.08120';
30
31 $VERSION = eval $VERSION if $VERSION =~ /_/; # numify for warning-free dev releases
3232
3333 sub MODIFY_CODE_ATTRIBUTES {
3434 my ($class,$code,@attrs) = @_;
5555
5656 The community can be found via:
5757
58 Mailing list: http://lists.scsys.co.uk/mailman/listinfo/dbix-class/
59
60 SVN: http://dev.catalyst.perl.org/repos/bast/DBIx-Class/
61
62 SVNWeb: http://dev.catalyst.perl.org/svnweb/bast/browse/DBIx-Class/
63
64 IRC: irc.perl.org#dbix-class
58 =over
59
60 =item * IRC: L<irc.perl.org#dbix-class (click for instant chatroom login)
61 |http://mibbit.com/chat/#dbix-class@irc.perl.org>
62
63 =item * Mailing list: L<http://lists.scsys.co.uk/mailman/listinfo/dbix-class>
64
65 =item * RT Bug Tracker: L<https://rt.cpan.org/Dist/Display.html?Queue=DBIx-Class>
66
67 =item * SVNWeb: L<http://dev.catalyst.perl.org/svnweb/bast/browse/DBIx-Class/0.08>
68
69 =item * SVN: L<http://dev.catalyst.perl.org/repos/bast/DBIx-Class/0.08>
70
71 =back
6572
6673 =head1 SYNOPSIS
6774
302309
303310 norbi: Norbert Buchmuller <norbi@nix.hu>
304311
312 nuba: Nuba Princigalli <nuba@cpan.org>
313
305314 Numa: Dan Sully <daniel@cpan.org>
306315
307316 ovid: Curtis "Ovid" Poe <ovid@cpan.org>
370379
371380 =head1 COPYRIGHT
372381
373 Copyright (c) 2005 - 2009 the DBIx::Class L</AUTHOR> and L</CONTRIBUTORS>
382 Copyright (c) 2005 - 2010 the DBIx::Class L</AUTHOR> and L</CONTRIBUTORS>
374383 as listed above.
375384
376385 =head1 LICENSE
3232 sub parse {
3333 # this is a hack to prevent schema leaks due to a retarded SQLT implementation
3434 # DO NOT REMOVE (until SQLT2 is out, the all of this will be rewritten anyway)
35 Scalar::Util::weaken ($_[1]);
35 Scalar::Util::weaken ($_[1]) if ref ($_[1]);
3636
3737 my ($tr, $data) = @_;
3838 my $args = $tr->parser_args;
44
55 BEGIN {
66 use DBIx::Class;
7 die ( "The following modules are required for the dbicadmin utility\n"
7 die ( 'The following modules are required for the dbicadmin utility: '
88 . DBIx::Class::Optional::Dependencies->req_missing_for ('admin_script')
9 . "\n"
910 ) unless DBIx::Class::Optional::Dependencies->req_ok_for ('admin_script');
1011 }
1112
12 use Getopt::Long::Descriptive;
13 use DBIx::Class::Admin::Descriptive;
14 #use Getopt::Long::Descriptive;
1315 use DBIx::Class::Admin;
1416
17 my $short_description = "utility for administrating DBIx::Class schemata";
18 my $synopsis_text =q|
19 deploy a schema to a database
20 %c --schema=MyApp::Schema \
21 --connect='["dbi:SQLite:my.db", "", ""]' \
22 --deploy
23
24 update an existing record
25 %c --schema=MyApp::Schema --class=Employee \
26 --connect='["dbi:SQLite:my.db", "", ""]' \
27 --op=update --set='{ "name": "New_Employee" }'
28 |;
29
1530 my ($opts, $usage) = describe_options(
16 "%c: %o",
31 "%c: %o",
1732 (
1833 ['Actions'],
1934 ["action" => hidden => { one_of => [
20 ['create|c' => 'Create version diffs needs preversion',],
21 ['upgrade|u' => 'Upgrade the database to the current schema '],
22 ['install|i' => 'Install the schema to the database',],
23 ['deploy|d' => 'Deploy the schema to the database',],
24 ['select|s' => 'Select data from the schema', ],
25 ['insert|i' => 'Insert data into the schema', ],
26 ['update|u' => 'Update data in the schema', ],
27 ['delete|D' => 'Delete data from the schema',],
35 ['create' => 'Create version diffs needs preversion',],
36 ['upgrade' => 'Upgrade the database to the current schema '],
37 ['install' => 'Install the schema version tables to an existing database',],
38 ['deploy' => 'Deploy the schema to the database',],
39 ['select' => 'Select data from the schema', ],
40 ['insert' => 'Insert data into the schema', ],
41 ['update' => 'Update data in the schema', ],
42 ['delete' => 'Delete data from the schema',],
2843 ['op:s' => 'compatiblity option all of the above can be suppied as --op=<action>'],
29 ['help|h' => 'display this help'],
44 ['help' => 'display this help', { implies => { schema_class => '__dummy__' } } ],
45 ['selfinject-pod' => 'hidden', { implies => { schema_class => '__dummy__' } } ],
3046 ], required=> 1 }],
31 ['Options'],
32 ['schema-class|schema|C:s' => 'The class of the schema to load', { required => 1 } ],
33 ['resultset|resultset_class|class|r:s' => 'The resultset to operate on for data manipulation' ],
34 ['config-stanza|S:s' => 'Where in the config to find the connection_info, supply in form MyApp::Model::DB',],
35 ['config|f:s' => 'Supply the config file for parsing by Config::Any', { depends => 'config_stanza'} ],
36 ['connect-info|n:s%' => 'Supply the connect info as additonal options ie -I dsn=<dsn> user=<user> password=<pass> '],
47 ['Arguments'],
48 ['schema-class:s' => 'The class of the schema to load', { required => 1 } ],
49 ['resultset|resultset-class|class:s' => 'The resultset to operate on for data manipulation' ],
50 ['config-stanza:s' => 'Where in the config to find the connection_info, supply in form MyApp::Model::DB',],
51 ['config:s' => 'Supply the config file for parsing by Config::Any', { depends => 'config_stanza'} ],
52 ['connect-info:s%' => 'Supply the connect info as additonal options ie -I dsn=<dsn> user=<user> password=<pass> '],
3753 ['connect:s' => 'Supply the connect info as a json string' ],
38 ['sql-dir|q:s' => 'The directory where sql diffs will be created'],
39 ['sql-type|t:s' => 'The RDBMs flavour you wish to use'],
40 ['version|v:i' => 'Supply a version install'],
41 ['preversion|p:s' => 'The previous version to diff against',],
54 ['sql-dir:s' => 'The directory where sql diffs will be created'],
55 ['sql-type:s' => 'The RDBMs flavour you wish to use'],
56 ['version:i' => 'Supply a version install'],
57 ['preversion:s' => 'The previous version to diff against',],
4258 ['set:s' => 'JSON data used to perform data operations' ],
43 ['lib|I:s' => 'Additonal library path to search in'],
4459 ['attrs:s' => 'JSON string to be used for the second argument for search'],
4560 ['where:s' => 'JSON string to be used for the where clause of search'],
4661 ['force' => 'Be forceful with some operations'],
5166
5267 die "please only use one of --config or --connect-info\n" if ($opts->{config} and $opts->{connect_info});
5368
69 if($opts->{selfinject_pod}) {
70
71 die "This is an internal method, do not call!!!\n"
72 unless $ENV{MAKELEVEL};
73
74 $usage->synopsis($synopsis_text);
75 $usage->short_description($short_description);
76 exec (
77 $^X,
78 qw/-p -0777 -i -e/,
79 (
80 's/^# auto_pod_begin.*^# auto_pod_end/'
81 . quotemeta($usage->pod)
82 . '/ms'
83 ),
84 __FILE__
85 );
86 }
87
88 if($opts->{help}) {
89 $usage->die();
90 }
91
5492 # option compatability mangle
5593 if($opts->{connect}) {
5694 $opts->{connect_info} = delete $opts->{connect};
83121 }
84122 }
85123
86 =head1 AUTHOR
87
88 See L<DBIx::Class/CONTRIBUTORS>.
124
125 __END__
126
127 =head1 NAME
128
129 dbicadmin - utility for administrating DBIx::Class schemata
130
131 =head1 SYNOPSIS
132
133 dbicadmin: [long options...]
134
135 deploy a schema to a database
136 dbicadmin --schema=MyApp::Schema \
137 --connect='["dbi:SQLite:my.db", "", ""]' \
138 --deploy
139
140 update an existing record
141 dbicadmin --schema=MyApp::Schema --class=Employee \
142 --connect='["dbi:SQLite:my.db", "", ""]' \
143 --op=update --set='{ "name": "New_Employee" }'
144
145
146
147 =head1 OPTIONS
148
149 =over
150
151 =back
152
153 =head2 Actions
154
155 =cut
156
157 =over
158
159 =item B<--create>
160
161 Create version diffs needs preversion
162
163 =cut
164
165 =item B<--upgrade>
166
167 Upgrade the database to the current schema
168
169 =cut
170
171 =item B<--install>
172
173 Install the schema version tables to an existing database
174
175 =cut
176
177 =item B<--deploy>
178
179 Deploy the schema to the database
180
181 =cut
182
183 =item B<--select>
184
185 Select data from the schema
186
187 =cut
188
189 =item B<--insert>
190
191 Insert data into the schema
192
193 =cut
194
195 =item B<--update>
196
197 Update data in the schema
198
199 =cut
200
201 =item B<--delete>
202
203 Delete data from the schema
204
205 =cut
206
207 =item B<--op>
208
209 compatiblity option all of the above can be suppied as --op=<action>
210
211 =cut
212
213 =item B<--help>
214
215 display this help
216
217 =cut
218
219 =item B<--selfinject-pod>
220
221 hidden
222
223 =cut
224
225 =back
226
227 =head2 Arguments
228
229 =cut
230
231 =over
232
233 =item B<--schema-class>
234
235 The class of the schema to load
236
237 =cut
238
239 =item B<--resultset> or B<--resultset-class> or B<--class>
240
241 The resultset to operate on for data manipulation
242
243 =cut
244
245 =item B<--config-stanza>
246
247 Where in the config to find the connection_info, supply in form MyApp::Model::DB
248
249 =cut
250
251 =item B<--config>
252
253 Supply the config file for parsing by Config::Any
254
255 =cut
256
257 =item B<--connect-info>
258
259 Supply the connect info as additonal options ie -I dsn=<dsn> user=<user> password=<pass>
260
261 =cut
262
263 =item B<--connect>
264
265 Supply the connect info as a json string
266
267 =cut
268
269 =item B<--sql-dir>
270
271 The directory where sql diffs will be created
272
273 =cut
274
275 =item B<--sql-type>
276
277 The RDBMs flavour you wish to use
278
279 =cut
280
281 =item B<--version>
282
283 Supply a version install
284
285 =cut
286
287 =item B<--preversion>
288
289 The previous version to diff against
290
291 =cut
292
293 =item B<--set>
294
295 JSON data used to perform data operations
296
297 =cut
298
299 =item B<--attrs>
300
301 JSON string to be used for the second argument for search
302
303 =cut
304
305 =item B<--where>
306
307 JSON string to be used for the where clause of search
308
309 =cut
310
311 =item B<--force>
312
313 Be forceful with some operations
314
315 =cut
316
317 =item B<--trace>
318
319 Turn on DBIx::Class trace output
320
321 =cut
322
323 =item B<--quiet>
324
325 Be less verbose
326
327 =cut
328
329 =back
330
331
332 =head1 AUTHORS
333
334 See L<DBIx::Class/CONTRIBUTORS>
89335
90336 =head1 LICENSE
91337
92338 You may distribute this code under the same terms as Perl itself
93339
94340 =cut
341
342 # vim: et ft=perl
44 use lib qw(t/lib);
55 use DBICTest;
66
7 my @MODULES = (
8 'Test::Pod 1.26',
9 );
10
117 # Don't run tests for installs
128 unless ( DBICTest::AuthorCheck->is_author || $ENV{AUTOMATED_TESTING} || $ENV{RELEASE_TESTING} ) {
139 plan( skip_all => "Author tests not required for installation" );
1410 }
1511
16 # Load the testing modules
17 foreach my $MODULE ( @MODULES ) {
18 eval "use $MODULE";
19 if ( $@ ) {
20 $ENV{RELEASE_TESTING}
21 ? die( "Failed to load required release-testing module $MODULE" )
22 : plan( skip_all => "$MODULE not available for testing" );
23 }
12 require DBIx::Class;
13 unless ( DBIx::Class::Optional::Dependencies->req_ok_for ('test_pod') ) {
14 my $missing = DBIx::Class::Optional::Dependencies->req_missing_for ('test_pod');
15 $ENV{RELEASE_TESTING} || DBICTest::AuthorCheck->is_author
16 ? die ("Failed to load release-testing module requirements: $missing")
17 : plan skip_all => "Test needs: $missing"
2418 }
2519
26 all_pod_files_ok();
20 Test::Pod::all_pod_files_ok();
55 use lib qw(t/lib);
66 use DBICTest;
77
8 my @MODULES = (
9 'Test::Pod::Coverage 1.08',
10 'Pod::Coverage 0.20',
11 );
12
138 # Don't run tests for installs
149 unless ( DBICTest::AuthorCheck->is_author || $ENV{AUTOMATED_TESTING} || $ENV{RELEASE_TESTING} ) {
1510 plan( skip_all => "Author tests not required for installation" );
1611 }
1712
18 # Load the testing modules
19 foreach my $MODULE ( @MODULES ) {
20 eval "use $MODULE";
21 if ( $@ ) {
22 $ENV{RELEASE_TESTING}
23 ? die( "Failed to load required release-testing module $MODULE" )
24 : plan( skip_all => "$MODULE not available for testing" );
25 }
13 require DBIx::Class;
14 unless ( DBIx::Class::Optional::Dependencies->req_ok_for ('test_podcoverage') ) {
15 my $missing = DBIx::Class::Optional::Dependencies->req_missing_for ('test_podcoverage');
16 $ENV{RELEASE_TESTING} || DBICTest::AuthorCheck->is_author
17 ? die ("Failed to load release-testing module requirements: $missing")
18 : plan skip_all => "Test needs: $missing"
2619 }
2720
2821 # Since this is about checking documentation, a little documentation
9285 /]
9386 },
9487
95 'DBIx::Class::Admin::Types' => { skip => 1 },
88 'DBIx::Class::Admin::*' => { skip => 1 },
9689 'DBIx::Class::ClassResolver::PassThrough' => { skip => 1 },
9790 'DBIx::Class::Componentised' => { skip => 1 },
9891 'DBIx::Class::Relationship::*' => { skip => 1 },
147140 if exists($ex->{ignore});
148141
149142 # run the test with the potentially modified parm set
150 pod_coverage_ok($module, $parms, "$module POD coverage");
143 Test::Pod::Coverage::pod_coverage_ok($module, $parms, "$module POD coverage");
151144 }
152145 }
153146
44 use lib qw(t/lib);
55
66 BEGIN {
7 eval { require Test::Memory::Cycle; require Devel::Cycle };
8 if ($@ or Devel::Cycle->VERSION < 1.10) {
9 plan skip_all => "leak test needs Test::Memory::Cycle and Devel::Cycle >= 1.10";
10 };
7 require DBIx::Class;
8 plan skip_all => 'Test needs: ' . DBIx::Class::Optional::Dependencies->req_missing_for ('test_cycle')
9 unless ( DBIx::Class::Optional::Dependencies->req_ok_for ('test_cycle') );
1110 }
1211
1312 use DBICTest;
66
77 my $schema = DBICTest->init_schema();
88
9 BEGIN {
10 eval "use DBD::SQLite";
11 plan $@ ? (skip_all => 'needs DBD::SQLite for testing') : (tests => 6);
12 }
13
149 my $art = $schema->resultset("Artist")->find(1);
1510
1611 isa_ok $art => 'DBICTest::Artist';
1914
2015 ok($art->name($name) eq $name, 'update');
2116
22 {
17 {
2318 my @changed_keys = $art->is_changed;
2419 is( scalar (@changed_keys), 0, 'field changed but same value' );
25 }
20 }
2621
2722 $art->discard_changes;
2823
3328 my $art_100 = $schema->resultset("Artist")->find(100);
3429 $art_100->artistid(101);
3530 ok($art_100->update(), 'update allows pk mutation via column accessor');
31
32 done_testing;
5252 $dbh->do("DROP SEQUENCE nonpkid_seq");
5353 $dbh->do("DROP TABLE artist");
5454 $dbh->do("DROP TABLE sequence_test");
55 $dbh->do("DROP TABLE track");
5556 $dbh->do("DROP TABLE cd");
56 $dbh->do("DROP TABLE track");
5757 };
5858 $dbh->do("CREATE SEQUENCE artist_seq START WITH 1 MAXVALUE 999999 MINVALUE 0");
5959 $dbh->do("CREATE SEQUENCE cd_seq START WITH 1 MAXVALUE 999999 MINVALUE 0");
6060 $dbh->do("CREATE SEQUENCE pkid1_seq START WITH 1 MAXVALUE 999999 MINVALUE 0");
6161 $dbh->do("CREATE SEQUENCE pkid2_seq START WITH 10 MAXVALUE 999999 MINVALUE 0");
6262 $dbh->do("CREATE SEQUENCE nonpkid_seq START WITH 20 MAXVALUE 999999 MINVALUE 0");
63
6364 $dbh->do("CREATE TABLE artist (artistid NUMBER(12), name VARCHAR(255), rank NUMBER(38), charfield VARCHAR2(10))");
65 $dbh->do("ALTER TABLE artist ADD (CONSTRAINT artist_pk PRIMARY KEY (artistid))");
66
6467 $dbh->do("CREATE TABLE sequence_test (pkid1 NUMBER(12), pkid2 NUMBER(12), nonpkid NUMBER(12), name VARCHAR(255))");
68 $dbh->do("ALTER TABLE sequence_test ADD (CONSTRAINT sequence_test_constraint PRIMARY KEY (pkid1, pkid2))");
69
6570 $dbh->do("CREATE TABLE cd (cdid NUMBER(12), artist NUMBER(12), title VARCHAR(255), year VARCHAR(4), genreid NUMBER(12), single_track NUMBER(12))");
66 $dbh->do("CREATE TABLE track (trackid NUMBER(12), cd NUMBER(12), position NUMBER(12), title VARCHAR(255), last_updated_on DATE, last_updated_at DATE, small_dt DATE)");
67
68 $dbh->do("ALTER TABLE artist ADD (CONSTRAINT artist_pk PRIMARY KEY (artistid))");
6971 $dbh->do("ALTER TABLE cd ADD (CONSTRAINT cd_pk PRIMARY KEY (cdid))");
70 $dbh->do("ALTER TABLE sequence_test ADD (CONSTRAINT sequence_test_constraint PRIMARY KEY (pkid1, pkid2))");
72
73 $dbh->do("CREATE TABLE track (trackid NUMBER(12), cd NUMBER(12) REFERENCES cd(cdid) DEFERRABLE, position NUMBER(12), title VARCHAR(255), last_updated_on DATE, last_updated_at DATE, small_dt DATE)");
74
7175 $dbh->do(qq{
7276 CREATE OR REPLACE TRIGGER artist_insert_trg
7377 BEFORE INSERT ON artist
217221 is( scalar @results, 1, "Group by with limit OK" );
218222 }
219223
224 # test with_deferred_fk_checks
225 lives_ok {
226 $schema->storage->with_deferred_fk_checks(sub {
227 $schema->resultset('Track')->create({
228 trackid => 999, cd => 999, position => 1, title => 'deferred FK track'
229 });
230 $schema->resultset('CD')->create({
231 artist => 1, cdid => 999, year => '2003', title => 'deferred FK cd'
232 });
233 });
234 } 'with_deferred_fk_checks code survived';
235
236 is eval { $schema->resultset('Track')->find(999)->title }, 'deferred FK track',
237 'code in with_deferred_fk_checks worked';
238
239 throws_ok {
240 $schema->resultset('Track')->create({
241 trackid => 1, cd => 9999, position => 1, title => 'Track1'
242 });
243 } qr/constraint/i, 'with_deferred_fk_checks is off';
244
220245 # test auto increment using sequences WITHOUT triggers
221246 for (1..5) {
222247 my $st = $schema->resultset('SequenceTest')->create({ name => 'foo' });
270295 $dbh->do("DROP SEQUENCE nonpkid_seq");
271296 $dbh->do("DROP TABLE artist");
272297 $dbh->do("DROP TABLE sequence_test");
298 $dbh->do("DROP TABLE track");
273299 $dbh->do("DROP TABLE cd");
274 $dbh->do("DROP TABLE track");
275300 $dbh->do("DROP TABLE bindtype_test");
276301 }
277302 }
55 use DBICTest;
66
77 my $schema = DBICTest->init_schema();
8
9 BEGIN {
10 eval "use DBD::SQLite";
11 plan $@ ? (skip_all => 'needs DBD::SQLite for testing') : (tests => 10);
12 }
138
149 # test LIMIT
1510 my $it = $schema->resultset("CD")->search( {},
7671 );
7772 is( $it->count, 1, "complex abstract count ok" );
7873
74 done_testing;
88 my $schema = DBICTest->init_schema();
99
1010 my $orig_debug = $schema->storage->debug;
11
12 BEGIN {
13 eval "use DBD::SQLite";
14 plan $@
15 ? ( skip_all => 'needs DBD::SQLite for testing' )
16 : ( tests => 33 );
17 }
1811
1912 # test the abstract join => SQL generator
2013 my $sa = new DBIx::Class::SQLAHacks;
239232 is(cd_count(), 5, '5 rows in table cd');
240233 is(tk_count(), 3, '3 rows in table twokeys');
241234 }
235
236 done_testing;
99 plan skip_all =>
1010 'Test needs ' . DBIx::Class::Optional::Dependencies->req_missing_for ('deploy')
1111 unless DBIx::Class::Optional::Dependencies->req_ok_for ('deploy')
12 }
13
14 my $custom_deployment_statements_called = 0;
15
16 sub DBICTest::Schema::deployment_statements {
17 $custom_deployment_statements_called = 1;
18 my $self = shift;
19 return $self->next::method(@_);
1220 }
1321
1422 my $schema = DBICTest->init_schema (no_deploy => 1);
4351
4452
4553
46 # replace the sqlt calback with a custom version ading an index
47 $schema->source('Track')->sqlt_deploy_callback(sub {
48 my ($self, $sqlt_table) = @_;
49
50 is (
51 $sqlt_table->schema->translator->producer_type,
52 join ('::', 'SQL::Translator::Producer', $schema->storage->sqlt_type),
53 'Production type passed to translator object',
54 );
55
56 if ($schema->storage->sqlt_type eq 'SQLite' ) {
57 $sqlt_table->add_index( name => 'track_title', fields => ['title'] )
58 or die $sqlt_table->error;
59 }
60
61 $self->default_sqlt_deploy_hook($sqlt_table);
62 });
63
64 $schema->deploy; # do not remove, this fires the is() test in the callback above
65
54 {
55 my $deploy_hook_called = 0;
56
57 # replace the sqlt calback with a custom version ading an index
58 $schema->source('Track')->sqlt_deploy_callback(sub {
59 my ($self, $sqlt_table) = @_;
60
61 $deploy_hook_called = 1;
62
63 is (
64 $sqlt_table->schema->translator->producer_type,
65 join ('::', 'SQL::Translator::Producer', $schema->storage->sqlt_type),
66 'Production type passed to translator object',
67 );
68
69 if ($schema->storage->sqlt_type eq 'SQLite' ) {
70 $sqlt_table->add_index( name => 'track_title', fields => ['title'] )
71 or die $sqlt_table->error;
72 }
73
74 $self->default_sqlt_deploy_hook($sqlt_table);
75 });
76
77 $schema->deploy; # do not remove, this fires the is() test in the callback above
78 ok($deploy_hook_called, 'deploy hook got called');
79 ok($custom_deployment_statements_called, '->deploy used the schemas deploy_statements method');
80 }
6681
6782
6883 my $translator = SQL::Translator->new(
1717 # Test for SQLT-related leaks
1818 {
1919 my $s = DBICTest::Schema->clone;
20 create_schema ({ schema => $s });
20 my $sqlt_schema = create_schema ({ schema => $s });
2121 Scalar::Util::weaken ($s);
2222
2323 ok (!$s, 'Schema not leaked');
24
25 isa_ok ($sqlt_schema, 'SQL::Translator::Schema', 'SQLT schema object produced');
2426 }
27
28 # make sure classname-style works
29 lives_ok { isa_ok (create_schema ({ schema => 'DBICTest::Schema' }), 'SQL::Translator::Schema', 'SQLT schema object produced') };
2530
2631
2732 my $schema = DBICTest->init_schema();
1818
1919 use Path::Class;
2020
21 use ok 'DBIx::Class::Admin';
21 use_ok 'DBIx::Class::Admin';
2222
2323
2424 my $sql_dir = dir(qw/t var/);
33 use Test::More;
44
55 use Test::Exception;
6 use Test::Deep;
76
87 BEGIN {
98 require DBIx::Class;
1413 use lib 't/lib';
1514 use DBICTest;
1615
17 use ok 'DBIx::Class::Admin';
16 use_ok 'DBIx::Class::Admin';
1817
1918
2019 { # test data maniplulation functions
5352 ];
5453 my $data;
5554 lives_ok { $data = $admin->select('Employee')} 'can retrive data from database';
56 cmp_deeply($data, $expected_data, 'DB matches whats expected');
55 is_deeply($data, $expected_data, 'DB matches whats expected');
5756
5857 $admin->delete('Employee', {name=>'Trout'});
5958 my $del_rs = $employees->search({name => 'Trout'});
77 use_ok('DBICTest');
88
99 my $schema = DBICTest->init_schema;
10
11 BEGIN {
12 eval "use DBD::SQLite";
13 plan $@
14 ? ( skip_all => 'needs DBD::SQLite for testing' )
15 : ( tests => 13 );
16 }
1710
1811 my $where_bind = {
1912 where => \'name like ?',
121114 bind => [ 'Spoon%' ] });
122115 is ( $rs->count, 1, '...cookbook + chained search with extra bind' );
123116 }
117
118 done_testing;
7373 $rs = $schema->resultset('Tag')->search({ tag => 'Blue' }, { '+select' => { max => 'tagid' }, distinct => 1 });
7474 is($get_count->($rs), 4, 'Count with +select aggreggate');
7575
76 $rs = $schema->resultset('Tag')->search({}, { select => 'length(me.tag)', distinct => 1 });
76 $rs = $schema->resultset('Tag')->search({}, { select => [\'length(me.tag)'], distinct => 1 });
7777 is($get_count->($rs), 3, 'Count by distinct function result as select literal');
7878 }
7979
0 use strict;
1 use warnings;
2
3 use Test::More;
4
5 use lib qw(t/lib);
6
7 use DBICTest;
8
9 my $schema = DBICTest->init_schema();
10
11 my $rs = $schema->resultset ('CD')->search ({}, {
12 select => [
13 { substr => [ 'title', 1, 1 ], -as => 'initial' },
14 { count => '*' },
15 ],
16 as => [qw/title_initial cnt/],
17 group_by => ['initial'],
18 order_by => { -desc => 'initial' },
19 result_class => 'DBIx::Class::ResultClass::HashRefInflator',
20 });
21
22 is_deeply (
23 [$rs->all],
24 [
25 { title_initial => 'S', cnt => '1' },
26 { title_initial => 'G', cnt => '1' },
27 { title_initial => 'F', cnt => '1' },
28 { title_initial => 'C', cnt => '2' },
29 ],
30 'Correct result',
31 );
32
33 is ($rs->count, 4, 'Correct count');
34
35 done_testing;
2525
2626 my $cd1 = $rs->find ({cdid => 1});
2727 is_deeply ( $cd1, $datahashref1, 'first/find return the same thing');
28
29 my $cd2 = $rs->search({ cdid => 1 })->single;
30 is_deeply ( $cd2, $datahashref1, 'first/search+single return the same thing');
2831 }
2932
3033 sub check_cols_of {
2626 my $root = _find_co_root()
2727 or return;
2828
29 my $optdeps = file('lib/DBIx/Class/Optional/Dependencies.pm');
30
2931 # not using file->stat as it invokes File::stat which in turn breaks stat(_)
30 my ($mf_pl_mtime, $mf_mtime) = ( map
32 my ($mf_pl_mtime, $mf_mtime, $optdeps_mtime) = ( map
3133 { (stat ($root->file ($_)) )[9] }
32 qw/Makefile.PL Makefile/
34 (qw|Makefile.PL Makefile|, $optdeps)
3335 );
3436
3537 return unless $mf_pl_mtime; # something went wrong during co_root detection ?
3638
37 if (
38 not -d $root->subdir ('inc')
39 or
40 not $mf_mtime
41 or
42 $mf_mtime < $mf_pl_mtime
43 ) {
39 my @fail_reasons;
40
41 if(not -d $root->subdir ('inc')) {
42 push @fail_reasons, "Missing ./inc directory";
43 }
44
45 if (not $mf_mtime) {
46 push @fail_reasons, "Missing ./Makefile";
47 }
48 elsif($mf_mtime < $mf_pl_mtime) {
49 push @fail_reasons, "./Makefile.PL is newer than ./Makefile";
50 }
51
52 if ($mf_mtime < $optdeps_mtime) {
53 push @fail_reasons, "./$optdeps is newer than ./Makefile";
54 }
55
56 if (@fail_reasons) {
4457 print STDERR <<'EOE';
45
46
4758
4859
4960 !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
6778 The DBIC team
6879
6980
81 Reasons you received this message:
7082
7183 EOE
84
85 foreach my $r (@fail_reasons) {
86 print STDERR " * $r\n";
87 }
88 print STDERR "\n\n\n";
7289
7390 exit 1;
7491 }
44
55 use lib qw(t/lib);
66 use DBIC::SqlMakerTest;
7
8 BEGIN {
9 eval "use DBD::SQLite";
10 plan $@
11 ? ( skip_all => 'needs DBD::SQLite for testing' )
12 : ( tests => 7 );
13 }
147
158
169 use_ok('DBICTest');
7164 $schema->storage->sql_maker->name_sep('.');
7265
7366 is($schema->storage->sql_maker->update('group', \%data), 'UPDATE `group` SET `name` = ?, `order` = ?', 'quoted table names for UPDATE');
67
68 done_testing;
44
55 use lib qw(t/lib);
66 use DBIC::SqlMakerTest;
7
8 BEGIN {
9 eval "use DBD::SQLite";
10 plan $@
11 ? ( skip_all => 'needs DBD::SQLite for testing' )
12 : ( tests => 7 );
13 }
147
158 use_ok('DBICTest');
169 use_ok('DBIC::DebugObj');
8881 );
8982
9083 is($schema->storage->sql_maker->update('group', \%data), 'UPDATE `group` SET `name` = ?, `order` = ?', 'quoted table names for UPDATE');
84
85 done_testing;
0 use Class::C3;
10 use strict;
2 use Test::More;
31 use warnings;
42
5 BEGIN {
6 eval "use DBD::SQLite";
7 plan $@
8 ? ( skip_all => 'needs DBD::SQLite for testing' )
9 : ( tests => 4 );
10 }
3 use Test::More;
4 use Test::Warn;
5 use Test::Exception;
116
127 use lib qw(t/lib);
13
148 use_ok( 'DBICTest' );
159 use_ok( 'DBICTest::Schema' );
10
1611 my $schema = DBICTest->init_schema;
1712
18 {
19 my $warnings;
20 local $SIG{__WARN__} = sub { $warnings .= $_[0] };
21 eval {
22 $schema->resultset('CD')
23 ->create({ title => 'vacation in antarctica' })
24 };
25 like $@, qr/NULL/; # as opposed to some other error
26 unlike( $warnings, qr/uninitialized value/, "No warning from Storage" );
27 }
13 warnings_are ( sub {
14 throws_ok (sub {
15 $schema->resultset('CD')->create({ title => 'vacation in antarctica' });
16 }, qr/NULL/); # as opposed to some other error
17 }, [], 'No warnings besides exception' );
2818
19 done_testing;