[svn-upgrade] Integrating new upstream version, libdbix-class-perl (0.08113)
Jonathan Yu
14 years ago
0 | 0 | Revision history for DBIx::Class |
1 | ||
2 | 0.08113 2009-11-13 23:13:00 (UTC) | |
3 | - Fix populate with has_many bug | |
4 | (RT #50828) | |
5 | - Fix Oracle autoincrement broken for Resultsets with scalar refs | |
6 | (RT #50874) | |
7 | - Complete Sybase RDBMS support including: | |
8 | - Support for TEXT/IMAGE columns | |
9 | - Support for the 'money' datatype | |
10 | - Transaction savepoints support | |
11 | - DateTime inflation support | |
12 | - Support for bind variables when connecting to a newer Sybase with | |
13 | OpenClient libraries | |
14 | - Support for connections via FreeTDS with CASTs for bind variables | |
15 | when needed | |
16 | - Support for interpolated variables with proper quoting when | |
17 | connecting to an older Sybase and/or via FreeTDS | |
18 | - bulk API support for populate() | |
19 | - Transaction support for MSSQL via DBD::Sybase | |
20 | - Add is_paged method to DBIx::Class::ResultSet so that we can | |
21 | check if we want a pager | |
22 | - Skip versioning test on really old perls lacking Time::HiRes | |
23 | (RT #50209) | |
24 | - Fixed on_connect_do/call regression when used with a coderef | |
25 | connector (RT #50003) | |
26 | - A couple of fixes to Ordered to remedy subclassing issues | |
27 | - Fixed another lingering problem with PostgreSQL | |
28 | auto-increment support and its interaction with multiple | |
29 | schemas | |
30 | - Remove some IN workarounds, and require a recent version of | |
31 | SQLA instead | |
32 | - Improvements to populate's handling of mixed scalarref values | |
33 | - Fixed regression losing result_class after $rs->find (introduced | |
34 | in 0.08108) | |
35 | - Fix in_storage() to return 1|0 as per existing documentation | |
36 | - Centralize handling of _determine_driver calls prior to certain | |
37 | ::Storage::DBI methods | |
38 | - Fix update/delete arbitrary condition handling (RT#51409) | |
39 | - POD improvements | |
1 | 40 | |
2 | 41 | 0.08112 2009-09-21 10:57:00 (UTC) |
3 | 42 | - Remove the recommends from Makefile.PL, DBIx::Class is not |
101 | 140 | nonexisting prefetch |
102 | 141 | - make_column_dirty() now overwrites the deflated value with an |
103 | 142 | inflated one if such exists |
104 | - Fixed set_$rel with where restriction deleting rows outside | |
143 | - Fixed set_$rel with where restriction deleting rows outside | |
105 | 144 | the restriction |
106 | 145 | - populate() returns the created objects or an arrayref of the |
107 | 146 | created objects depending on scalar vs. list context |
153 | 192 | side of the relation, to avoid duplicates |
154 | 193 | - DBIC now properly handles empty inserts (invoking all default |
155 | 194 | values from the DB, normally via INSERT INTO tbl DEFAULT VALUES) |
156 | - Fix find_or_new/create to stop returning random rows when | |
195 | - Fix find_or_new/create to stop returning random rows when | |
157 | 196 | default value insert is requested (RT#28875) |
158 | 197 | - Make IC::DT extra warning state the column name too |
159 | 198 | - It is now possible to transparently search() on columns |
175 | 214 | - Change ->count code to work correctly with DISTINCT (distinct => 1) |
176 | 215 | via GROUP BY |
177 | 216 | - Removed interpolation of bind vars for as_query - placeholders |
178 | are preserved and nested query bind variables are properly | |
217 | are preserved and nested query bind variables are properly | |
179 | 218 | merged in the correct order |
180 | - Refactor DBIx::Class::Storage::DBI::Sybase to automatically | |
219 | - Refactor DBIx::Class::Storage::DBI::Sybase to automatically | |
181 | 220 | load a subclass, namely Microsoft_SQL_Server.pm |
182 | 221 | (similar to DBIx::Class::Storage::DBI::ODBC) |
183 | 222 | - Refactor InflateColumn::DateTime to allow components to |
240 | 279 | - not try and insert things tagged on via new_related unless required |
241 | 280 | - Possible to set locale in IC::DateTime extra => {} config |
242 | 281 | - Calling the accessor of a belongs_to when the foreign_key |
243 | was NULL and the row was not stored would unexpectedly fail | |
282 | was NULL and the row was not stored would unexpectedly fail | |
244 | 283 | - Split sql statements for deploy only if SQLT::Producer returned a scalar |
245 | 284 | containing all statements to be executed |
246 | 285 | - Add as_query() for ResultSet and ResultSetColumn. This makes subqueries |
268 | 307 | - new order_by => { -desc => 'colname' } syntax supported |
269 | 308 | - PG array datatype supported |
270 | 309 | - insert should use store_column, not set_column to avoid marking |
271 | clean just-stored values as dirty. New test for this | |
272 | - regression test for source_name | |
310 | clean just-stored values as dirty. New test for this | |
311 | - regression test for source_name | |
273 | 312 | |
274 | 313 | 0.08099_05 2008-10-30 21:30:00 (UTC) |
275 | 314 | - Rewrite of Storage::DBI::connect_info(), extended with an |
283 | 322 | - Fixed up related resultsets and multi-create |
284 | 323 | - Fixed superfluous connection in ODBC::_rebless |
285 | 324 | - Fixed undef PK for first insert in ODBC::Microsoft_SQL_Server |
286 | - Added virtual method to Versioned so a user can create upgrade | |
325 | - Added virtual method to Versioned so a user can create upgrade | |
287 | 326 | path across multiple versions (jgoulah) |
288 | 327 | - Better (and marginally faster) implementation of the HashRefInflator |
289 | 328 | hash construction algorithm |
292 | 331 | |
293 | 332 | 0.08099_04 2008-07-24 01:00:00 |
294 | 333 | - Functionality to storage to enable a sub to be run without FK checks |
295 | - Fixed $schema->clone bug which caused clone and source to share | |
334 | - Fixed $schema->clone bug which caused clone and source to share | |
296 | 335 | internal hash refs |
297 | 336 | - Added register_extra_source methods for additional sources |
298 | 337 | - Added datetime_undef_if_invalid for InflateColumn::DateTime to |
318 | 357 | - Add warnings for non-unique ResultSet::find queries |
319 | 358 | - Changed Storage::DBI::Replication to Storage::DBI::Replicated and |
320 | 359 | refactored support. |
321 | - By default now deploy/diff et al. will ignore constraint and index | |
360 | - By default now deploy/diff et al. will ignore constraint and index | |
322 | 361 | names |
323 | 362 | - Add ResultSet::_is_deterministic_value, make new_result filter the |
324 | 363 | values passed to new to drop values that would generate invalid SQL. |
325 | - Use Sub::Name to name closures before installing them. Fixes | |
364 | - Use Sub::Name to name closures before installing them. Fixes | |
326 | 365 | incompatibility with Moose method modifiers on generated methods. |
327 | 366 | |
328 | 367 | 0.08010 2008-03-01 10:30 |
331 | 370 | 0.08009 2008-01-20 13:30 |
332 | 371 | - Made search_rs smarter about when to preserve the cache to fix |
333 | 372 | mm prefetch usage |
334 | - Added Storage::DBI subclass for MSSQL over ODBC. | |
373 | - Added Storage::DBI subclass for MSSQL over ODBC. | |
335 | 374 | - Added freeze, thaw and dclone methods to Schema so that thawed |
336 | 375 | objects will get re-attached to the schema. |
337 | 376 | - Moved dbicadmin to JSON::Any wrapped JSON.pm for a sane API |
345 | 384 | foreign and self parts the wrong way round in the condition |
346 | 385 | - ResultSetColumn::func() now returns all results if called in list |
347 | 386 | context; this makes things like func('DISTINCT') work as expected |
348 | - Many-to-many relationships now warn if the utility methods would | |
387 | - Many-to-many relationships now warn if the utility methods would | |
349 | 388 | clash |
350 | 389 | - InflateColumn::DateTime now accepts an extra parameter of timezone |
351 | 390 | to set timezone on the DT object (thanks Sergio Salvi) |
352 | - Added sqlt_deploy_hook to result classes so that indexes can be | |
391 | - Added sqlt_deploy_hook to result classes so that indexes can be | |
353 | 392 | added. |
354 | - Added startup checks to warn loudly if we appear to be running on | |
393 | - Added startup checks to warn loudly if we appear to be running on | |
355 | 394 | RedHat systems from perl-5.8.8-10 and up that have the bless/overload |
356 | 395 | patch applied (badly) which causes 2x -> 100x performance penalty. |
357 | 396 | (Jon Schutz) |
358 | - ResultSource::reverse_relationship_info can distinguish between | |
397 | - ResultSource::reverse_relationship_info can distinguish between | |
359 | 398 | sources using the same table |
360 | 399 | - Row::insert will now not fall over if passed duplicate related objects |
361 | - Row::copy will not fall over if you have two relationships to the | |
400 | - Row::copy will not fall over if you have two relationships to the | |
362 | 401 | same source with a unique constraint on it |
363 | 402 | |
364 | 403 | 0.08007 2007-09-04 19:36:00 |
370 | 409 | - Move to using Class::C3::Componentised |
371 | 410 | - Remove warn statement from DBIx::Class::Row |
372 | 411 | |
373 | 0.08005 2007-08-06 | |
412 | 0.08005 2007-08-06 | |
374 | 413 | - add timestamp fix re rt.cpan 26978 - no test yet but change |
375 | 414 | clearly should cause no regressions |
376 | 415 | - provide alias for related_resultset via local() so it's set |
385 | 424 | (original fix from diz) |
386 | 425 | |
387 | 426 | 0.08004 2007-08-06 19:00:00 |
388 | - fix storage connect code to not trigger bug via auto-viv | |
427 | - fix storage connect code to not trigger bug via auto-viv | |
389 | 428 | (test from aherzog) |
390 | 429 | - fixup cursor_class to be an 'inherited' attr for per-package defaults |
391 | 430 | - add default_resultset_attributes entry to Schema |
0 | (Potential) Features for 0.09 | |
1 | ============================= | |
2 | ||
3 | Row/find caching - would be pretty useful | |
4 | - Need to have good definitions of when the cache should be queried and when invalidated | |
5 | - Be able to supply own expiry? | |
6 | - Be able to invalidate manually? Single item / entire cache / single table | |
7 | ||
8 | Remove compose_connection / DB.pm | |
9 | - Everyone has probably forgotten what this is anyway.. | |
10 | ||
11 | Syntax improvements? | |
12 | - "as" to "alias" ? | |
13 | - "belongs_to" to "contains/refers/something" | |
14 | ||
15 | Using inflated objects/references as values in searches | |
16 | - should deflate then run search | |
17 | ||
18 | SQL/API feature complete? | |
19 | - UNION | |
20 | - proper join conditions! | |
21 | - function calls on the LHS of conditions.. | |
22 | ||
23 | Moosification - ouch | |
24 | ||
25 | Metamodel stuff - introspection | |
26 | ||
27 | Prefetch improvements | |
28 | - slow on mysql, speedup? | |
29 | - multi has_many prefetch | |
30 | ||
31 | Magically "discover" needed joins/prefetches and add them | |
32 | - eg $books->search({ 'author.name' => 'Fred'}), autoadds: join => 'author' | |
33 | - also guess aliases when supplying column names that are on joined/related tables | |
34 | ||
35 | Storage API/restructure | |
36 | - call update/insert etc on the ResultSource, which then calls to storage | |
37 | - handle different storages/db-specific code better | |
38 | - better cross-db code .. eg LIKE/ILIKE | |
39 | ||
40 | Relationships | |
41 | - single vs filter, discrepancies.. remove one of them and make behave the same? | |
42 | ||
43 | Joining/searching weird attribute tables? | |
44 | - Support legacy/badly designed junk better.. | |
45 | ||
46 | Documentation - improvements | |
47 | - better indexing for finding of stuff in general | |
48 | - more cross-referencing of docs |
0 | Changes | |
1 | examples/Schema/db/example.sql | |
2 | examples/Schema/insertdb.pl | |
3 | examples/Schema/MyDatabase/Main.pm | |
4 | examples/Schema/MyDatabase/Main/Result/Artist.pm | |
5 | examples/Schema/MyDatabase/Main/Result/Cd.pm | |
6 | examples/Schema/MyDatabase/Main/Result/Track.pm | |
7 | examples/Schema/testdb.pl | |
8 | inc/Module/AutoInstall.pm | |
9 | inc/Module/Install.pm | |
10 | inc/Module/Install/AutoInstall.pm | |
11 | inc/Module/Install/Base.pm | |
12 | inc/Module/Install/Can.pm | |
13 | inc/Module/Install/Fetch.pm | |
14 | inc/Module/Install/Include.pm | |
15 | inc/Module/Install/Makefile.pm | |
16 | inc/Module/Install/Metadata.pm | |
17 | inc/Module/Install/Scripts.pm | |
18 | inc/Module/Install/Win32.pm | |
19 | inc/Module/Install/WriteAll.pm | |
20 | lib/DBIx/Class.pm | |
21 | lib/DBIx/Class/AccessorGroup.pm | |
22 | lib/DBIx/Class/CDBICompat.pm | |
23 | lib/DBIx/Class/CDBICompat/AbstractSearch.pm | |
24 | lib/DBIx/Class/CDBICompat/AccessorMapping.pm | |
25 | lib/DBIx/Class/CDBICompat/AttributeAPI.pm | |
26 | lib/DBIx/Class/CDBICompat/AutoUpdate.pm | |
27 | lib/DBIx/Class/CDBICompat/ColumnCase.pm | |
28 | lib/DBIx/Class/CDBICompat/ColumnGroups.pm | |
29 | lib/DBIx/Class/CDBICompat/ColumnsAsHash.pm | |
30 | lib/DBIx/Class/CDBICompat/Constraints.pm | |
31 | lib/DBIx/Class/CDBICompat/Constructor.pm | |
32 | lib/DBIx/Class/CDBICompat/Copy.pm | |
33 | lib/DBIx/Class/CDBICompat/DestroyWarning.pm | |
34 | lib/DBIx/Class/CDBICompat/GetSet.pm | |
35 | lib/DBIx/Class/CDBICompat/ImaDBI.pm | |
36 | lib/DBIx/Class/CDBICompat/Iterator.pm | |
37 | lib/DBIx/Class/CDBICompat/LazyLoading.pm | |
38 | lib/DBIx/Class/CDBICompat/LiveObjectIndex.pm | |
39 | lib/DBIx/Class/CDBICompat/NoObjectIndex.pm | |
40 | lib/DBIx/Class/CDBICompat/Pager.pm | |
41 | lib/DBIx/Class/CDBICompat/ReadOnly.pm | |
42 | lib/DBIx/Class/CDBICompat/Relationship.pm | |
43 | lib/DBIx/Class/CDBICompat/Relationships.pm | |
44 | lib/DBIx/Class/CDBICompat/Retrieve.pm | |
45 | lib/DBIx/Class/CDBICompat/SQLTransformer.pm | |
46 | lib/DBIx/Class/CDBICompat/Stringify.pm | |
47 | lib/DBIx/Class/CDBICompat/TempColumns.pm | |
48 | lib/DBIx/Class/CDBICompat/Triggers.pm | |
49 | lib/DBIx/Class/ClassResolver/PassThrough.pm | |
50 | lib/DBIx/Class/Componentised.pm | |
51 | lib/DBIx/Class/Core.pm | |
52 | lib/DBIx/Class/Cursor.pm | |
53 | lib/DBIx/Class/DB.pm | |
54 | lib/DBIx/Class/Exception.pm | |
55 | lib/DBIx/Class/InflateColumn.pm | |
56 | lib/DBIx/Class/InflateColumn/DateTime.pm | |
57 | lib/DBIx/Class/InflateColumn/File.pm | |
58 | lib/DBIx/Class/Manual.pod | |
59 | lib/DBIx/Class/Manual/Component.pod | |
60 | lib/DBIx/Class/Manual/Cookbook.pod | |
61 | lib/DBIx/Class/Manual/DocMap.pod | |
62 | lib/DBIx/Class/Manual/Example.pod | |
63 | lib/DBIx/Class/Manual/FAQ.pod | |
64 | lib/DBIx/Class/Manual/Glossary.pod | |
65 | lib/DBIx/Class/Manual/Intro.pod | |
66 | lib/DBIx/Class/Manual/Joining.pod | |
67 | lib/DBIx/Class/Manual/Reading.pod | |
68 | lib/DBIx/Class/Manual/Troubleshooting.pod | |
69 | lib/DBIx/Class/Ordered.pm | |
70 | lib/DBIx/Class/PK.pm | |
71 | lib/DBIx/Class/PK/Auto.pm | |
72 | lib/DBIx/Class/PK/Auto/DB2.pm | |
73 | lib/DBIx/Class/PK/Auto/MSSQL.pm | |
74 | lib/DBIx/Class/PK/Auto/MySQL.pm | |
75 | lib/DBIx/Class/PK/Auto/Oracle.pm | |
76 | lib/DBIx/Class/PK/Auto/Pg.pm | |
77 | lib/DBIx/Class/PK/Auto/SQLite.pm | |
78 | lib/DBIx/Class/Relationship.pm | |
79 | lib/DBIx/Class/Relationship/Accessor.pm | |
80 | lib/DBIx/Class/Relationship/Base.pm | |
81 | lib/DBIx/Class/Relationship/BelongsTo.pm | |
82 | lib/DBIx/Class/Relationship/CascadeActions.pm | |
83 | lib/DBIx/Class/Relationship/HasMany.pm | |
84 | lib/DBIx/Class/Relationship/HasOne.pm | |
85 | lib/DBIx/Class/Relationship/Helpers.pm | |
86 | lib/DBIx/Class/Relationship/ManyToMany.pm | |
87 | lib/DBIx/Class/Relationship/ProxyMethods.pm | |
88 | lib/DBIx/Class/ResultClass/HashRefInflator.pm | |
89 | lib/DBIx/Class/ResultSet.pm | |
90 | lib/DBIx/Class/ResultSetColumn.pm | |
91 | lib/DBIx/Class/ResultSetManager.pm | |
92 | lib/DBIx/Class/ResultSetProxy.pm | |
93 | lib/DBIx/Class/ResultSource.pm | |
94 | lib/DBIx/Class/ResultSource/Table.pm | |
95 | lib/DBIx/Class/ResultSource/View.pm | |
96 | lib/DBIx/Class/ResultSourceHandle.pm | |
97 | lib/DBIx/Class/ResultSourceProxy.pm | |
98 | lib/DBIx/Class/ResultSourceProxy/Table.pm | |
99 | lib/DBIx/Class/Row.pm | |
100 | lib/DBIx/Class/Schema.pm | |
101 | lib/DBIx/Class/Schema/Versioned.pm | |
102 | lib/DBIx/Class/Serialize/Storable.pm | |
103 | lib/DBIx/Class/SQLAHacks.pm | |
104 | lib/DBIx/Class/SQLAHacks/MSSQL.pm | |
105 | lib/DBIx/Class/SQLAHacks/MySQL.pm | |
106 | lib/DBIx/Class/SQLAHacks/OracleJoins.pm | |
107 | lib/DBIx/Class/StartupCheck.pm | |
108 | lib/DBIx/Class/Storage.pm | |
109 | lib/DBIx/Class/Storage/DBI.pm | |
110 | lib/DBIx/Class/Storage/DBI/AmbiguousGlob.pm | |
111 | lib/DBIx/Class/Storage/DBI/AutoCast.pm | |
112 | lib/DBIx/Class/Storage/DBI/Cursor.pm | |
113 | lib/DBIx/Class/Storage/DBI/DB2.pm | |
114 | lib/DBIx/Class/Storage/DBI/MSSQL.pm | |
115 | lib/DBIx/Class/Storage/DBI/MultiColumnIn.pm | |
116 | lib/DBIx/Class/Storage/DBI/mysql.pm | |
117 | lib/DBIx/Class/Storage/DBI/NoBindVars.pm | |
118 | lib/DBIx/Class/Storage/DBI/ODBC.pm | |
119 | lib/DBIx/Class/Storage/DBI/ODBC/ACCESS.pm | |
120 | lib/DBIx/Class/Storage/DBI/ODBC/DB2_400_SQL.pm | |
121 | lib/DBIx/Class/Storage/DBI/ODBC/Microsoft_SQL_Server.pm | |
122 | lib/DBIx/Class/Storage/DBI/Oracle.pm | |
123 | lib/DBIx/Class/Storage/DBI/Oracle/Generic.pm | |
124 | lib/DBIx/Class/Storage/DBI/Oracle/WhereJoins.pm | |
125 | lib/DBIx/Class/Storage/DBI/Pg.pm | |
126 | lib/DBIx/Class/Storage/DBI/Replicated.pm | |
127 | lib/DBIx/Class/Storage/DBI/Replicated/Balancer.pm | |
128 | lib/DBIx/Class/Storage/DBI/Replicated/Balancer/First.pm | |
129 | lib/DBIx/Class/Storage/DBI/Replicated/Balancer/Random.pm | |
130 | lib/DBIx/Class/Storage/DBI/Replicated/Introduction.pod | |
131 | lib/DBIx/Class/Storage/DBI/Replicated/Pool.pm | |
132 | lib/DBIx/Class/Storage/DBI/Replicated/Replicant.pm | |
133 | lib/DBIx/Class/Storage/DBI/Replicated/Types.pm | |
134 | lib/DBIx/Class/Storage/DBI/Replicated/WithDSN.pm | |
135 | lib/DBIx/Class/Storage/DBI/Role/QueryCounter.pm | |
136 | lib/DBIx/Class/Storage/DBI/SQLite.pm | |
137 | lib/DBIx/Class/Storage/DBI/Sybase.pm | |
138 | lib/DBIx/Class/Storage/DBI/Sybase/Base.pm | |
139 | lib/DBIx/Class/Storage/DBI/Sybase/Microsoft_SQL_Server.pm | |
140 | lib/DBIx/Class/Storage/DBI/Sybase/Microsoft_SQL_Server/NoBindVars.pm | |
141 | lib/DBIx/Class/Storage/DBI/Sybase/MSSQL.pm | |
142 | lib/DBIx/Class/Storage/Statistics.pm | |
143 | lib/DBIx/Class/Storage/TxnScopeGuard.pm | |
144 | lib/DBIx/Class/UTF8Columns.pm | |
145 | lib/SQL/Translator/Parser/DBIx/Class.pm | |
146 | lib/SQL/Translator/Producer/DBIx/Class/File.pm | |
147 | Makefile.PL | |
148 | MANIFEST This list of files | |
149 | META.yml | |
150 | README | |
151 | script/dbicadmin | |
152 | t/02pod.t | |
153 | t/03podcoverage.t | |
154 | t/04dont_break_c3.t | |
155 | t/05components.t | |
156 | t/100extra_source.t | |
157 | t/100populate.t | |
158 | t/101populate_rs.t | |
159 | t/102load_classes.t | |
160 | t/103many_to_many_warning.t | |
161 | t/104view.t | |
162 | t/18insert_default.t | |
163 | t/19quotes.t | |
164 | t/19quotes_newstyle.t | |
165 | t/20setuperrors.t | |
166 | t/26dumper.t | |
167 | t/30dbicplain.t | |
168 | t/34exception_action.t | |
169 | t/39load_namespaces_1.t | |
170 | t/39load_namespaces_2.t | |
171 | t/39load_namespaces_3.t | |
172 | t/39load_namespaces_4.t | |
173 | t/39load_namespaces_exception.t | |
174 | t/39load_namespaces_rt41083.t | |
175 | t/40resultsetmanager.t | |
176 | t/41orrible.t | |
177 | t/42toplimit.t | |
178 | t/46where_attribute.t | |
179 | t/50fork.t | |
180 | t/51threads.t | |
181 | t/51threadtxn.t | |
182 | t/52cycle.t | |
183 | t/54taint.t | |
184 | t/55storage_stress.t | |
185 | t/60core.t | |
186 | t/61findnot.t | |
187 | t/63register_class.t | |
188 | t/63register_source.t | |
189 | t/64db.t | |
190 | t/65multipk.t | |
191 | t/67pager.t | |
192 | t/69update.t | |
193 | t/70auto.t | |
194 | t/71mysql.t | |
195 | t/72pg.t | |
196 | t/73oracle.t | |
197 | t/745db2.t | |
198 | t/746db2_400.t | |
199 | t/746mssql.t | |
200 | t/746sybase.t | |
201 | t/74mssql.t | |
202 | t/75limit.t | |
203 | t/76joins.t | |
204 | t/76select.t | |
205 | t/77join_count.t | |
206 | t/78self_referencial.t | |
207 | t/79aliasing.t | |
208 | t/80unique.t | |
209 | t/81transactions.t | |
210 | t/82cascade_copy.t | |
211 | t/83cache.t | |
212 | t/84serialize.t | |
213 | t/85utf8.t | |
214 | t/86might_have.t | |
215 | t/86sqlt.t | |
216 | t/87ordered.t | |
217 | t/88result_set_column.t | |
218 | t/89dbicadmin.t | |
219 | t/90ensure_class_loaded.t | |
220 | t/90join_torture.t | |
221 | t/91merge_attr.t | |
222 | t/93autocast.t | |
223 | t/93nobindvars.t | |
224 | t/93single_accessor_object.t | |
225 | t/94pk_mutation.t | |
226 | t/94versioning.t | |
227 | t/95sql_maker.t | |
228 | t/95sql_maker_quote.t | |
229 | t/96_is_deteministic_value.t | |
230 | t/97result_class.t | |
231 | t/98savepoints.t | |
232 | t/99dbic_sqlt_parser.t | |
233 | t/bind/attribute.t | |
234 | t/bind/bindtype_columns.t | |
235 | t/bind/order_by.t | |
236 | t/cdbi/01-columns.t | |
237 | t/cdbi/02-Film.t | |
238 | t/cdbi/03-subclassing.t | |
239 | t/cdbi/04-lazy.t | |
240 | t/cdbi/06-hasa.t | |
241 | t/cdbi/08-inheritcols.t | |
242 | t/cdbi/09-has_many.t | |
243 | t/cdbi/11-triggers.t | |
244 | t/cdbi/12-filter.t | |
245 | t/cdbi/13-constraint.t | |
246 | t/cdbi/14-might_have.t | |
247 | t/cdbi/15-accessor.t | |
248 | t/cdbi/16-reserved.t | |
249 | t/cdbi/18-has_a.t | |
250 | t/cdbi/19-set_sql.t | |
251 | t/cdbi/21-iterator.t | |
252 | t/cdbi/22-deflate_order.t | |
253 | t/cdbi/22-self_referential.t | |
254 | t/cdbi/23-cascade.t | |
255 | t/cdbi/24-meta_info.t | |
256 | t/cdbi/26-mutator.t | |
257 | t/cdbi/30-pager.t | |
258 | t/cdbi/68-inflate_has_a.t | |
259 | t/cdbi/98-failure.t | |
260 | t/cdbi/abstract/search_where.t | |
261 | t/cdbi/columns_as_hashes.t | |
262 | t/cdbi/columns_dont_override_custom_accessors.t | |
263 | t/cdbi/construct.t | |
264 | t/cdbi/copy.t | |
265 | t/cdbi/DeepAbstractSearch/01_search.t | |
266 | t/cdbi/early_column_heisenbug.t | |
267 | t/cdbi/has_many_loads_foreign_class.t | |
268 | t/cdbi/hasa_without_loading.t | |
269 | t/cdbi/max_min_value_of.t | |
270 | t/cdbi/mk_group_accessors.t | |
271 | t/cdbi/multi_column_set.t | |
272 | t/cdbi/object_cache.t | |
273 | t/cdbi/retrieve_from_sql_with_limit.t | |
274 | t/cdbi/set_to_undef.t | |
275 | t/cdbi/set_vs_DateTime.t | |
276 | t/cdbi/sweet/08pager.t | |
277 | t/cdbi/testlib/Actor.pm | |
278 | t/cdbi/testlib/ActorAlias.pm | |
279 | t/cdbi/testlib/Blurb.pm | |
280 | t/cdbi/testlib/CDBase.pm | |
281 | t/cdbi/testlib/DBIC/Test/SQLite.pm | |
282 | t/cdbi/testlib/Director.pm | |
283 | t/cdbi/testlib/Film.pm | |
284 | t/cdbi/testlib/Lazy.pm | |
285 | t/cdbi/testlib/Log.pm | |
286 | t/cdbi/testlib/MyBase.pm | |
287 | t/cdbi/testlib/MyFilm.pm | |
288 | t/cdbi/testlib/MyFoo.pm | |
289 | t/cdbi/testlib/MyStar.pm | |
290 | t/cdbi/testlib/MyStarLink.pm | |
291 | t/cdbi/testlib/MyStarLinkMCPK.pm | |
292 | t/cdbi/testlib/Order.pm | |
293 | t/cdbi/testlib/OtherFilm.pm | |
294 | t/cdbi/testlib/OtherThing.pm | |
295 | t/cdbi/testlib/Thing.pm | |
296 | t/count/count_rs.t | |
297 | t/count/distinct.t | |
298 | t/count/grouped_pager.t | |
299 | t/count/in_subquery.t | |
300 | t/count/joined.t | |
301 | t/count/prefetch.t | |
302 | t/delete/m2m.t | |
303 | t/delete/related.t | |
304 | t/discard_changes_in_DESTROY.t | |
305 | t/from_subquery.t | |
306 | t/inflate/core.t | |
307 | t/inflate/datetime.t | |
308 | t/inflate/datetime_determine_parser.t | |
309 | t/inflate/datetime_mssql.t | |
310 | t/inflate/datetime_mysql.t | |
311 | t/inflate/datetime_oracle.t | |
312 | t/inflate/datetime_pg.t | |
313 | t/inflate/file_column.t | |
314 | t/inflate/hri.t | |
315 | t/inflate/serialize.t | |
316 | t/lib/DBIC/DebugObj.pm | |
317 | t/lib/DBIC/SqlMakerTest.pm | |
318 | t/lib/DBICNSTest/Bogus/A.pm | |
319 | t/lib/DBICNSTest/Bogus/B.pm | |
320 | t/lib/DBICNSTest/Bogus/Bigos.pm | |
321 | t/lib/DBICNSTest/OtherRslt/D.pm | |
322 | t/lib/DBICNSTest/Result/A.pm | |
323 | t/lib/DBICNSTest/Result/B.pm | |
324 | t/lib/DBICNSTest/ResultSet/A.pm | |
325 | t/lib/DBICNSTest/ResultSet/C.pm | |
326 | t/lib/DBICNSTest/RSBase.pm | |
327 | t/lib/DBICNSTest/RSet/A.pm | |
328 | t/lib/DBICNSTest/RSet/C.pm | |
329 | t/lib/DBICNSTest/Rslt/A.pm | |
330 | t/lib/DBICNSTest/Rslt/B.pm | |
331 | t/lib/DBICNSTest/RtBug41083/ResultSet.pm | |
332 | t/lib/DBICNSTest/RtBug41083/ResultSet/Foo.pm | |
333 | t/lib/DBICNSTest/RtBug41083/ResultSet_A/A.pm | |
334 | t/lib/DBICNSTest/RtBug41083/Schema/Foo.pm | |
335 | t/lib/DBICNSTest/RtBug41083/Schema/Foo/Sub.pm | |
336 | t/lib/DBICNSTest/RtBug41083/Schema_A/A.pm | |
337 | t/lib/DBICNSTest/RtBug41083/Schema_A/A/Sub.pm | |
338 | t/lib/DBICTest.pm | |
339 | t/lib/DBICTest/AuthorCheck.pm | |
340 | t/lib/DBICTest/BaseResult.pm | |
341 | t/lib/DBICTest/BaseResultSet.pm | |
342 | t/lib/DBICTest/ErrorComponent.pm | |
343 | t/lib/DBICTest/FakeComponent.pm | |
344 | t/lib/DBICTest/ForeignComponent.pm | |
345 | t/lib/DBICTest/ForeignComponent/TestComp.pm | |
346 | t/lib/DBICTest/OptionalComponent.pm | |
347 | t/lib/DBICTest/Plain.pm | |
348 | t/lib/DBICTest/Plain/Test.pm | |
349 | t/lib/DBICTest/ResultSetManager.pm | |
350 | t/lib/DBICTest/ResultSetManager/Foo.pm | |
351 | t/lib/DBICTest/Schema.pm | |
352 | t/lib/DBICTest/Schema/Artist.pm | |
353 | t/lib/DBICTest/Schema/ArtistGUID.pm | |
354 | t/lib/DBICTest/Schema/ArtistSourceName.pm | |
355 | t/lib/DBICTest/Schema/ArtistSubclass.pm | |
356 | t/lib/DBICTest/Schema/ArtistUndirectedMap.pm | |
357 | t/lib/DBICTest/Schema/Artwork.pm | |
358 | t/lib/DBICTest/Schema/Artwork_to_Artist.pm | |
359 | t/lib/DBICTest/Schema/BindType.pm | |
360 | t/lib/DBICTest/Schema/Bookmark.pm | |
361 | t/lib/DBICTest/Schema/BooksInLibrary.pm | |
362 | t/lib/DBICTest/Schema/CD.pm | |
363 | t/lib/DBICTest/Schema/CD_to_Producer.pm | |
364 | t/lib/DBICTest/Schema/Collection.pm | |
365 | t/lib/DBICTest/Schema/CollectionObject.pm | |
366 | t/lib/DBICTest/Schema/CustomSql.pm | |
367 | t/lib/DBICTest/Schema/Dummy.pm | |
368 | t/lib/DBICTest/Schema/Employee.pm | |
369 | t/lib/DBICTest/Schema/Encoded.pm | |
370 | t/lib/DBICTest/Schema/Event.pm | |
371 | t/lib/DBICTest/Schema/EventTZ.pm | |
372 | t/lib/DBICTest/Schema/EventTZDeprecated.pm | |
373 | t/lib/DBICTest/Schema/EventTZPg.pm | |
374 | t/lib/DBICTest/Schema/FileColumn.pm | |
375 | t/lib/DBICTest/Schema/ForceForeign.pm | |
376 | t/lib/DBICTest/Schema/FourKeys.pm | |
377 | t/lib/DBICTest/Schema/FourKeys_to_TwoKeys.pm | |
378 | t/lib/DBICTest/Schema/Genre.pm | |
379 | t/lib/DBICTest/Schema/Image.pm | |
380 | t/lib/DBICTest/Schema/LinerNotes.pm | |
381 | t/lib/DBICTest/Schema/Link.pm | |
382 | t/lib/DBICTest/Schema/Lyrics.pm | |
383 | t/lib/DBICTest/Schema/LyricVersion.pm | |
384 | t/lib/DBICTest/Schema/Money.pm | |
385 | t/lib/DBICTest/Schema/NoPrimaryKey.pm | |
386 | t/lib/DBICTest/Schema/NoSuchClass.pm | |
387 | t/lib/DBICTest/Schema/OneKey.pm | |
388 | t/lib/DBICTest/Schema/Owners.pm | |
389 | t/lib/DBICTest/Schema/Producer.pm | |
390 | t/lib/DBICTest/Schema/SelfRef.pm | |
391 | t/lib/DBICTest/Schema/SelfRefAlias.pm | |
392 | t/lib/DBICTest/Schema/SequenceTest.pm | |
393 | t/lib/DBICTest/Schema/Serialized.pm | |
394 | t/lib/DBICTest/Schema/Tag.pm | |
395 | t/lib/DBICTest/Schema/Track.pm | |
396 | t/lib/DBICTest/Schema/TreeLike.pm | |
397 | t/lib/DBICTest/Schema/TwoKeys.pm | |
398 | t/lib/DBICTest/Schema/TwoKeyTreeLike.pm | |
399 | t/lib/DBICTest/Schema/TypedObject.pm | |
400 | t/lib/DBICTest/Schema/Year1999CDs.pm | |
401 | t/lib/DBICTest/Schema/Year2000CDs.pm | |
402 | t/lib/DBICTest/Stats.pm | |
403 | t/lib/DBICTest/SyntaxErrorComponent1.pm | |
404 | t/lib/DBICTest/SyntaxErrorComponent2.pm | |
405 | t/lib/DBICTest/SyntaxErrorComponent3.pm | |
406 | t/lib/DBICTest/Taint/Classes/Auto.pm | |
407 | t/lib/DBICTest/Taint/Classes/Manual.pm | |
408 | t/lib/DBICTest/Taint/Namespaces/Result/Test.pm | |
409 | t/lib/DBICVersionNew.pm | |
410 | t/lib/DBICVersionOrig.pm | |
411 | t/lib/sqlite.sql | |
412 | t/multi_create/cd_single.t | |
413 | t/multi_create/diamond.t | |
414 | t/multi_create/existing_in_chain.t | |
415 | t/multi_create/has_many.t | |
416 | t/multi_create/in_memory.t | |
417 | t/multi_create/insert_defaults.t | |
418 | t/multi_create/m2m.t | |
419 | t/multi_create/multilev_single_PKeqFK.t | |
420 | t/multi_create/reentrance_count.t | |
421 | t/multi_create/standard.t | |
422 | t/multi_create/torture.t | |
423 | t/ordered/cascade_delete.t | |
424 | t/prefetch/attrs_untouched.t | |
425 | t/prefetch/count.t | |
426 | t/prefetch/diamond.t | |
427 | t/prefetch/double_prefetch.t | |
428 | t/prefetch/grouped.t | |
429 | t/prefetch/incomplete.t | |
430 | t/prefetch/join_type.t | |
431 | t/prefetch/multiple_hasmany.t | |
432 | t/prefetch/standard.t | |
433 | t/prefetch/via_search_related.t | |
434 | t/prefetch/with_limit.t | |
435 | t/relationship/after_update.t | |
436 | t/relationship/core.t | |
437 | t/relationship/doesnt_exist.t | |
438 | t/relationship/update_or_create_multi.t | |
439 | t/relationship/update_or_create_single.t | |
440 | t/resultset/as_query.t | |
441 | t/resultset/update_delete.t | |
442 | t/resultset_class.t | |
443 | t/resultset_overload.t | |
444 | t/search/preserve_original_rs.t | |
445 | t/search/subquery.t | |
446 | t/storage/base.t | |
447 | t/storage/dbh_do.t | |
448 | t/storage/dbi_coderef.t | |
449 | t/storage/debug.t | |
450 | t/storage/disable_sth_caching.t | |
451 | t/storage/error.t | |
452 | t/storage/exception.t | |
453 | t/storage/on_connect_call.t | |
454 | t/storage/on_connect_do.t | |
455 | t/storage/ping_count.t | |
456 | t/storage/reconnect.t | |
457 | t/storage/replication.t | |
458 | t/storage/stats.t | |
459 | t/update/type_aware.t | |
460 | t/zzzzzzz_perl_perf_bug.t | |
461 | t/zzzzzzz_sqlite_deadlock.t |
0 | ^(?!script/|examples/|lib/|inc/|t/|Makefile.PL$|README$|MANIFEST$|Changes$|META.yml$) | |
1 | ||
2 | ||
3 | # Avoid version control files. | |
4 | \bRCS\b | |
5 | \bCVS\b | |
6 | ,v$ | |
7 | \B\.svn\b | |
8 | \B\.git\b | |
9 | \B\.gitignore\b | |
10 | \b_darcs\b | |
11 | ||
12 | # Avoid Makemaker generated and utility files. | |
13 | \bMakefile$ | |
14 | \bblib | |
15 | \bMakeMaker-\d | |
16 | \bpm_to_blib$ | |
17 | \bblibdirs$ | |
18 | ^MANIFEST\.SKIP$ | |
19 | ||
20 | # for developers only :) | |
21 | ^TODO$ | |
22 | ^Features_09$ | |
23 | ||
24 | # Avoid Module::Build generated and utility files. | |
25 | \bBuild$ | |
26 | \b_build | |
27 | ||
28 | # Avoid temp and backup files. | |
29 | ~$ | |
30 | \.tmp$ | |
31 | \.old$ | |
32 | \.bak$ | |
33 | \..*?\.sw[po]$ | |
34 | \#$ | |
35 | \b\.# | |
36 | ||
37 | # avoid OS X finder files | |
38 | \.DS_Store$ | |
39 | ||
40 | # Don't ship the test db | |
41 | ^t/var | |
42 | ||
43 | # Don't ship the last dist we built :) | |
44 | \.tar\.gz$ | |
45 | ||
46 | # Skip maint stuff | |
47 | ^maint/ | |
48 | ||
49 | # Avoid patch remnants | |
50 | \.orig$ | |
51 | \.rej$ | |
52 | ||
53 | # Don't use Module::Build anymore | |
54 | ^Build.PL$ |
0 | --- | |
1 | abstract: 'Extensible and flexible object <-> relational mapper.' | |
2 | author: | |
3 | - 'mst: Matt S. Trout <mst@shadowcatsystems.co.uk>' | |
4 | build_requires: | |
5 | ExtUtils::MakeMaker: 6.42 | |
6 | File::Temp: 0.22 | |
7 | Test::Builder: 0.33 | |
8 | Test::Deep: 0 | |
9 | Test::Exception: 0 | |
10 | Test::More: 0.92 | |
11 | Test::Warn: 0.21 | |
12 | configure_requires: | |
13 | ExtUtils::MakeMaker: 6.42 | |
14 | distribution_type: module | |
15 | generated_by: 'Module::Install version 0.91' | |
16 | license: perl | |
17 | meta-spec: | |
18 | url: http://module-build.sourceforge.net/META-spec-v1.4.html | |
19 | version: 1.4 | |
20 | name: DBIx-Class | |
21 | no_index: | |
22 | DBIx::Class::SQLAHacks: [] | |
23 | DBIx::Class::SQLAHacks::MSSQL: [] | |
24 | DBIx::Class::Storage::DBI::AmbiguousGlob: [] | |
25 | DBIx::Class::Storage::DBI::Sybase::Base: [] | |
26 | DBIx::Class::Storage::DBI::Sybase::Microsoft_SQL_Server: [] | |
27 | DBIx::Class::Storage::DBI::Sybase::Microsoft_SQL_Server::NoBindVars: [] | |
28 | directory: | |
29 | - examples | |
30 | - inc | |
31 | - t | |
32 | requires: | |
33 | Carp::Clan: 6.0 | |
34 | Class::Accessor::Grouped: 0.09000 | |
35 | Class::C3::Componentised: 1.0005 | |
36 | Class::Inspector: 1.24 | |
37 | DBD::SQLite: 1.25 | |
38 | DBI: 1.605 | |
39 | Data::Page: 2.00 | |
40 | JSON::Any: 1.18 | |
41 | List::Util: 0 | |
42 | MRO::Compat: 0.09 | |
43 | Module::Find: 0.06 | |
44 | Path::Class: 0.16 | |
45 | SQL::Abstract: 1.58 | |
46 | SQL::Abstract::Limit: 0.13 | |
47 | Scalar::Util: 0 | |
48 | Scope::Guard: 0.03 | |
49 | Storable: 0 | |
50 | Sub::Name: 0.04 | |
51 | perl: 5.6.1 | |
52 | resources: | |
53 | IRC: irc://irc.perl.org/#dbix-class | |
54 | MailingList: http://lists.scsys.co.uk/cgi-bin/mailman/listinfo/dbix-class | |
55 | license: http://dev.perl.org/licenses/ | |
56 | repository: http://dev.catalyst.perl.org/svnweb/bast/browse/DBIx-Class/ | |
57 | version: 0.08112 |
41 | 41 | requires 'Module::Find' => '0.06'; |
42 | 42 | requires 'Path::Class' => '0.16'; |
43 | 43 | requires 'Scope::Guard' => '0.03'; |
44 | requires 'SQL::Abstract' => '1.58'; | |
44 | requires 'SQL::Abstract' => '1.60'; | |
45 | 45 | requires 'SQL::Abstract::Limit' => '0.13'; |
46 | 46 | requires 'Sub::Name' => '0.04'; |
47 | requires 'Data::Dumper::Concise' => '1.000'; | |
47 | 48 | |
48 | 49 | my %replication_requires = ( |
49 | 50 | 'Moose', => '0.87', |
113 | 114 | 'DateTime::Format::Oracle' => '0', |
114 | 115 | ) : () |
115 | 116 | , |
117 | ||
118 | $ENV{DBICTEST_SYBASE_DSN} | |
119 | ? ( | |
120 | 'DateTime::Format::Sybase' => 0, | |
121 | ) : () | |
122 | , | |
116 | 123 | ); |
117 | 124 | #************************************************************************# |
118 | 125 | # Make ABSOLUTELY SURE that nothing on the list above is a real require, # |
131 | 138 | |
132 | 139 | resources 'IRC' => 'irc://irc.perl.org/#dbix-class'; |
133 | 140 | resources 'license' => 'http://dev.perl.org/licenses/'; |
134 | resources 'repository' => 'http://dev.catalyst.perl.org/svnweb/bast/browse/DBIx-Class/'; | |
141 | resources 'repository' => 'http://dev.catalyst.perl.org/repos/bast/DBIx-Class/'; | |
135 | 142 | resources 'MailingList' => 'http://lists.scsys.co.uk/cgi-bin/mailman/listinfo/dbix-class'; |
136 | 143 | |
137 | no_index 'DBIx::Class::Storage::DBI::Sybase::Base'; | |
144 | no_index 'DBIx::Class::Storage::DBI::Sybase::Common'; | |
138 | 145 | no_index 'DBIx::Class::SQLAHacks'; |
139 | 146 | no_index 'DBIx::Class::SQLAHacks::MSSQL'; |
140 | 147 | no_index 'DBIx::Class::Storage::DBI::AmbiguousGlob'; |
0 | NAME | |
1 | DBIx::Class - Extensible and flexible object <-> relational mapper. | |
2 | ||
3 | GETTING HELP/SUPPORT | |
4 | The community can be found via: | |
5 | ||
6 | Mailing list: http://lists.scsys.co.uk/mailman/listinfo/dbix-class/ | |
7 | ||
8 | SVN: http://dev.catalyst.perl.org/repos/bast/DBIx-Class/ | |
9 | ||
10 | SVNWeb: http://dev.catalyst.perl.org/svnweb/bast/browse/DBIx-Class/ | |
11 | ||
12 | IRC: irc.perl.org#dbix-class | |
13 | ||
14 | SYNOPSIS | |
15 | Create a schema class called MyDB/Schema.pm: | |
16 | ||
17 | package MyDB::Schema; | |
18 | use base qw/DBIx::Class::Schema/; | |
19 | ||
20 | __PACKAGE__->load_namespaces(); | |
21 | ||
22 | 1; | |
23 | ||
24 | Create a result class to represent artists, who have many CDs, in | |
25 | MyDB/Schema/Result/Artist.pm: | |
26 | ||
27 | See DBIx::Class::ResultSource for docs on defining result classes. | |
28 | ||
29 | package MyDB::Schema::Result::Artist; | |
30 | use base qw/DBIx::Class/; | |
31 | ||
32 | __PACKAGE__->load_components(qw/Core/); | |
33 | __PACKAGE__->table('artist'); | |
34 | __PACKAGE__->add_columns(qw/ artistid name /); | |
35 | __PACKAGE__->set_primary_key('artistid'); | |
36 | __PACKAGE__->has_many(cds => 'MyDB::Schema::Result::CD'); | |
37 | ||
38 | 1; | |
39 | ||
40 | A result class to represent a CD, which belongs to an artist, in | |
41 | MyDB/Schema/Result/CD.pm: | |
42 | ||
43 | package MyDB::Schema::Result::CD; | |
44 | use base qw/DBIx::Class/; | |
45 | ||
46 | __PACKAGE__->load_components(qw/Core/); | |
47 | __PACKAGE__->table('cd'); | |
48 | __PACKAGE__->add_columns(qw/ cdid artistid title year /); | |
49 | __PACKAGE__->set_primary_key('cdid'); | |
50 | __PACKAGE__->belongs_to(artist => 'MyDB::Schema::Result::Artist', 'artistid'); | |
51 | ||
52 | 1; | |
53 | ||
54 | Then you can use these classes in your application's code: | |
55 | ||
56 | # Connect to your database. | |
57 | use MyDB::Schema; | |
58 | my $schema = MyDB::Schema->connect($dbi_dsn, $user, $pass, \%dbi_params); | |
59 | ||
60 | # Query for all artists and put them in an array, | |
61 | # or retrieve them as a result set object. | |
62 | # $schema->resultset returns a DBIx::Class::ResultSet | |
63 | my @all_artists = $schema->resultset('Artist')->all; | |
64 | my $all_artists_rs = $schema->resultset('Artist'); | |
65 | ||
66 | # Output all artists names | |
67 | # $artist here is a DBIx::Class::Row, which has accessors | |
68 | # for all its columns. Rows are also subclasses of your Result class. | |
69 | foreach my $artist (@all_artists) { | |
70 | print $artist->name, "\n"; | |
71 | } | |
72 | ||
73 | # Create a result set to search for artists. | |
74 | # This does not query the DB. | |
75 | my $johns_rs = $schema->resultset('Artist')->search( | |
76 | # Build your WHERE using an SQL::Abstract structure: | |
77 | { name => { like => 'John%' } } | |
78 | ); | |
79 | ||
80 | # Execute a joined query to get the cds. | |
81 | my @all_john_cds = $johns_rs->search_related('cds')->all; | |
82 | ||
83 | # Fetch the next available row. | |
84 | my $first_john = $johns_rs->next; | |
85 | ||
86 | # Specify ORDER BY on the query. | |
87 | my $first_john_cds_by_title_rs = $first_john->cds( | |
88 | undef, | |
89 | { order_by => 'title' } | |
90 | ); | |
91 | ||
92 | # Create a result set that will fetch the artist data | |
93 | # at the same time as it fetches CDs, using only one query. | |
94 | my $millennium_cds_rs = $schema->resultset('CD')->search( | |
95 | { year => 2000 }, | |
96 | { prefetch => 'artist' } | |
97 | ); | |
98 | ||
99 | my $cd = $millennium_cds_rs->next; # SELECT ... FROM cds JOIN artists ... | |
100 | my $cd_artist_name = $cd->artist->name; # Already has the data so no 2nd query | |
101 | ||
102 | # new() makes a DBIx::Class::Row object but doesn't insert it into the DB. | |
103 | # create() is the same as new() then insert(). | |
104 | my $new_cd = $schema->resultset('CD')->new({ title => 'Spoon' }); | |
105 | $new_cd->artist($cd->artist); | |
106 | $new_cd->insert; # Auto-increment primary key filled in after INSERT | |
107 | $new_cd->title('Fork'); | |
108 | ||
109 | $schema->txn_do(sub { $new_cd->update }); # Runs the update in a transaction | |
110 | ||
111 | # change the year of all the millennium CDs at once | |
112 | $millennium_cds_rs->update({ year => 2002 }); | |
113 | ||
114 | DESCRIPTION | |
115 | This is an SQL to OO mapper with an object API inspired by Class::DBI | |
116 | (with a compatibility layer as a springboard for porting) and a | |
117 | resultset API that allows abstract encapsulation of database operations. | |
118 | It aims to make representing queries in your code as perl-ish as | |
119 | possible while still providing access to as many of the capabilities of | |
120 | the database as possible, including retrieving related records from | |
121 | multiple tables in a single query, JOIN, LEFT JOIN, COUNT, DISTINCT, | |
122 | GROUP BY, ORDER BY and HAVING support. | |
123 | ||
124 | DBIx::Class can handle multi-column primary and foreign keys, complex | |
125 | queries and database-level paging, and does its best to only query the | |
126 | database in order to return something you've directly asked for. If a | |
127 | resultset is used as an iterator it only fetches rows off the statement | |
128 | handle as requested in order to minimise memory usage. It has | |
129 | auto-increment support for SQLite, MySQL, PostgreSQL, Oracle, SQL Server | |
130 | and DB2 and is known to be used in production on at least the first | |
131 | four, and is fork- and thread-safe out of the box (although your DBD may | |
132 | not be). | |
133 | ||
134 | This project is still under rapid development, so large new features may | |
135 | be marked EXPERIMENTAL - such APIs are still usable but may have edge | |
136 | bugs. Failing test cases are *always* welcome and point releases are put | |
137 | out rapidly as bugs are found and fixed. | |
138 | ||
139 | We do our best to maintain full backwards compatibility for published | |
140 | APIs, since DBIx::Class is used in production in many organisations, and | |
141 | even backwards incompatible changes to non-published APIs will be fixed | |
142 | if they're reported and doing so doesn't cost the codebase anything. | |
143 | ||
144 | The test suite is quite substantial, and several developer releases are | |
145 | generally made to CPAN before the branch for the next release is merged | |
146 | back to trunk for a major release. | |
147 | ||
148 | WHERE TO GO NEXT | |
149 | DBIx::Class::Manual::DocMap lists each task you might want help on, and | |
150 | the modules where you will find documentation. | |
151 | ||
152 | AUTHOR | |
153 | mst: Matt S. Trout <mst@shadowcatsystems.co.uk> | |
154 | ||
155 | (I mostly consider myself "project founder" these days but the AUTHOR | |
156 | heading is traditional :) | |
157 | ||
158 | CONTRIBUTORS | |
159 | abraxxa: Alexander Hartmaier <alex_hartmaier@hotmail.com> | |
160 | ||
161 | aherzog: Adam Herzog <adam@herzogdesigns.com> | |
162 | ||
163 | andyg: Andy Grundman <andy@hybridized.org> | |
164 | ||
165 | ank: Andres Kievsky | |
166 | ||
167 | arcanez: Justin Hunter <justin.d.hunter@gmail.com> | |
168 | ||
169 | ash: Ash Berlin <ash@cpan.org> | |
170 | ||
171 | bert: Norbert Csongradi <bert@cpan.org> | |
172 | ||
173 | blblack: Brandon L. Black <blblack@gmail.com> | |
174 | ||
175 | bluefeet: Aran Deltac <bluefeet@cpan.org> | |
176 | ||
177 | bricas: Brian Cassidy <bricas@cpan.org> | |
178 | ||
179 | brunov: Bruno Vecchi <vecchi.b@gmail.com> | |
180 | ||
181 | caelum: Rafael Kitover <rkitover@cpan.org> | |
182 | ||
183 | castaway: Jess Robinson | |
184 | ||
185 | claco: Christopher H. Laco | |
186 | ||
187 | clkao: CL Kao | |
188 | ||
189 | da5id: David Jack Olrik <djo@cpan.org> | |
190 | ||
191 | debolaz: Anders Nor Berle <berle@cpan.org> | |
192 | ||
193 | dkubb: Dan Kubb <dan.kubb-cpan@onautopilot.com> | |
194 | ||
195 | dnm: Justin Wheeler <jwheeler@datademons.com> | |
196 | ||
197 | dwc: Daniel Westermann-Clark <danieltwc@cpan.org> | |
198 | ||
199 | dyfrgi: Michael Leuchtenburg <michael@slashhome.org> | |
200 | ||
201 | frew: Arthur Axel "fREW" Schmidt <frioux@gmail.com> | |
202 | ||
203 | gphat: Cory G Watson <gphat@cpan.org> | |
204 | ||
205 | groditi: Guillermo Roditi <groditi@cpan.org> | |
206 | ||
207 | ilmari: Dagfinn Ilmari Mannsåker <ilmari@ilmari.org> | |
208 | ||
209 | jasonmay: Jason May <jason.a.may@gmail.com> | |
210 | ||
211 | jesper: Jesper Krogh | |
212 | ||
213 | jgoulah: John Goulah <jgoulah@cpan.org> | |
214 | ||
215 | jguenther: Justin Guenther <jguenther@cpan.org> | |
216 | ||
217 | jnapiorkowski: John Napiorkowski <jjn1056@yahoo.com> | |
218 | ||
219 | jon: Jon Schutz <jjschutz@cpan.org> | |
220 | ||
221 | jshirley: J. Shirley <jshirley@gmail.com> | |
222 | ||
223 | konobi: Scott McWhirter | |
224 | ||
225 | lukes: Luke Saunders <luke.saunders@gmail.com> | |
226 | ||
227 | marcus: Marcus Ramberg <mramberg@cpan.org> | |
228 | ||
229 | mattlaw: Matt Lawrence | |
230 | ||
231 | michaelr: Michael Reddick <michael.reddick@gmail.com> | |
232 | ||
233 | ned: Neil de Carteret | |
234 | ||
235 | nigel: Nigel Metheringham <nigelm@cpan.org> | |
236 | ||
237 | ningu: David Kamholz <dkamholz@cpan.org> | |
238 | ||
239 | Nniuq: Ron "Quinn" Straight <quinnfazigu@gmail.org> | |
240 | ||
241 | norbi: Norbert Buchmuller <norbi@nix.hu> | |
242 | ||
243 | Numa: Dan Sully <daniel@cpan.org> | |
244 | ||
245 | oyse: Øystein Torget <oystein.torget@dnv.com> | |
246 | ||
247 | paulm: Paul Makepeace | |
248 | ||
249 | penguin: K J Cheetham | |
250 | ||
251 | perigrin: Chris Prather <chris@prather.org> | |
252 | ||
253 | peter: Peter Collingbourne <peter@pcc.me.uk> | |
254 | ||
255 | phaylon: Robert Sedlacek <phaylon@dunkelheit.at> | |
256 | ||
257 | plu: Johannes Plunien <plu@cpan.org> | |
258 | ||
259 | quicksilver: Jules Bean | |
260 | ||
261 | rafl: Florian Ragwitz <rafl@debian.org> | |
262 | ||
263 | rbuels: Robert Buels <rmb32@cornell.edu> | |
264 | ||
265 | rdj: Ryan D Johnson <ryan@innerfence.com> | |
266 | ||
267 | ribasushi: Peter Rabbitson <rabbit+dbic@rabbit.us> | |
268 | ||
269 | rjbs: Ricardo Signes <rjbs@cpan.org> | |
270 | ||
271 | robkinyon: Rob Kinyon <rkinyon@cpan.org> | |
272 | ||
273 | sc_: Just Another Perl Hacker | |
274 | ||
275 | scotty: Scotty Allen <scotty@scottyallen.com> | |
276 | ||
277 | semifor: Marc Mims <marc@questright.com> | |
278 | ||
279 | solomon: Jared Johnson <jaredj@nmgi.com> | |
280 | ||
281 | spb: Stephen Bennett <stephen@freenode.net> | |
282 | ||
283 | sszabo: Stephan Szabo <sszabo@bigpanda.com> | |
284 | ||
285 | teejay: Aaron Trevena <teejay@cpan.org> | |
286 | ||
287 | Todd Lipcon | |
288 | ||
289 | Tom Hukins | |
290 | ||
291 | typester: Daisuke Murase <typester@cpan.org> | |
292 | ||
293 | victori: Victor Igumnov <victori@cpan.org> | |
294 | ||
295 | wdh: Will Hawes | |
296 | ||
297 | willert: Sebastian Willert <willert@cpan.org> | |
298 | ||
299 | wreis: Wallace Reis <wreis@cpan.org> | |
300 | ||
301 | zamolxes: Bogdan Lucaciu <bogdan@wiz.ro> | |
302 | ||
303 | COPYRIGHT | |
304 | Copyright (c) 2005 - 2009 the DBIx::Class "AUTHOR" and "CONTRIBUTORS" as | |
305 | listed above. | |
306 | ||
307 | LICENSE | |
308 | This library is free software and may be distributed under the same | |
309 | terms as perl itself. | |
310 |
0 | 2005-04-16 by mst | |
1 | - set_from_related should take undef | |
2 | - ResultSource objects caching ->resultset causes interesting problems | |
3 | - find why XSUB dumper kills schema in Catalyst (may be Pg only?) | |
4 | ||
5 | 2006-03-25 by mst | |
6 | - find a way to un-wantarray search without breaking compat | |
7 | - delay relationship setup if done via ->load_classes | |
8 | - double-sided relationships | |
9 | - make short form of class specifier in relationships work | |
10 | ||
11 | 2006-01-31 by bluefeet | |
12 | - Create a DBIx::Class::FilterColumn to replace inflate/deflate. This | |
13 | component would provide a new syntax for filtering column update and | |
14 | retrieval through a simple syntax. The syntax would be: | |
15 | __PACKAGE__->add_columns(phone => { set=>sub{ ... }, get=>sub{ ... } }); | |
16 | We should still support the old inflate/deflate syntax, but this new | |
17 | way should be recommended. | |
18 | ||
19 | 2006-03-18 by bluefeet | |
20 | - Support table locking. | |
21 | ||
22 | 2006-03-21 by bluefeet | |
23 | - When subclassing a dbic class make it so you don't have to do | |
24 | __PACKAGE__->table(__PACKAGE__->table()); for the result set to | |
25 | return the correct object type. | |
26 | ||
27 | 2006-05-25 by mst (TODOed by bluefeet) | |
28 | Add the search attributes "limit" and "rows_per_page". | |
29 | limit: work as expected just like offset does | |
30 | rows_per_page: only be used if you used the page attr or called $rs->page | |
31 | rows: modify to be an alias that gets used to populate either as appropriate, | |
32 | if you haven't specified one of the others | |
33 | ||
34 | 2008-10-30 by ribasushi | |
35 | - Rewrite the test suite to rely on $schema->deploy, allowing for seamless | |
36 | testing of various RDBMS using the same tests | |
37 | - Automatically infer quote_char/name_sep from $schema->storage | |
38 | - Fix and properly test chained search attribute merging | |
39 | - Recursive update() (all code seems to be already available) |
0 | #line 1 | |
1 | package Module::AutoInstall; | |
2 | ||
3 | use strict; | |
4 | use Cwd (); | |
5 | use ExtUtils::MakeMaker (); | |
6 | ||
7 | use vars qw{$VERSION}; | |
8 | BEGIN { | |
9 | $VERSION = '1.03'; | |
10 | } | |
11 | ||
# special map on pre-defined feature sets
my %FeatureMap = (
    ''      => 'Core Features',    # XXX: deprecated
    '-core' => 'Core Features',
);

# various lexical flags
# @Missing / @Existing hold flat (module => version) pairs found missing
# or satisfied by import(); %DisabledTests marks test files to skip;
# $UnderCPAN / $HasCPANPLUS cache outer-installer detection results.
my ( @Missing, @Existing, %DisabledTests, $UnderCPAN, $HasCPANPLUS );
# Behaviour switches, parsed from @ARGV and %ENV by _init() below.
my (
    $Config, $CheckOnly, $SkipInstall, $AcceptDefault, $TestOnly, $AllDeps
);
my ( $PostambleActions, $PostambleUsed );

# See if it's a testing or non-interactive session
_accept_default( $ENV{AUTOMATED_TESTING} or ! -t STDIN );
_init();
28 | ||
# Set whether prompts should auto-accept their default answer
# (true for automated-testing or non-interactive sessions).
sub _accept_default {
    $AcceptDefault = shift;
}
32 | ||
# Return the flat list of (module => version) pairs that import()
# recorded as missing.
sub missing_modules {
    return @Missing;
}
36 | ||
# Install every module recorded as missing, forwarding any stored CPAN
# configuration ($Config may be a hashref or an arrayref) flattened
# into a single list.
sub do_install {
    my @cpan_config;
    if ($Config) {
        @cpan_config =
            UNIVERSAL::isa( $Config, 'HASH' ) ? %{$Config} : @{$Config};
    }

    __PACKAGE__->install( [ @cpan_config ], @Missing );
}
47 | ||
# initialize various flags, and/or perform install
#
# Parses option words from @ARGV and from the PERL_AUTOINSTALL /
# PERL_EXTUTILS_AUTOINSTALL environment variables (whitespace-split),
# setting the module-lexical behaviour flags.  --installdeps installs
# the named modules immediately and exits the process.
sub _init {
    foreach my $arg (
        @ARGV,
        split(
            /[\s\t]+/,
            $ENV{PERL_AUTOINSTALL} || $ENV{PERL_EXTUTILS_AUTOINSTALL} || ''
        )
    )
    {
        if ( $arg =~ /^--config=(.*)$/ ) {
            # comma-separated key,value pairs of CPAN configuration options
            $Config = [ split( ',', $1 ) ];
        }
        elsif ( $arg =~ /^--installdeps=(.*)$/ ) {
            # install the listed modules right now, then stop
            __PACKAGE__->install( $Config, @Missing = split( /,/, $1 ) );
            exit 0;
        }
        elsif ( $arg =~ /^--default(?:deps)?$/ ) {
            $AcceptDefault = 1;    # accept the default answer to all prompts
        }
        elsif ( $arg =~ /^--check(?:deps)?$/ ) {
            $CheckOnly = 1;        # report missing deps, never install
        }
        elsif ( $arg =~ /^--skip(?:deps)?$/ ) {
            $SkipInstall = 1;      # never attempt any installation
        }
        elsif ( $arg =~ /^--test(?:only)?$/ ) {
            $TestOnly = 1;
        }
        elsif ( $arg =~ /^--all(?:deps)?$/ ) {
            $AllDeps = 1;          # treat every optional feature as wanted
        }
    }
}
82 | ||
# Overrides MakeMaker's prompt() when running non-interactively:
# echoes the question with a [Y/n]-style hint, prints the default
# answer, and returns it.  In interactive sessions this delegates
# straight to ExtUtils::MakeMaker::prompt().
sub _prompt {
    goto &ExtUtils::MakeMaker::prompt unless $AcceptDefault;

    my ( $question, $default ) = @_;

    # Capitalise whichever choice matches the default answer.
    my $default_is_yes = $default =~ /^[Yy]/;
    my $hint = $default_is_yes ? 'Y/n' : 'y/N';

    print "$question [$hint] $default\n";
    return $default;
}
94 | ||
# the workhorse
#
# Processes the feature/module lists handed to "use Module::AutoInstall",
# probing each requested module with _load(), printing a status report,
# prompting for optional features, and accumulating still-missing
# modules in @Missing for later installation.  Also installs the
# WriteMakefile wrapper into main:: when invoked from a Makefile.PL.
#
# BUG FIX: the "Dependencies will be installed..." message string was
# sitting in void context (the leading `print` had been lost), so the
# summary was never shown to the user.
sub import {
    my $class = shift;
    my @args  = @_ or return;
    my $core_all;

    print "*** $class version " . $class->VERSION . "\n";
    print "*** Checking for Perl dependencies...\n";

    my $cwd = Cwd::cwd();

    $Config = [];

    # Width of the longest module name, used to align the status column.
    my $maxlen = length(
        (
            sort   { length($b) <=> length($a) }
              grep { /^[^\-]/ }
              map  {
                ref($_)
                  ? ( ( ref($_) eq 'HASH' ) ? keys(%$_) : @{$_} )
                  : ''
              }
              map  { +{@args}->{$_} }
              grep { /^[^\-]/ or /^-core$/i } keys %{ +{@args} }
        )[0]
    );

    # We want to know if we're under CPAN early to avoid prompting, but
    # if we aren't going to try and install anything anyway then skip the
    # check entirely since we don't want to have to load (and configure)
    # an old CPAN just for a cosmetic message

    $UnderCPAN = _check_lock(1) unless $SkipInstall;

    while ( my ( $feature, $modules ) = splice( @args, 0, 2 ) ) {
        my ( @required, @tests, @skiptests );
        my $default  = 1;
        my $conflict = 0;

        if ( $feature =~ m/^-(\w+)$/ ) {
            my $option = lc($1);

            # check for a newer version of myself
            _update_to( $modules, @_ ) and return if $option eq 'version';

            # sets CPAN configuration options
            $Config = $modules if $option eq 'config';

            # promote every features to core status
            $core_all = ( $modules =~ /^all$/i ) and next
              if $option eq 'core';

            next unless $option eq 'core';
        }

        print "[" . ( $FeatureMap{ lc($feature) } || $feature ) . "]\n";

        $modules = [ %{$modules} ] if UNIVERSAL::isa( $modules, 'HASH' );

        unshift @$modules, -default => &{ shift(@$modules) }
          if ( ref( $modules->[0] ) eq 'CODE' );    # XXX: bugward combatability

        while ( my ( $mod, $arg ) = splice( @$modules, 0, 2 ) ) {
            if ( $mod =~ m/^-(\w+)$/ ) {
                my $option = lc($1);

                $default   = $arg if ( $option eq 'default' );
                $conflict  = $arg if ( $option eq 'conflict' );
                @tests     = @{$arg} if ( $option eq 'tests' );
                @skiptests = @{$arg} if ( $option eq 'skiptests' );

                next;
            }

            printf( "- %-${maxlen}s ...", $mod );

            if ( $arg and $arg =~ /^\D/ ) {
                unshift @$modules, $arg;
                $arg = 0;
            }

            # XXX: check for conflicts and uninstalls(!) them.
            my $cur = _load($mod);
            if (_version_cmp ($cur, $arg) >= 0)
            {
                print "loaded. ($cur" . ( $arg ? " >= $arg" : '' ) . ")\n";
                push @Existing, $mod => $arg;
                $DisabledTests{$_} = 1 for map { glob($_) } @skiptests;
            }
            else {
                if (not defined $cur)   # indeed missing
                {
                    print "missing." . ( $arg ? " (would need $arg)" : '' ) . "\n";
                }
                else
                {
                    # no need to check $arg as _version_cmp ($cur, undef) would satisfy >= above
                    print "too old. ($cur < $arg)\n";
                }

                push @required, $mod => $arg;
            }
        }

        next unless @required;

        my $mandatory = ( $feature eq '-core' or $core_all );

        if (
            !$SkipInstall
            and (
                $CheckOnly
                or ( $mandatory and $UnderCPAN )
                or $AllDeps
                or _prompt(
                    qq{==> Auto-install the }
                      . ( @required / 2 )
                      . ( $mandatory ? ' mandatory' : ' optional' )
                      . qq{ module(s) from CPAN?},
                    $default ? 'y' : 'n',
                ) =~ /^[Yy]/
            )
          )
        {
            push( @Missing, @required );
            $DisabledTests{$_} = 1 for map { glob($_) } @skiptests;
        }

        elsif ( !$SkipInstall
            and $default
            and $mandatory
            and
            _prompt( qq{==> The module(s) are mandatory! Really skip?}, 'n', )
            =~ /^[Nn]/ )
        {
            push( @Missing, @required );
            $DisabledTests{$_} = 1 for map { glob($_) } @skiptests;
        }

        else {
            $DisabledTests{$_} = 1 for map { glob($_) } @tests;
        }
    }

    if ( @Missing and not( $CheckOnly or $UnderCPAN ) ) {
        require Config;
        # was a bare string in void context; restore the print
        print
"*** Dependencies will be installed the next time you type '$Config::Config{make}'.\n";

        # make an educated guess of whether we'll need root permission.
        print " (You may need to do that as the 'root' user.)\n"
          if eval '$>';
    }
    print "*** $class configuration finished.\n";

    chdir $cwd;

    # import to main::
    no strict 'refs';
    *{'main::WriteMakefile'} = \&Write if caller(0) eq 'main';
}
256 | ||
# Announce that an outer tool named $_[0] (e.g. 'CPAN' or 'CPANPLUS')
# owns dependency installation.  Always returns true so callers can
# simply `return _running_under(...)`.
sub _running_under {
    my ($manager) = @_;
    print "*** Since we're running under ${manager}, I'll just let it take care\n"
        . "of the dependency's installation later.\n";
    return 1;
}
265 | ||
# Check to see if we are currently running under CPAN.pm and/or CPANPLUS;
# if we are, then we simply let it take care of our dependencies.
# Returns true when an outer installer owns dependency handling, and
# false/empty otherwise.  With a true argument, the check runs even
# before anything has been recorded in @Missing.
sub _check_lock {
    return unless @Missing or @_;

    my $cpan_env = $ENV{PERL5_CPAN_IS_RUNNING};

    if ($ENV{PERL5_CPANPLUS_IS_RUNNING}) {
        # NOTE(review): when both env vars are set, 'CPAN' is reported —
        # presumably because CPAN.pm can drive CPANPLUS underneath; confirm.
        return _running_under($cpan_env ? 'CPAN' : 'CPANPLUS');
    }

    require CPAN;

    if ($CPAN::VERSION > '1.89') {
        if ($cpan_env) {
            return _running_under('CPAN');
        }
        return;    # CPAN.pm new enough, don't need to check further
    }

    # last ditch attempt, this -will- configure CPAN, very sorry

    _load_cpan(1); # force initialize even though it's already loaded

    # Find the CPAN lock-file
    my $lock = MM->catfile( $CPAN::Config->{cpan_home}, ".lock" );
    return unless -f $lock;

    # Check the lock
    local *LOCK;
    return unless open(LOCK, $lock);

    if (
        # the lock file's first line holds the owning PID; compare with
        # our parent (on Win32 fall back to the _under_cpan() heuristic)
        ( $^O eq 'MSWin32' ? _under_cpan() : <LOCK> == getppid() )
        and ( $CPAN::Config->{prerequisites_policy} || '' ) ne 'ignore'
    ) {
        # NOTE(review): LOCK is not explicitly closed on this path; the
        # local *LOCK glob is released at sub exit.
        print <<'END_MESSAGE';

*** Since we're running under CPAN, I'll just let it take care
of the dependency's installation later.
END_MESSAGE
        return 1;
    }

    close LOCK;
    return;
}
313 | ||
# Class method: attempt to install the given (module => version) pairs
# via CPANPLUS (preferred) or CPAN.pm.  The first argument is an
# arrayref of config key/value pairs whose keys may carry a leading '-'.
# Returns the list of package names present at (or above) the required
# version after the attempt.
sub install {
    my $class = shift;

    my $i;    # used below to strip leading '-' from config keys
    my @config = ( map { s/^-// if ++$i; $_ } @{ +shift } );

    my ( @modules, @installed );
    while ( my ( $pkg, $ver ) = splice( @_, 0, 2 ) ) {

        # grep out those already installed
        if ( _version_cmp( _load($pkg), $ver ) >= 0 ) {
            push @installed, $pkg;
        }
        else {
            push @modules, $pkg, $ver;
        }
    }

    return @installed unless @modules;  # nothing to do
    return @installed if _check_lock(); # defer to the CPAN shell

    print "*** Installing dependencies...\n";

    return unless _connected_to('cpan.org');

    my %args = @config;
    my %failed;
    local *FAILED;
    if ( $args{do_once} and open( FAILED, '.#autoinstall.failed' ) ) {
        # drop modules that already failed during a previous do_once run
        while (<FAILED>) { chomp; $failed{$_}++ }
        close FAILED;

        my @newmod;
        while ( my ( $k, $v ) = splice( @modules, 0, 2 ) ) {
            push @newmod, ( $k => $v ) unless $failed{$k};
        }
        @modules = @newmod;
    }

    if ( _has_cpanplus() and not $ENV{PERL_AUTOINSTALL_PREFER_CPAN} ) {
        _install_cpanplus( \@modules, \@config );
    } else {
        _install_cpan( \@modules, \@config );
    }

    print "*** $class installation finished.\n";

    # see if we have successfully installed them
    while ( my ( $pkg, $ver ) = splice( @modules, 0, 2 ) ) {
        if ( _version_cmp( _load($pkg), $ver ) >= 0 ) {
            push @installed, $pkg;
        }
        elsif ( $args{do_once} and open( FAILED, '>> .#autoinstall.failed' ) ) {
            # record the failure so a later do_once run skips this module
            print FAILED "$pkg\n";
        }
    }

    close FAILED if $args{do_once};

    return @installed;
}
375 | ||
# Install modules through CPANPLUS::Backend.
# Arguments: arrayref of (module => version) pairs and arrayref of
# CPAN.pm-style config pairs (converted via _cpanplus_config).
# Returns the number of modules successfully installed, or an empty
# return if the CPANPLUS build area is not usable.
sub _install_cpanplus {
    my @modules   = @{ +shift };
    my @config    = _cpanplus_config( @{ +shift } );
    my $installed = 0;

    require CPANPLUS::Backend;
    my $cp   = CPANPLUS::Backend->new;
    my $conf = $cp->configure_object;

    return unless $conf->can('conf') # 0.05x+ with "sudo" support
               or _can_write($conf->_get_build('base'));  # 0.04x

    # if we're root, set UNINST=1 to avoid trouble unless user asked for it.
    my $makeflags = $conf->get_conf('makeflags') || '';
    if ( UNIVERSAL::isa( $makeflags, 'HASH' ) ) {
        # 0.03+ uses a hashref here
        $makeflags->{UNINST} = 1 unless exists $makeflags->{UNINST};

    } else {
        # 0.02 and below uses a scalar
        $makeflags = join( ' ', split( ' ', $makeflags ), 'UNINST=1' )
          if ( $makeflags !~ /\bUNINST\b/ and eval qq{ $> eq '0' } );

    }
    $conf->set_conf( makeflags => $makeflags );
    $conf->set_conf( prereqs => 1 );

    while ( my ( $key, $val ) = splice( @config, 0, 2 ) ) {
        $conf->set_conf( $key, $val );
    }

    my $modtree = $cp->module_tree;
    while ( my ( $pkg, $ver ) = splice( @modules, 0, 2 ) ) {
        print "*** Installing $pkg...\n";

        MY::preinstall( $pkg, $ver ) or next if defined &MY::preinstall;

        my $success;
        my $obj = $modtree->{$pkg};

        if ( $obj and _version_cmp( $obj->{version}, $ver ) >= 0 ) {
            my $pathname = $pkg;
            # NOTE(review): no /g flag — only the first '::' becomes \W, so
            # deeply nested modules may not be fully matched below; confirm.
            $pathname =~ s/::/\\W/;

            # purge from %INC so the freshly installed version gets loaded
            foreach my $inc ( grep { m/$pathname.pm/i } keys(%INC) ) {
                delete $INC{$inc};
            }

            my $rv = $cp->install( modules => [ $obj->{module} ] );

            if ( $rv and ( $rv->{ $obj->{module} } or $rv->{ok} ) ) {
                print "*** $pkg successfully installed.\n";
                $success = 1;
            } else {
                print "*** $pkg installation cancelled.\n";
                $success = 0;
            }

            $installed += $success;
        } else {
            print << ".";
*** Could not find a version $ver or above for $pkg; skipping.
.
        }

        # NOTE(review): $success stays undef on the skip branch above.
        MY::postinstall( $pkg, $ver, $success ) if defined &MY::postinstall;
    }

    return $installed;
}
448 | ||
# Translate CPAN.pm-style configuration pairs into their CPANPLUS
# equivalents.  Only 'prerequisites_policy' is understood; its value
# ('follow'/'ask'/'ignore') becomes the corresponding CPANPLUS 'prereqs'
# constant.  Any other key, or an unknown policy value, is fatal.
# Returns the converted key/value list.
#
# BUG FIX: the converted value was previously computed and then
# discarded — @config was never pushed to, so the caller's set_conf
# loop always received an empty list.
sub _cpanplus_config {
    my @config = ();
    while ( @_ ) {
        my ($key, $value) = (shift(), shift());
        if ( $key eq 'prerequisites_policy' ) {
            if ( $value eq 'follow' ) {
                $value = CPANPLUS::Internals::Constants::PREREQ_INSTALL();
            } elsif ( $value eq 'ask' ) {
                $value = CPANPLUS::Internals::Constants::PREREQ_ASK();
            } elsif ( $value eq 'ignore' ) {
                $value = CPANPLUS::Internals::Constants::PREREQ_IGNORE();
            } else {
                die "*** Cannot convert option $key = '$value' to CPANPLUS version.\n";
            }
            push @config, 'prereqs', $value;
        } else {
            die "*** Cannot convert option $key to CPANPLUS version.\n";
        }
    }
    return @config;
}
469 | ||
# Install modules through CPAN.pm / CPAN::Shell.
# Arguments: arrayref of (module => version) pairs and arrayref of
# config pairs — 'force' is consumed locally as a pseudo-option, the
# rest are written into $CPAN::Config.  Returns the number of modules
# successfully installed.
sub _install_cpan {
    my @modules   = @{ +shift };
    my @config    = @{ +shift };
    my $installed = 0;
    my %args;

    _load_cpan();
    require Config;

    if (CPAN->VERSION < 1.80) {
        # no "sudo" support, probe for writableness
        return unless _can_write( MM->catfile( $CPAN::Config->{cpan_home}, 'sources' ) )
                  and _can_write( $Config::Config{sitelib} );
    }

    # if we're root, set UNINST=1 to avoid trouble unless user asked for it.
    my $makeflags = $CPAN::Config->{make_install_arg} || '';
    $CPAN::Config->{make_install_arg} =
      join( ' ', split( ' ', $makeflags ), 'UNINST=1' )
      if ( $makeflags !~ /\bUNINST\b/ and eval qq{ $> eq '0' } );

    # don't show start-up info
    $CPAN::Config->{inhibit_startup_message} = 1;

    # set additional options
    while ( my ( $opt, $arg ) = splice( @config, 0, 2 ) ) {
        ( $args{$opt} = $arg, next )
          if $opt =~ /^force$/;    # pseudo-option
        $CPAN::Config->{$opt} = $arg;
    }

    local $CPAN::Config->{prerequisites_policy} = 'follow';

    while ( my ( $pkg, $ver ) = splice( @modules, 0, 2 ) ) {
        MY::preinstall( $pkg, $ver ) or next if defined &MY::preinstall;

        print "*** Installing $pkg...\n";

        my $obj = CPAN::Shell->expand( Module => $pkg );
        my $success = 0;

        if ( $obj and _version_cmp( $obj->cpan_version, $ver ) >= 0 ) {
            my $pathname = $pkg;
            # NOTE(review): no /g flag — only the first '::' is replaced.
            $pathname =~ s/::/\\W/;

            # purge from %INC so the freshly installed version gets loaded
            foreach my $inc ( grep { m/$pathname.pm/i } keys(%INC) ) {
                delete $INC{$inc};
            }

            my $rv = $args{force} ? CPAN::Shell->force( install => $pkg )
                                  : CPAN::Shell->install($pkg);
            $rv ||= eval {
                $CPAN::META->instance( 'CPAN::Distribution', $obj->cpan_file, )
                  ->{install}
                  if $CPAN::META;
            };

            # NOTE(review): $rv can still be undef here, so this 'eq'
            # warns under warnings — confirm before tightening.
            if ( $rv eq 'YES' ) {
                print "*** $pkg successfully installed.\n";
                $success = 1;
            }
            else {
                print "*** $pkg installation failed.\n";
                $success = 0;
            }

            $installed += $success;
        }
        else {
            print << ".";
*** Could not find a version $ver or above for $pkg; skipping.
.
        }

        MY::postinstall( $pkg, $ver, $success ) if defined &MY::postinstall;
    }

    return $installed;
}
549 | ||
# Record (in the package-level $HasCPANPLUS flag) and return whether
# CPANPLUS is usable: either its config is already loaded, or its default
# shell can be loaded on demand via _load().
sub _has_cpanplus {
    return (
        $HasCPANPLUS = (
            $INC{'CPANPLUS/Config.pm'}
            or _load('CPANPLUS::Shell::Default')
        )
    );
}

# make guesses on whether we're under the CPAN installation directory
sub _under_cpan {
    require Cwd;
    require File::Spec;

    # Canonicalise both paths so redundant separators don't spoil the match.
    my $cwd = File::Spec->canonpath( Cwd::cwd() );
    my $cpan = File::Spec->canonpath( $CPAN::Config->{cpan_home} );

    # NOTE(review): plain substring match, not a path-component check; a
    # cwd that merely contains the cpan_home string also counts.  Assumed
    # good enough for this heuristic -- confirm before tightening.
    return ( index( $cwd, $cpan ) > -1 );
}
569 | ||
# Upgrade this very module (Module::AutoInstall) from CPAN when the caller
# requires a newer version than the one currently loaded.  Prompts before
# installing; dies if the user declines, exits if the bootstrap fails.
sub _update_to {
    my $class = __PACKAGE__;
    my $ver = shift;

    return
      if _version_cmp( _load($class), $ver ) >= 0;  # no need to upgrade

    if (
        _prompt( "==> A newer version of $class ($ver) is required. Install?",
            'y' ) =~ /^[Nn]/
      )
    {
        die "*** Please install $class $ver manually.\n";
    }

    print << ".";
*** Trying to fetch it from CPAN...
.

    # install ourselves
    _load($class) and return $class->import(@_)
      if $class->install( [], $class, $ver );

    print << '.'; exit 1;

*** Cannot bootstrap myself. :-(  Installation terminated.
.
}

# check if we're connected to some host, using inet_aton
sub _connected_to {
    my $site = shift;

    # If the name resolves we assume connectivity; otherwise let the user
    # decide whether to press on regardless (default: no).
    return (
        ( _load('Socket') and Socket::inet_aton($site) ) or _prompt(
            qq(
*** Your host cannot resolve the domain name '$site', which
    probably means the Internet connections are unavailable.
==> Should we try to install the required module(s) anyway?), 'n'
          ) =~ /^[Yy]/
    );
}
612 | ||
# check if a directory is writable; may create it on demand
# If not writable and 'sudo' is available, offers to re-execute the whole
# Makefile.PL under sudo (passing --config/--installdeps); otherwise asks
# whether to continue anyway.  Returns true to proceed, false to give up.
sub _can_write {
    my $path = shift;
    mkdir( $path, 0755 ) unless -e $path;

    return 1 if -w $path;

    print << ".";
*** You are not allowed to write to the directory '$path';
    the installation may fail due to insufficient permissions.
.

    # Only offer sudo when we are not already root, sudo exists, and (for
    # the default answer) STDIN is a terminal.
    if (
        eval '$>' and lc(`sudo -V`) =~ /version/ and _prompt(
            qq(
==> Should we try to re-execute the autoinstall process with 'sudo'?),
            ((-t STDIN) ? 'y' : 'n')
        ) =~ /^[Yy]/
      )
    {

        # try to bootstrap ourselves from sudo
        print << ".";
*** Trying to re-execute the autoinstall process with 'sudo'...
.
        my $missing = join( ',', @Missing );
        # NOTE(review): "my ... if $Config" is the conditional-my
        # antipattern (undefined behaviour per perldoc); left as-is since
        # changing it here could alter the vendored code's behaviour.
        my $config = join( ',',
            UNIVERSAL::isa( $Config, 'HASH' ) ? %{$Config} : @{$Config} )
          if $Config;

        # system() returns 0 on success, so a false return here means the
        # sudo run did the work and we can stop.
        return
          unless system( 'sudo', $^X, $0, "--config=$config",
            "--installdeps=$missing" );

        print << ".";
*** The 'sudo' command exited with error!  Resuming...
.
    }

    return _prompt(
        qq(
==> Should we try to install the required module(s) anyway?), 'n'
    ) =~ /^[Yy]/;
}
657 | ||
# load a module and return the version it reports
# Returns the module's version on success (0 when it defines none), or
# undef when the require fails.
sub _load {
    my $mod = pop;    # class/instance doesn't matter

    # Turn Foo::Bar into the Foo/Bar.pm path that require() expects.
    ( my $file = $mod ) =~ s{::}{/}g;
    $file .= '.pm';

    local $@;
    my $version = eval { require $file; $mod->VERSION };

    return $version || ( $@ ? undef : 0 );
}
669 | ||
# Load CPAN.pm and it's configuration
# No-op when CPAN.pm and its config are already loaded (unless extra args
# force a reload).  Handles the config-loading API difference between old
# and new CPAN.pm releases.
sub _load_cpan {
    return if $CPAN::VERSION and $CPAN::Config and not @_;
    require CPAN;
    if ( $CPAN::HandleConfig::VERSION ) {
        # Newer versions of CPAN have a HandleConfig module
        CPAN::HandleConfig->load;
    } else {
        # Older versions had the load method in Config directly
        CPAN::Config->load;
    }
}
682 | ||
# compare two versions, either use Sort::Versions or plain comparison
# return values same as <=>
sub _version_cmp {
    my ( $cur, $min ) = @_;

    # Undefined installed version always loses; no (or zero) required
    # version always wins.
    return -1 unless defined $cur;    # if 0 keep comparing
    return 1 unless $min;

    $cur =~ s/\s+$//;

    # Anything with a 'v' or more than one dot is not a plain decimal
    # number -- delegate to a real version-comparison module if possible.
    if ( ref($cur) or ref($min) or $cur =~ /v|\..*\./ or $min =~ /v|\..*\./ ) {
        if ( ( $version::VERSION or defined( _load('version') )) and
            version->can('new')
        ) {
            # use version.pm if it is installed.
            return version->new($cur) <=> version->new($min);
        }

        if ( $Sort::Versions::VERSION or defined( _load('Sort::Versions') ) ) {
            # use Sort::Versions as the sorting algorithm for a.b.c versions
            return Sort::Versions::versioncmp( $cur, $min );
        }

        warn "Cannot reliably compare non-decimal formatted versions.\n"
           . "Please install version.pm or Sort::Versions.\n";
    }

    # plain comparison, with 'not numeric' warnings suppressed
    local $^W = 0;
    return $cur <=> $min;
}
716 | ||
# nothing; this usage is deprecated.
# Kept only for backward compatibility: always reports an empty
# prerequisite hash.
sub main::PREREQ_PM {
    return {};
}
719 | ||
# Massage the WriteMakefile() argument list: fold known/missing prereqs
# into PREREQ_PM, drop EXE_FILES not listed in the MANIFEST, expand and
# filter the test file list, and compute the $PostambleActions command
# used by the 'installdeps' make target.  Reads the package globals
# @Existing, @Missing, $UnderCPAN, $TestOnly, %DisabledTests and $Config.
sub _make_args {
    my %args = @_;

    $args{PREREQ_PM} = { %{ $args{PREREQ_PM} || {} }, @Existing, @Missing }
      if $UnderCPAN or $TestOnly;

    if ( $args{EXE_FILES} and -e 'MANIFEST' ) {
        require ExtUtils::Manifest;
        my $manifest = ExtUtils::Manifest::maniread('MANIFEST');

        # Keep only executables that are actually shipped.
        $args{EXE_FILES} =
          [ grep { exists $manifest->{$_} } @{ $args{EXE_FILES} } ];
    }

    $args{test}{TESTS} ||= 't/*.t';
    $args{test}{TESTS} = join( ' ',
        grep { !exists( $DisabledTests{$_} ) }
          map { glob($_) } split( /\s+/, $args{test}{TESTS} ) );

    my $missing = join( ',', @Missing );
    # NOTE(review): "my ... if $Config" is the conditional-my antipattern;
    # left untouched to preserve the vendored code's exact behaviour.
    my $config =
      join( ',', UNIVERSAL::isa( $Config, 'HASH' ) ? %{$Config} : @{$Config} )
      if $Config;

    # 'make installdeps' re-runs this Makefile.PL to install what's missing;
    # a no-op when nothing is missing or CPAN itself is driving the build.
    $PostambleActions = (
        ($missing and not $UnderCPAN)
        ? "\$(PERL) $0 --config=$config --installdeps=$missing"
        : "\$(NOECHO) \$(NOOP)"
    );

    return %args;
}
752 | ||
# a wrapper to ExtUtils::MakeMaker::WriteMakefile
# Runs the args through _make_args(), installs our postamble() unless the
# caller defined MY::postamble, and warns if a custom postamble failed to
# include ours (which would disable the installdeps targets).
# Returns 1 on success; returns nothing in check-only mode.
sub Write {
    require Carp;
    Carp::croak "WriteMakefile: Need even number of args" if @_ % 2;

    if ($CheckOnly) {
        print << ".";
*** Makefile not written in check-only mode.
.
        return;
    }

    my %args = _make_args(@_);

    no strict 'refs';

    # postamble() flips this flag when it actually runs.
    $PostambleUsed = 0;
    local *MY::postamble = \&postamble unless defined &MY::postamble;
    ExtUtils::MakeMaker::WriteMakefile(%args);

    print << "." unless $PostambleUsed;
*** WARNING: Makefile written with customized MY::postamble() without
    including contents from Module::AutoInstall::postamble() --
    auto installation features disabled.  Please contact the author.
.

    return 1;
}
781 | ||
# Makefile postamble: adds the config/checkdeps/installdeps targets.
# $PostambleActions is prepared by _make_args(); setting $PostambleUsed
# lets Write() detect that this postamble really made it into the Makefile.
sub postamble {
    $PostambleUsed = 1;

    return <<"END_MAKE";

config :: installdeps
\t\$(NOECHO) \$(NOOP)

checkdeps ::
\t\$(PERL) $0 --checkdeps

installdeps ::
\t$PostambleActions

END_MAKE

}
799 | ||
800 | 1; | |
801 | ||
802 | __END__ | |
803 | ||
804 | #line 1056 |
0 | #line 1 | |
package Module::Install::AutoInstall;

# Module::Install plugin that bridges to the bundled Module::AutoInstall:
# it collects the dist's declared prerequisites and features, hands them
# to Module::AutoInstall, and wires its Makefile arguments and postamble
# into the generated Makefile.

use strict;
use Module::Install::Base ();

use vars qw{$VERSION @ISA $ISCORE};
BEGIN {
	$VERSION = '0.91';
	@ISA     = 'Module::Install::Base';
	$ISCORE  = 1;
}

sub AutoInstall { $_[0] }

# Run the interactive auto-install immediately (delegates below).
sub run {
	my $self = shift;
	$self->auto_install_now(@_);
}

# Standard hook: set up auto-install as part of Makefile generation.
sub write {
	my $self = shift;
	$self->auto_install(@_);
}

sub auto_install {
	my $self = shift;
	return if $self->{done}++;    # guard against running twice

	# Flatten array of arrays into a single array
	my @core = map @$_, map @$_, grep ref,
	           $self->build_requires, $self->requires;

	my @config = @_;

	# We'll need Module::AutoInstall
	$self->include('Module::AutoInstall');
	require Module::AutoInstall;

	# import() is where Module::AutoInstall does its prerequisite
	# checking and (optionally) feature prompting.
	Module::AutoInstall->import(
		(@config ? (-config => \@config) : ()),
		(@core   ? (-core   => \@core)   : ()),
		$self->features,
	);

	# Merge Module::AutoInstall's computed WriteMakefile args and add its
	# installdeps targets to the Makefile postamble.
	$self->makemaker_args( Module::AutoInstall::_make_args() );

	my $class = ref($self);
	$self->postamble(
		"# --- $class section:\n" .
		Module::AutoInstall::postamble()
	);
}

sub auto_install_now {
	my $self = shift;
	$self->auto_install(@_);
	Module::AutoInstall::do_install();
}

1;
0 | #line 1 | |
package Module::Install::Base;

# Common base class for all Module::Install extensions: provides call/load
# helpers, an AUTOLOAD that proxies unknown methods through the top-level
# Module::Install object, and a FakeAdmin stand-in used when the dist is
# being installed (as opposed to authored).

use strict 'vars';
use vars qw{$VERSION};
BEGIN {
	$VERSION = '0.91';
}

# Suspend handler for "redefined" warnings
# The replacement handler swallows all warnings; it returns the saved
# handler so the matching BEGIN block at the bottom can restore it by
# calling the installed sub.
BEGIN {
	my $w = $SIG{__WARN__};
	$SIG{__WARN__} = sub { $w };
}

#line 42

# Constructor: installs per-class 'call' and 'load' delegators (proxied to
# the top-level object) the first time a class is instantiated, then
# blesses the argument pairs.
sub new {
	my $class = shift;
	unless ( defined &{"${class}::call"} ) {
		*{"${class}::call"} = sub { shift->_top->call(@_) };
	}
	unless ( defined &{"${class}::load"} ) {
		*{"${class}::load"} = sub { shift->_top->load(@_) };
	}
	bless { @_ }, $class;
}

#line 61

# Route unknown methods through the top-level object's autoload resolver;
# silently returns when no handler can be found.
sub AUTOLOAD {
	local $@;
	my $func = eval { shift->_top->autoload } or return;
	goto &$func;
}

#line 75

# The top-level Module::Install object this extension belongs to.
sub _top {
	$_[0]->{_top};
}

#line 90

# The admin object in author mode, or a do-nothing FakeAdmin otherwise.
sub admin {
	$_[0]->_top->{admin}
	or
	Module::Install::Base::FakeAdmin->new;
}

#line 106

# True (the admin VERSION) only when a real admin object is present.
sub is_admin {
	$_[0]->admin->VERSION;
}

sub DESTROY {}

package Module::Install::Base::FakeAdmin;

my $fake;

# Singleton stub whose AUTOLOAD makes every admin method a no-op.
sub new {
	$fake ||= bless(\@_, $_[0]);
}

sub AUTOLOAD {}

sub DESTROY {}

# Restore warning handler
BEGIN {
	$SIG{__WARN__} = $SIG{__WARN__}->();
}

1;
76 | ||
77 | #line 154 |
0 | #line 1 | |
1 | package Module::Install::Can; | |
2 | ||
3 | use strict; | |
4 | use Config (); | |
5 | use File::Spec (); | |
6 | use ExtUtils::MakeMaker (); | |
7 | use Module::Install::Base (); | |
8 | ||
9 | use vars qw{$VERSION @ISA $ISCORE}; | |
10 | BEGIN { | |
11 | $VERSION = '0.91'; | |
12 | @ISA = 'Module::Install::Base'; | |
13 | $ISCORE = 1; | |
14 | } | |
15 | ||
# check if we can load some module
### Upgrade this to not have to load the module if possible
# Returns true when $mod can be require()d and satisfies the optional
# minimum $ver (via UNIVERSAL::VERSION).
sub can_use {
	my ($self, $mod, $ver) = @_;

	# Convert Foo::Bar (or a backslashed path) into the Foo/Bar.pm
	# form that require() wants.
	( my $path = $mod ) =~ s{::|\\}{/}g;
	$path .= '.pm' unless $path =~ /\.pm$/i;

	# Reconstruct the package name from the path form.
	( my $pkg = $path ) =~ s{\.pm$}{}i;
	$pkg =~ s{/}{::}g;

	local $@;
	return eval { require $path; $pkg->VERSION($ver || 0); 1 };
}
30 | ||
# check if we can run some command
# Returns the (possibly completed) command path if it is executable or
# MakeMaker considers it a runnable command, searching $PATH and the
# current directory; returns nothing otherwise.
sub can_run {
	my ($self, $cmd) = @_;

	my $_cmd = $cmd;
	return $_cmd if (-x $_cmd or $_cmd = MM->maybe_command($_cmd));

	for my $dir ((split /$Config::Config{path_sep}/, $ENV{PATH}), '.') {
		next if $dir eq '';
		# $_[1] is still the original $cmd (nothing has shifted @_).
		my $abs = File::Spec->catfile($dir, $_[1]);
		return $abs if (-x $abs or $abs = MM->maybe_command($abs));
	}

	return;
}

# can we locate a (the) C compiler
sub can_cc {
	my $self = shift;
	my @chunks = split(/ /, $Config::Config{cc}) or return;

	# $Config{cc} may contain args; try to find out the program part
	# Drop trailing words one at a time until something runs.
	while (@chunks) {
		return $self->can_run("@chunks") || (pop(@chunks), next);
	}

	return;
}

# Fix Cygwin bug on maybe_command();
# Under Cygwin, /cygdrive/ paths need the Win32 semantics; everything else
# gets the Unix behaviour.  Only installed if Cygwin's own version is absent.
if ( $^O eq 'cygwin' ) {
	require ExtUtils::MM_Cygwin;
	require ExtUtils::MM_Win32;
	if ( ! defined(&ExtUtils::MM_Cygwin::maybe_command) ) {
		*ExtUtils::MM_Cygwin::maybe_command = sub {
			my ($self, $file) = @_;
			if ($file =~ m{^/cygdrive/}i and ExtUtils::MM_Win32->can('maybe_command')) {
				ExtUtils::MM_Win32->maybe_command($file);
			} else {
				ExtUtils::MM_Unix->maybe_command($file);
			}
		}
	}
}
75 | ||
76 | 1; | |
77 | ||
78 | __END__ | |
79 | ||
80 | #line 156 |
0 | #line 1 | |
1 | package Module::Install::Fetch; | |
2 | ||
3 | use strict; | |
4 | use Module::Install::Base (); | |
5 | ||
6 | use vars qw{$VERSION @ISA $ISCORE}; | |
7 | BEGIN { | |
8 | $VERSION = '0.91'; | |
9 | @ISA = 'Module::Install::Base'; | |
10 | $ISCORE = 1; | |
11 | } | |
12 | ||
# Fetch a single file from a URL (http or ftp), trying LWP::Simple, then
# Net::FTP, then an external 'ftp' program.  Named args: url, ftp_url
# (http fallback), local_dir, size, run, remove, check_for.  Returns true
# on success, nothing on failure.
sub get_file {
	my ($self, %args) = @_;
	my ($scheme, $host, $path, $file) =
		$args{url} =~ m|^(\w+)://([^/]+)(.+)/(.+)| or return;

	# No LWP for an http URL: fall back to the ftp mirror if one was given.
	if ( $scheme eq 'http' and ! eval { require LWP::Simple; 1 } ) {
		$args{url} = $args{ftp_url}
			or (warn("LWP support unavailable!\n"), return);
		($scheme, $host, $path, $file) =
			$args{url} =~ m|^(\w+)://([^/]+)(.+)/(.+)| or return;
	}

	$|++;
	print "Fetching '$file' from $host... ";

	# Quick reachability probe before attempting any transfer.
	unless (eval { require Socket; Socket::inet_aton($host) }) {
		warn "'$host' resolve failed!\n";
		return;
	}

	return unless $scheme eq 'ftp' or $scheme eq 'http';

	require Cwd;
	my $dir = Cwd::getcwd();
	chdir $args{local_dir} or return if exists $args{local_dir};

	if (eval { require LWP::Simple; 1 }) {
		LWP::Simple::mirror($args{url}, $file);
	}
	elsif (eval { require Net::FTP; 1 }) { eval {
		# use Net::FTP to get past firewall
		# NOTE(review): the 'return' below only exits this eval block,
		# not get_file(); the -f check further down is what actually
		# detects the failure.
		my $ftp = Net::FTP->new($host, Passive => 1, Timeout => 600);
		$ftp->login("anonymous", 'anonymous@example.com');
		$ftp->cwd($path);
		$ftp->binary;
		$ftp->get($file) or (warn("$!\n"), return);
		$ftp->quit;
	} }
	elsif (my $ftp = $self->can_run('ftp')) { eval {
		# no Net::FTP, fallback to ftp.exe
		require FileHandle;
		my $fh = FileHandle->new;

		local $SIG{CHLD} = 'IGNORE';
		unless ($fh->open("|$ftp -n")) {
			warn "Couldn't open ftp: $!\n";
			chdir $dir; return;
		}

		# Drive the external client with a scripted command dialogue.
		my @dialog = split(/\n/, <<"END_FTP");
open $host
user anonymous anonymous\@example.com
cd $path
binary
get $file $file
quit
END_FTP
		foreach (@dialog) { $fh->print("$_\n") }
		$fh->close;
	} }
	else {
		warn "No working 'ftp' program available!\n";
		chdir $dir; return;
	}

	unless (-f $file) {
		warn "Fetching failed: $@\n";
		chdir $dir; return;
	}

	# Optional post-conditions: expected size, post-processing command,
	# cleanup, and a success marker file.
	return if exists $args{size} and -s $file != $args{size};
	system($args{run}) if exists $args{run};
	unlink($file) if $args{remove};

	print(((!exists $args{check_for} or -e $args{check_for})
		? "done!" : "failed! ($!)"), "\n");
	chdir $dir; return !$?;
}
91 | ||
92 | 1; |
0 | #line 1 | |
package Module::Install::Include;

# Thin delegation layer: every public helper simply forwards to the same-
# named method on the admin object (a real admin in author mode, a no-op
# fake during normal installation).

use strict;
use Module::Install::Base ();

use vars qw{$VERSION @ISA $ISCORE};
BEGIN {
	$VERSION = '0.91';
	@ISA     = 'Module::Install::Base';
	$ISCORE  = 1;
}

sub include {
	my $self = shift;
	return $self->admin->include(@_);
}

sub include_deps {
	my $self = shift;
	return $self->admin->include_deps(@_);
}

sub auto_include {
	my $self = shift;
	return $self->admin->auto_include(@_);
}

sub auto_include_deps {
	my $self = shift;
	return $self->admin->auto_include_deps(@_);
}

sub auto_include_dependent_dists {
	my $self = shift;
	return $self->admin->auto_include_dependent_dists(@_);
}

1;
0 | #line 1 | |
1 | package Module::Install::Makefile; | |
2 | ||
3 | use strict 'vars'; | |
4 | use ExtUtils::MakeMaker (); | |
5 | use Module::Install::Base (); | |
6 | ||
7 | use vars qw{$VERSION @ISA $ISCORE}; | |
8 | BEGIN { | |
9 | $VERSION = '0.91'; | |
10 | @ISA = 'Module::Install::Base'; | |
11 | $ISCORE = 1; | |
12 | } | |
13 | ||
sub Makefile { $_[0] }

# Tracks how often each distinct prompt (by call site and question) has
# been issued, to break prompt loops.
my %seen = ();

# Wrapper around ExtUtils::MakeMaker::prompt that dies if the same prompt
# repeats more than three times, and forces default answers under
# automated testing.
sub prompt {
	shift;

	# Infinite loop protection
	my @c = caller();
	if ( ++$seen{"$c[1]|$c[2]|$_[0]"} > 3 ) {
		die "Caught an potential prompt infinite loop ($c[1]|$c[2]|$_[0])";
	}

	# In automated testing, always use defaults
	if ( $ENV{AUTOMATED_TESTING} and ! $ENV{PERL_MM_USE_DEFAULT} ) {
		local $ENV{PERL_MM_USE_DEFAULT} = 1;
		goto &ExtUtils::MakeMaker::prompt;
	} else {
		goto &ExtUtils::MakeMaker::prompt;
	}
}
35 | ||
# Accessor for the accumulated WriteMakefile() argument hash: merges any
# key/value pairs passed in and returns the (shared) hashref.
sub makemaker_args {
	my $self = shift;
	my $args = ( $self->{makemaker_args} ||= {} );
	%$args = ( %$args, @_ );
	return $args;
}

# For mm args that take multiple space-seperated args,
# append an argument to the current list.
sub makemaker_append {
	# BUG FIX: was "my $self = sShift;" -- a bareword typo that died with
	# "Undefined subroutine" at runtime ('use strict' here covers vars
	# only, not subs, so it compiled).
	my $self = shift;
	my $name = shift;
	my $args = $self->makemaker_args;
	# BUG FIX: both the lvalue and the join() operand used the literal
	# hash key 'name' instead of the requested $name, so appends went to
	# the wrong slot and never accumulated.  (Fixed upstream in
	# Module::Install 0.92.)
	$args->{$name} = defined $args->{$name}
		? join( ' ', $args->{$name}, @_ )
		: join( ' ', @_ );
}
53 | ||
# Add extra build directories to MakeMaker's DIR list.
sub build_subdirs {
	my $self = shift;
	my $dir_list = $self->makemaker_args->{DIR} ||= [];
	push @$dir_list, @_;
}

# Append file patterns to the 'make clean' FILES list.
sub clean_files {
	my $self = shift;
	my $clean = $self->makemaker_args->{clean} ||= {};
	my @files = grep { length $_ } ( $clean->{FILES} || () ), @_;
	%$clean = ( %$clean, FILES => join( ' ', @files ) );
}

# Append file patterns to the 'make realclean' FILES list.
sub realclean_files {
	my $self = shift;
	my $realclean = $self->makemaker_args->{realclean} ||= {};
	my @files = grep { length $_ } ( $realclean->{FILES} || () ), @_;
	%$realclean = ( %$realclean, FILES => join( ' ', @files ) );
}

# Set LIBS; accepts either an arrayref or a single scalar.
sub libs {
	my $self = shift;
	my $libs = ref( $_[0] ) ? $_[0] : [ $_[0] ];
	$self->makemaker_args( LIBS => $libs );
}

# Set the INC (include path) MakeMaker argument.
sub inc {
	my $self = shift;
	$self->makemaker_args( INC => $_[0] );
}
90 | ||
# Directories found to contain .t files by the File::Find scan below.
my %test_dir = ();

# File::Find 'wanted' callback: remember each directory holding a test file.
sub _wanted_t {
	/\.t$/ and -f $_ and $test_dir{$File::Find::dir} = 1;
}

# Register every directory under $dir (default 't') that contains .t files,
# as a space-separated list of '<dir>/*.t' globs.  Dies if tests were
# already declared or the directory is missing.
sub tests_recursive {
	my $self = shift;
	if ( $self->tests ) {
		die "tests_recursive will not work if tests are already defined";
	}
	my $dir = shift || 't';
	unless ( -d $dir ) {
		die "tests_recursive dir '$dir' does not exist";
	}
	%test_dir = ();
	require File::Find;
	File::Find::find( \&_wanted_t, $dir );
	$self->tests( join ' ', map { "$_/*.t" } sort keys %test_dir );
}
111 | ||
# Generate the Makefile: validate the Perl version, pin an appropriate
# ExtUtils::MakeMaker prerequisite, assemble the WriteMakefile() argument
# hash from the collected metadata/prereqs, then run MakeMaker and
# post-process the resulting Makefile via fix_up_makefile().
sub write {
	my $self = shift;
	die "&Makefile->write() takes no arguments\n" if @_;

	# Check the current Perl version
	my $perl_version = $self->perl_version;
	if ( $perl_version ) {
		eval "use $perl_version; 1"
			or die "ERROR: perl: Version $] is installed, "
			. "but we need version >= $perl_version";
	}

	# Make sure we have a new enough MakeMaker
	require ExtUtils::MakeMaker;

	# NOTE(review): _cmp is not defined in this package; presumably it is
	# resolved through the Module::Install::Base AUTOLOAD -- confirm.
	if ( $perl_version and $self->_cmp($perl_version, '5.006') >= 0 ) {
		# MakeMaker can complain about module versions that include
		# an underscore, even though its own version may contain one!
		# Hence the funny regexp to get rid of it.  See RT #35800
		# for details.
		$self->build_requires( 'ExtUtils::MakeMaker' => $ExtUtils::MakeMaker::VERSION =~ /^(\d+\.\d+)/ );
		$self->configure_requires( 'ExtUtils::MakeMaker' => $ExtUtils::MakeMaker::VERSION =~ /^(\d+\.\d+)/ );
	} else {
		# Allow legacy-compatibility with 5.005 by depending on the
		# most recent EU:MM that supported 5.005.
		$self->build_requires( 'ExtUtils::MakeMaker' => 6.42 );
		$self->configure_requires( 'ExtUtils::MakeMaker' => 6.42 );
	}

	# Generate the MakeMaker params
	my $args = $self->makemaker_args;
	$args->{DISTNAME} = $self->name;
	$args->{NAME}     = $self->module_name || $self->name;
	$args->{VERSION}  = $self->version;
	$args->{NAME}     =~ s/-/::/g;
	if ( $self->tests ) {
		$args->{test} = { TESTS => $self->tests };
	}
	if ( $] >= 5.005 ) {
		$args->{ABSTRACT} = $self->abstract;
		$args->{AUTHOR}   = $self->author;
	}
	# String-eval normalises underscore dev-release versions for the
	# numeric comparisons below.
	if ( eval($ExtUtils::MakeMaker::VERSION) >= 6.10 ) {
		$args->{NO_META} = 1;
	}
	if ( eval($ExtUtils::MakeMaker::VERSION) > 6.17 and $self->sign ) {
		$args->{SIGN} = 1;
	}
	unless ( $self->is_admin ) {
		delete $args->{SIGN};
	}

	# Merge both kinds of requires into prereq_pm
	my $prereq = ($args->{PREREQ_PM} ||= {});
	%$prereq = ( %$prereq,
		map { @$_ }      # flatten [ module => version ] pairs
		map { @$_ }
		grep $_,
		($self->configure_requires, $self->build_requires, $self->requires)
	);

	# Remove any reference to perl, PREREQ_PM doesn't support it
	delete $args->{PREREQ_PM}->{perl};

	# merge both kinds of requires into prereq_pm
	my $subdirs = ($args->{DIR} ||= []);
	if ($self->bundles) {
		foreach my $bundle (@{ $self->bundles }) {
			my ($file, $dir) = @$bundle;
			push @$subdirs, $dir if -d $dir;
			delete $prereq->{$file};
		}
	}

	# NOTE(review): this repeats the perl-version check from the top of
	# the sub; harmless but redundant.
	if ( my $perl_version = $self->perl_version ) {
		eval "use $perl_version; 1"
			or die "ERROR: perl: Version $] is installed, "
			. "but we need version >= $perl_version";
	}

	$args->{INSTALLDIRS} = $self->installdirs;

	# Copy only the defined entries; MakeMaker dislikes undef values.
	my %args = map { ( $_ => $args->{$_} ) } grep {defined($args->{$_})} keys %$args;

	# Let an admin-mode hook adjust the dist PREOP (and friends).
	my $user_preop = delete $args{dist}->{PREOP};
	if (my $preop = $self->admin->preop($user_preop)) {
		foreach my $key ( keys %$preop ) {
			$args{dist}->{$key} = $preop->{$key};
		}
	}

	my $mm = ExtUtils::MakeMaker::WriteMakefile(%args);
	$self->fix_up_makefile($mm->{FIRST_MAKEFILE} || 'Makefile');
}
206 | ||
# Rewrite the freshly generated Makefile: prepend/append the registered
# preamble/postamble and patch perl invocations so the bundled 'inc'
# directory is always on @INC, while blanking PERL_LIB to avoid breaking
# PREFIX/PERL5LIB installs.
sub fix_up_makefile {
	my $self          = shift;
	my $makefile_name = shift;
	my $top_class     = ref($self->_top) || '';
	my $top_version   = $self->_top->VERSION || '';

	my $preamble = $self->preamble
		? "# Preamble by $top_class $top_version\n"
			. $self->preamble
		: '';
	my $postamble = "# Postamble by $top_class $top_version\n"
		. ($self->postamble || '');

	# Modernised from 2-arg bareword open to 3-arg lexical-filehandle
	# open (safer: the filename can never be parsed as a mode).
	open my $fh, '<', $makefile_name
		or die "fix_up_makefile: Couldn't open $makefile_name: $!";
	my $makefile = do { local $/; <$fh> };
	close $fh or die $!;

	$makefile =~ s/\b(test_harness\(\$\(TEST_VERBOSE\), )/$1'inc', /;
	$makefile =~ s/( -I\$\(INST_ARCHLIB\))/ -Iinc$1/g;
	$makefile =~ s/( "-I\$\(INST_LIB\)")/ "-Iinc"$1/g;
	$makefile =~ s/^(FULLPERL = .*)/$1 "-Iinc"/m;
	$makefile =~ s/^(PERL = .*)/$1 "-Iinc"/m;

	# Module::Install will never be used to build the Core Perl
	# Sometimes PERL_LIB and PERL_ARCHLIB get written anyway, which breaks
	# PREFIX/PERL5LIB, and thus, install_share. Blank them if they exist
	$makefile =~ s/^PERL_LIB = .+/PERL_LIB =/m;
	#$makefile =~ s/^PERL_ARCHLIB = .+/PERL_ARCHLIB =/m;

	# Perl 5.005 mentions PERL_LIB explicitly, so we have to remove that as well.
	$makefile =~ s/(\"?)-I\$\(PERL_LIB\)\1//g;

	# XXX - This is currently unused; not sure if it breaks other MM-users
	# $makefile =~ s/^pm_to_blib\s+:\s+/pm_to_blib :: /mg;

	open $fh, '>', $makefile_name
		or die "fix_up_makefile: Couldn't open $makefile_name: $!";
	print {$fh} "$preamble$makefile$postamble" or die $!;
	close $fh or die $!;

	1;
}
249 | ||
# Accessor/accumulator for the Makefile preamble text.  New text is
# prepended ahead of anything registered earlier; always returns the
# current preamble.
sub preamble {
	my ( $self, $text ) = @_;
	if ( defined $text ) {
		$self->{preamble} = $text . $self->{preamble};
	}
	return $self->{preamble};
}
255 | ||
# Accessor/accumulator for the Makefile postamble text.  Lazily seeds
# itself from the admin object's postamble (a no-op fake during normal
# installs), appends any new text, and returns the current value.
sub postamble {
	my ($self, $text) = @_;
	$self->{postamble} ||= $self->admin->postamble;
	$self->{postamble} .= $text if defined $text;
	$self->{postamble}
}
262 | ||
263 | 1; | |
264 | ||
265 | __END__ | |
266 | ||
267 | #line 394 |
0 | #line 1 | |
1 | package Module::Install::Metadata; | |
2 | ||
3 | use strict 'vars'; | |
4 | use Module::Install::Base (); | |
5 | ||
6 | use vars qw{$VERSION @ISA $ISCORE}; | |
7 | BEGIN { | |
8 | $VERSION = '0.91'; | |
9 | @ISA = 'Module::Install::Base'; | |
10 | $ISCORE = 1; | |
11 | } | |
12 | ||
# Metadata key declarations: each list below drives the generation of a
# like-named accessor further down in this package.

# Keys that act as on/off flags (accessor with no args sets them to 1).
my @boolean_keys = qw{
	sign
};

# Simple get/set scalar metadata fields.
my @scalar_keys = qw{
	name
	module_name
	abstract
	author
	version
	distribution_type
	tests
	installdirs
};

# Accumulating ( name => value ) pair lists (prereqs, resources, ...).
my @tuple_keys = qw{
	configure_requires
	build_requires
	requires
	recommends
	bundles
	resources
};

# Convenience accessors that store into the 'resources' tuple list.
my @resource_keys = qw{
	homepage
	bugtracker
	repository
};

# Keys holding a simple appendable list of strings.
my @array_keys = qw{
	keywords
};

# Introspection helpers so other plugins can ask which keys exist.
sub Meta              { shift          }
sub Meta_BooleanKeys  { @boolean_keys  }
sub Meta_ScalarKeys   { @scalar_keys   }
sub Meta_TupleKeys    { @tuple_keys    }
sub Meta_ResourceKeys { @resource_keys }
sub Meta_ArrayKeys    { @array_keys    }
53 | ||
# Generate one accessor sub per declared key by assigning closures into
# this package's symbol table (*$key works because only 'strict vars' is
# in effect, not 'strict refs').

# Boolean keys: no-arg call in non-void context reads; otherwise sets to
# the given value or 1.
foreach my $key ( @boolean_keys ) {
	*$key = sub {
		my $self = shift;
		if ( defined wantarray and not @_ ) {
			return $self->{values}->{$key};
		}
		$self->{values}->{$key} = ( @_ ? $_[0] : 1 );
		return $self;
	};
}

# Scalar keys: classic get (no args, non-void) / set (first arg).
foreach my $key ( @scalar_keys ) {
	*$key = sub {
		my $self = shift;
		return $self->{values}->{$key} if defined wantarray and !@_;
		$self->{values}->{$key} = shift;
		return $self;
	};
}

# Array keys: read the whole listref, or append all given values.
foreach my $key ( @array_keys ) {
	*$key = sub {
		my $self = shift;
		return $self->{values}->{$key} if defined wantarray and !@_;
		$self->{values}->{$key} ||= [];
		push @{$self->{values}->{$key}}, @_;
		return $self;
	};
}

# Resource keys: reading filters the shared 'resources' tuple list for
# this key's entries; writing delegates to resources().
foreach my $key ( @resource_keys ) {
	*$key = sub {
		my $self = shift;
		unless ( @_ ) {
			return () unless $self->{values}->{resources};
			return map  { $_->[1] }
			       grep { $_->[0] eq $key }
			       @{ $self->{values}->{resources} };
		}
		# NOTE(review): the next line is unreachable -- the 'unless (@_)'
		# block above has already returned for the no-args case (and
		# 'resources' is an arrayref, not a hashref).  Dead code kept as-is.
		return $self->{values}->{resources}->{$key} unless @_;
		my $uri = shift or die(
			"Did not provide a value to $key()"
		);
		$self->resources( $key => $uri );
		return 1;
	};
}

# Tuple keys (except 'resources', which has its own sub below): append
# ( module => version ) pairs, defaulting a missing version to 0, and
# return the pairs just added as a flat list.
foreach my $key ( grep { $_ ne "resources" } @tuple_keys) {
	*$key = sub {
		my $self = shift;
		return $self->{values}->{$key} unless @_;
		my @added;
		while ( @_ ) {
			my $module  = shift or last;
			my $version = shift || 0;
			push @added, [ $module, $version ];
		}
		push @{ $self->{values}->{$key} }, @added;
		return map {@$_} @added;
	};
}
116 | ||
# Resource handling
# Lowercase resource names reserved by the META.yml spec that we accept;
# any other all-lowercase name is rejected.
my %lc_resource = map { $_ => 1 } qw{
	homepage
	license
	bugtracker
	repository
};

# Append ( name => value ) resource pairs.  A false name stops processing,
# a false value skips that entry, and unsupported all-lowercase names are
# fatal.  Returns the accumulated resources arrayref.
sub resources {
	my $self = shift;
	while ( @_ ) {
		my $name = shift
			or last;
		my $value = shift
			or next;
		if ( $name eq lc $name and !$lc_resource{$name} ) {
			die("Unsupported reserved lowercase resource '$name'");
		}
		push @{ $self->{values}->{resources} ||= [] }, [ $name, $value ];
	}
	return $self->{values}->{resources};
}
138 | ||
139 | # Aliases for build_requires that will have alternative | |
140 | # meanings in some future version of META.yml. | |
141 | sub test_requires { shift->build_requires(@_) } | |
142 | sub install_requires { shift->build_requires(@_) } | |
143 | ||
# Convenience wrappers mapping friendly names onto installdirs values.
sub install_as_core   { return shift->installdirs('perl')   }
sub install_as_cpan   { return shift->installdirs('site')   }
sub install_as_site   { return shift->installdirs('site')   }
sub install_as_vendor { return shift->installdirs('vendor') }
149 | ||
# Set the dynamic_config META flag.  An explicit argument is required:
# with none, warn and return $self; otherwise normalise the value to
# 1 or 0, store it, and return 1.
sub dynamic_config {
	my $self = shift;
	if ( @_ ) {
		$self->{values}->{dynamic_config} = $_[0] ? 1 : 0;
		return 1;
	}
	warn "You MUST provide an explicit true/false value to dynamic_config\n";
	return $self;
}
159 | ||
# Get or set the minimum required perl version.  The value is
# normalised via _perl_version() and must be at least 5.005.
sub perl_version {
	my $self = shift;
	return $self->{values}->{perl_version} unless @_;
	my $version = shift or die(
		"Did not provide a value to perl_version()"
	);

	# Canonicalise dotted versions (5.6.1) to numeric form (5.006001).
	$version = $self->_perl_version($version);

	# Anything older than 5.005 is not supported at all.
	die "Module::Install only supports 5.005 or newer (use ExtUtils::MakeMaker)\n"
		unless $version >= 5.005;

	$self->{values}->{perl_version} = $version;
}
177 | ||
#Stolen from M::B
# Map of known license identifiers to their canonical URLs.  Entries
# with an undef URL are recognised license values that have no single
# URL to point at, so license() records no 'license' resource for them.
my %license_urls = (
    perl         => 'http://dev.perl.org/licenses/',
    apache       => 'http://apache.org/licenses/LICENSE-2.0',
    artistic     => 'http://opensource.org/licenses/artistic-license.php',
    artistic_2   => 'http://opensource.org/licenses/artistic-license-2.0.php',
    lgpl         => 'http://opensource.org/licenses/lgpl-license.php',
    lgpl2        => 'http://opensource.org/licenses/lgpl-2.1.php',
    lgpl3        => 'http://opensource.org/licenses/lgpl-3.0.html',
    bsd          => 'http://opensource.org/licenses/bsd-license.php',
    gpl          => 'http://opensource.org/licenses/gpl-license.php',
    gpl2         => 'http://opensource.org/licenses/gpl-2.0.php',
    gpl3         => 'http://opensource.org/licenses/gpl-3.0.html',
    mit          => 'http://opensource.org/licenses/mit-license.php',
    mozilla      => 'http://opensource.org/licenses/mozilla1.1.php',
    open_source  => undef,
    unrestricted => undef,
    restrictive  => undef,
    unknown      => undef,
);
198 | ||
# Get or set the distribution license.  Setting a recognised license
# also records its canonical URL as a 'license' resource.
sub license {
	my $self = shift;
	return $self->{values}->{license} unless @_;
	my $license = shift or die(
		'Did not provide a value to license()'
	);
	$self->{values}->{license} = $license;

	# Known licenses get their URL filled in automatically.
	my $url = $license_urls{$license};
	$self->resources( license => $url ) if $url;

	return 1;
}
214 | ||
# Populate name, version, perl_version, author, license and abstract
# from a single module file.  When no file is given it is guessed from
# name(); author/license/abstract prefer a matching .pod file when one
# exists.  Explicitly set values always take precedence.
sub all_from {
	my ( $self, $file ) = @_;

	unless ( defined($file) ) {
		my $name = $self->name or die(
			"all_from called with no args without setting name() first"
		);
		# Guess lib/Foo/Bar.pm from a Foo-Bar distribution name.
		$file = join('/', 'lib', split(/-/, $name)) . '.pm';
		# Fall back to a top-level Bar.pm when the lib/ path is absent.
		$file =~ s{.*/}{} unless -e $file;
		unless ( -e $file ) {
			die("all_from cannot find $file from $name");
		}
	}
	unless ( -f $file ) {
		die("The path '$file' does not exist, or is not a file");
	}

	# Some methods pull from POD instead of code.
	# If there is a matching .pod, use that instead
	my $pod = $file;
	$pod =~ s/\.pm$/.pod/i;
	$pod = $file unless -e $pod;

	# Pull the different values
	$self->name_from($file)         unless $self->name;
	$self->version_from($file)      unless $self->version;
	$self->perl_version_from($file) unless $self->perl_version;
	$self->author_from($pod)        unless $self->author;
	$self->license_from($pod)       unless $self->license;
	$self->abstract_from($pod)      unless $self->abstract;

	return 1;
}
248 | ||
# Get (and optionally merge into) the 'provides' hash for META.yml.
# Any key/value pairs passed in are merged over the existing entries;
# the (possibly updated) hashref is returned.
sub provides {
	my $self = shift;
	my $map  = $self->{values}->{provides} ||= {};
	if ( @_ ) {
		my %merged = ( %$map, @_ );
		%$map = %merged;
	}
	return $map;
}
255 | ||
# Populate 'provides' automatically by letting Module::Build scan the
# distribution.  Runs only in author (admin) mode, and only when a
# MANIFEST exists.
sub auto_provides {
	my $self = shift;
	return $self unless $self->is_admin;
	unless (-e 'MANIFEST') {
		warn "Cannot deduce auto_provides without a MANIFEST, skipping\n";
		return $self;
	}
	# Avoid spurious warnings as we are not checking manifest here.
	local $SIG{__WARN__} = sub {1};
	require ExtUtils::Manifest;
	# Neuter manicheck for the duration so Module::Build's own
	# manifest check becomes a no-op.
	local *ExtUtils::Manifest::manicheck = sub { return };

	require Module::Build;
	my $build = Module::Build->new(
		dist_name    => $self->name,
		dist_version => $self->version,
		license      => $self->license,
	);
	# Merge whatever Module::Build discovered into our provides map.
	$self->provides( %{ $build->find_dist_packages || {} } );
}
276 | ||
# Declare one optional feature.  $name is the feature description; the
# remaining arguments list the modules it needs, either as a flat list
# or as a single array/hash reference (the features()-style calling
# convention).  Returns the accumulated features list.
sub feature {
	my $self     = shift;
	my $name     = shift;
	my $features = ( $self->{values}->{features} ||= [] );

	# Accept either a single reference or a flat list of modules.
	my $mods = ( @_ == 1 and ref( $_[0] ) ) ? $_[0] : [ @_ ];

	# Flatten nested array/hash refs into a single module list.
	my @flat = map {
		ref($_)
			? ( ref($_) eq 'HASH' ) ? %$_ : @$_
			: $_
	} @$mods;

	push @$features, ( $name => [ @flat ] );

	return @$features;
}
302 | ||
# Declare several features at once from ( name => \@modules ) pairs.
# Returns the flat list of all recorded features, or the empty list.
sub features {
	my $self = shift;
	while ( @_ ) {
		my ( $name, $mods ) = splice( @_, 0, 2 );
		$self->feature( $name, @$mods );
	}
	my $features = $self->{values}->{features};
	return $features ? @$features : ();
}
312 | ||
# Record names to exclude from PAUSE indexing under the given type
# (e.g. 'directory', 'package').  Returns the whole no_index hashref.
sub no_index {
	my ( $self, $type, @names ) = @_;
	if ( $type ) {
		push @{ $self->{values}->{no_index}->{$type} }, @names;
	}
	return $self->{values}->{no_index};
}
319 | ||
# Load META.yml and feed its contents back through our own accessor
# methods, so values the user has already set take precedence over the
# file's contents.
sub read {
	my $self = shift;
	$self->include_deps( 'YAML::Tiny', 0 );

	require YAML::Tiny;
	my $data = YAML::Tiny::LoadFile('META.yml');

	foreach my $key ( keys %$data ) {
		# Skip keys we have no accessor for.
		my $method = $self->can($key) or next;
		my $value  = $data->{$key};
		if ( ref $value eq 'HASH' ) {
			# Dependency-style hashes are fed in pairwise.
			foreach my $module ( keys %$value ) {
				$method->( $self, $module => $value->{$module} );
			}
		} else {
			$method->( $self, $value );
		}
	}
	return $self;
}
340 | ||
# Write META.yml (author/admin mode only; a no-op for end users).
sub write {
	my $self = shift;
	$self->admin->write_meta if $self->is_admin;
	return $self;
}
347 | ||
# Extract the version from a file using ExtUtils::MM_Unix and store it.
sub version_from {
	my ( $self, $file ) = @_;
	require ExtUtils::MM_Unix;
	$self->version( ExtUtils::MM_Unix->parse_version($file) );
}
353 | ||
# Extract the abstract from the NAME section of a file.  A minimal hash
# is blessed straight into ExtUtils::MM_Unix so we can borrow its
# parse_abstract() method without building a full MakeMaker object;
# parse_abstract matches against DISTNAME, which is why it is set here.
sub abstract_from {
	require ExtUtils::MM_Unix;
	my ( $self, $file ) = @_;
	$self->abstract(
		bless(
			{ DISTNAME => $self->name },
			'ExtUtils::MM_Unix'
		)->parse_abstract($file)
	);
}
364 | ||
# Add both distribution and module name
# Derive the distribution name (Foo-Bar) and module name (Foo::Bar)
# from the first package declaration found in $file.
sub name_from {
	my ($self, $file) = @_;
	my $content = Module::Install::_read($file);
	unless (
		$content =~ m/
		^ \s*
		package \s*
		([\w:]+)
		\s* ;
		/ixms
	) {
		die("Cannot determine name from $file\n");
	}
	my $module_name = $1;
	( my $dist_name = $module_name ) =~ s{::}{-}g;
	$self->name($dist_name);
	$self->module_name($module_name) unless $self->module_name;
}
386 | ||
# Extract a minimum perl version from the first 'use'/'require'
# version statement in the file and record it via perl_version().
sub perl_version_from {
	my ( $self, $file ) = @_;
	unless (
		Module::Install::_read($file) =~ m/
		^
		(?:use|require) \s*
		v?
		([\d_\.]+)
		\s* ;
		/ixms
	) {
		warn "Cannot determine perl version info from $file\n";
		return;
	}
	# Strip underscore separators (5.8_1 style) before storing.
	( my $perl_version = $1 ) =~ s{_}{}g;
	$self->perl_version($perl_version);
}
406 | ||
# Pull the author from the AUTHOR POD section (or the copyright line of
# the LICENSE section) and record it, decoding POD E<lt>/E<gt> escapes.
sub author_from {
	my ( $self, $file ) = @_;
	my $content = Module::Install::_read($file);
	if ($content =~ m/
		=head \d \s+ (?:authors?)\b \s*
		([^\n]*)
		|
		=head \d \s+ (?:licen[cs]e|licensing|copyright|legal)\b \s*
		.*? copyright .*? \d\d\d[\d.]+ \s* (?:\bby\b)? \s*
		([^\n]*)
	/ixms) {
		my $author = $1 || $2;
		$author =~ s{E<lt>}{<}g;
		$author =~ s{E<gt>}{>}g;
		$self->author($author);
	} else {
		warn "Cannot determine author info from $file\n";
	}
}
426 | ||
# Guess the distribution license by scanning the LICENSE/COPYRIGHT POD
# section of $file for well-known phrases, recording it via license().
# Returns 1 on success, or 'unknown' (after a warning) on failure.
#
# FIX: the section terminator used to be written '=head\\d' which,
# under /x, matches a literal backslash followed by 'd' and so could
# never match a real '=headN' heading; the captured license text could
# therefore swallow the rest of the document.  Corrected to '=head \d'.
sub license_from {
	my $self = shift;
	if (
		Module::Install::_read($_[0]) =~ m/
		(
			=head \d \s+
			(?:licen[cs]e|licensing|copyright|legal)\b
			.*?
		)
		(=head \d.*|=cut.*|)
		\z
	/ixms ) {
		my $license_text = $1;
		# Phrase table: ( pattern => license id, OSI-approved flag ),
		# listed roughly from most to least specific so e.g. the LGPL
		# phrases are tried before the bare 'GPL' fallback.
		my @phrases = (
			'under the same (?:terms|license) as (?:perl|the perl programming language) itself' => 'perl', 1,
			'GNU general public license'         => 'gpl',         1,
			'GNU public license'                 => 'gpl',         1,
			'GNU lesser general public license'  => 'lgpl',        1,
			'GNU lesser public license'          => 'lgpl',        1,
			'GNU library general public license' => 'lgpl',        1,
			'GNU library public license'         => 'lgpl',        1,
			'BSD license'                        => 'bsd',         1,
			'Artistic license'                   => 'artistic',    1,
			'GPL'                                => 'gpl',         1,
			'LGPL'                               => 'lgpl',        1,
			'BSD'                                => 'bsd',         1,
			'Artistic'                           => 'artistic',    1,
			'MIT'                                => 'mit',         1,
			'proprietary'                        => 'proprietary', 0,
		);
		while ( my ($pattern, $license, $osi) = splice(@phrases, 0, 3) ) {
			# Make literal whitespace in the phrase match any run of
			# whitespace in the document.
			$pattern =~ s{\s+}{\\s+}g;
			if ( $license_text =~ /\b$pattern\b/i ) {
				$self->license($license);
				return 1;
			}
		}
	}

	warn "Cannot determine license info from $_[0]\n";
	return 'unknown';
}
469 | ||
# Find every L<...> POD link pointing at rt.cpan.org in the given text
# and return the de-duplicated list of URLs (order unspecified, as
# with the original keys-of-a-hash implementation).
sub _extract_bugtracker {
	my ($text) = @_;
	my %seen;
	$seen{$_} = undef
		for $text =~ m#L<(\Qhttp://rt.cpan.org/\E[^>]+)>#g;
	return keys %seen;
}
477 | ||
# Extract the rt.cpan.org bugtracker link from a file's POD and record
# it.  Returns 1 on success, 0 when no link (or more than one) exists.
#
# FIX: bugtracker() was previously called as a plain function, which
# passed the URL string as the invocant and silently stored nothing;
# it must be called as a method on $self.  Also fixed the "more than
# on" typo in the warning.
sub bugtracker_from {
	my $self    = shift;
	my $content = Module::Install::_read($_[0]);
	my @links   = _extract_bugtracker($content);
	unless ( @links ) {
		warn "Cannot determine bugtracker info from $_[0]\n";
		return 0;
	}
	if ( @links > 1 ) {
		warn "Found more than one rt.cpan.org link in $_[0]\n";
		return 0;
	}

	# Set the bugtracker
	$self->bugtracker( $links[0] );
	return 1;
}
495 | ||
# Scan a perl source file for 'use Module VERSION;' statements and add
# each pair as a runtime requirement.
sub requires_from {
	my ( $self, $file ) = @_;
	my $content = Module::Install::_readperl($file);
	my @pairs   = $content =~ m/^use\s+([^\W\d]\w*(?:::\w+)*)\s+([\d\.]+)/mg;
	while ( my ( $module, $version ) = splice( @pairs, 0, 2 ) ) {
		$self->requires( $module => $version );
	}
}
506 | ||
# Scan a test file for 'use Module VERSION;' statements and add each
# pair as a test-time requirement.
sub test_requires_from {
	my ( $self, $file ) = @_;
	my $content = Module::Install::_readperl($file);
	my @pairs   = $content =~ m/^use\s+([^\W\d]\w*(?:::\w+)*)\s+([\d\.]+)/mg;
	while ( my ( $module, $version ) = splice( @pairs, 0, 2 ) ) {
		$self->test_requires( $module => $version );
	}
}
517 | ||
# Convert triple-part versions (eg, 5.6.1 or 5.8.9) to
# numbers (eg, 5.006001 or 5.008009).
# Also, convert double-part versions (eg, 5.8)
sub _perl_version {
	# Works both as a method ($self->_perl_version($v)) and as a plain
	# function call, by always taking the last argument.
	my $version = $_[-1];
	$version =~ s/^([1-9])\.([1-9]\d?\d?)$/sprintf("%d.%03d",$1,$2)/e;
	$version =~ s/^([1-9])\.([1-9]\d?\d?)\.(0|[1-9]\d?\d?)$/sprintf("%d.%03d%03d",$1,$2,$3 || 0)/e;
	# Trim a trailing zero sub-version (5.006000 -> 5.006).
	$version =~ s/(\.\d\d\d)000$/$1/;
	$version =~ s/_.+$//;
	if ( ref($version) ) {
		# Numify
		$version = $version + 0;
	}
	return $version;
}
533 | ||
534 | ||
535 | ||
536 | ||
537 | ||
538 | ###################################################################### | |
539 | # MYMETA Support | |
540 | ||
# Deprecated entry point, retained only so old Makefile.PLs fail with
# a clear message instead of an unknown-method error.
sub WriteMyMeta {
	die "WriteMyMeta has been deprecated";
}
544 | ||
# Write MYMETA.yml from the merged meta data.  Silently returns success
# when YAML::Tiny is unavailable or no data could be produced, since
# MYMETA support is best-effort.
sub write_mymeta_yaml {
	my $self = shift;

	# YAML::Tiny is needed to emit the file; without it, do nothing.
	return 1 unless eval { require YAML::Tiny; 1 };

	# Generate the data
	my $meta = $self->_write_mymeta_data or return 1;

	# Save as the MYMETA.yml file
	print "Writing MYMETA.yml\n";
	YAML::Tiny::DumpFile('MYMETA.yml', $meta);
}
560 | ||
# Write MYMETA.json from the merged meta data.  Silently returns
# success when JSON is unavailable or no data could be produced.
sub write_mymeta_json {
	my $self = shift;

	# JSON is needed to emit the file; without it, do nothing.
	return 1 unless eval { require JSON; 1 };

	# Generate the data
	my $meta = $self->_write_mymeta_data or return 1;

	# Save as the MYMETA.json file
	print "Writing MYMETA.json\n";
	Module::Install::_write(
		'MYMETA.json',
		JSON->new->pretty(1)->canonical->encode($meta),
	);
}
579 | ||
# Build the MYMETA data structure: take the distribution's advisory
# META.yml as the base, then overwrite its dependency sections with our
# configured (possibly dynamically adjusted) requirements.  Returns the
# merged hashref, or undef when the data cannot be produced.
sub _write_mymeta_data {
	my $self = shift;

	# If there's no existing META.yml there is nothing we can do
	return undef unless -f 'META.yml';

	# We need Parse::CPAN::Meta to load the file
	unless ( eval { require Parse::CPAN::Meta; 1; } ) {
		return undef;
	}

	# Merge the perl version into the dependencies
	my $val  = $self->Meta->{values};
	my $perl = delete $val->{perl_version};
	if ( $perl ) {
		$val->{requires} ||= [];
		my $requires = $val->{requires};

		# Canonize to three-dot version after Perl 5.6
		if ( $perl >= 5.006 ) {
			$perl =~ s{^(\d+)\.(\d\d\d)(\d*)}{join('.', $1, int($2||0), int($3||0))}e
		}
		# Perl itself is listed first among the requirements.
		unshift @$requires, [ perl => $perl ];
	}

	# Load the advisory META.yml file
	my @yaml = Parse::CPAN::Meta::LoadFile('META.yml');
	my $meta = $yaml[0];

	# Overwrite the non-configure dependency hashes
	delete $meta->{requires};
	delete $meta->{build_requires};
	delete $meta->{recommends};
	if ( exists $val->{requires} ) {
		$meta->{requires} = { map { @$_ } @{ $val->{requires} } };
	}
	if ( exists $val->{build_requires} ) {
		$meta->{build_requires} = { map { @$_ } @{ $val->{build_requires} } };
	}

	return $meta;
}
622 | ||
623 | 1; |
0 | #line 1 | |
1 | package Module::Install::Scripts; | |
2 | ||
3 | use strict 'vars'; | |
4 | use Module::Install::Base (); | |
5 | ||
6 | use vars qw{$VERSION @ISA $ISCORE}; | |
7 | BEGIN { | |
8 | $VERSION = '0.91'; | |
9 | @ISA = 'Module::Install::Base'; | |
10 | $ISCORE = 1; | |
11 | } | |
12 | ||
# Register one or more scripts as EXE_FILES for MakeMaker.  Each name
# may be a path to an existing file, or a bare name found under
# script/.  Dies when a script cannot be located.
sub install_script {
	my $self      = shift;
	my $exe_files = $self->makemaker_args->{EXE_FILES} ||= [];
	foreach my $script ( @_ ) {
		if ( -f $script ) {
			push @$exe_files, $script;
		} elsif ( -d 'script' and -f "script/$script" ) {
			push @$exe_files, "script/$script";
		} else {
			die("Cannot find script '$script'");
		}
	}
}
27 | ||
28 | 1; |
0 | #line 1 | |
1 | package Module::Install::Win32; | |
2 | ||
3 | use strict; | |
4 | use Module::Install::Base (); | |
5 | ||
6 | use vars qw{$VERSION @ISA $ISCORE}; | |
7 | BEGIN { | |
8 | $VERSION = '0.91'; | |
9 | @ISA = 'Module::Install::Base'; | |
10 | $ISCORE = 1; | |
11 | } | |
12 | ||
# determine if the user needs nmake, and download it if needed
sub check_nmake {
	my $self = shift;
	$self->load('can_run');
	$self->load('get_file');

	require Config;
	# Only act on Windows, when the configured make is nmake and it is
	# not already available on the PATH.
	return unless (
		$^O eq 'MSWin32' and
		$Config::Config{make} and
		$Config::Config{make} =~ /^nmake\b/i and
		! $self->can_run('nmake')
	);

	print "The required 'nmake' executable not found, fetching it...\n";

	require File::Basename;
	# Fetch nmake from Microsoft and unpack it next to the perl binary
	# ($^X), verifying the expected download size.
	my $rv = $self->get_file(
		url       => 'http://download.microsoft.com/download/vc15/Patch/1.52/W95/EN-US/Nmake15.exe',
		ftp_url   => 'ftp://ftp.microsoft.com/Softlib/MSLFILES/Nmake15.exe',
		local_dir => File::Basename::dirname($^X),
		size      => 51928,
		run       => 'Nmake15.exe /o > nul',
		check_for => 'Nmake.exe',
		remove    => 1,
	);

	# When the automatic fetch fails, tell the user how to do it by hand.
	die <<'END_MESSAGE' unless $rv;

-------------------------------------------------------------------------------

Since you are using Microsoft Windows, you will need the 'nmake' utility
before installation. It's available at:

  http://download.microsoft.com/download/vc15/Patch/1.52/W95/EN-US/Nmake15.exe
      or
  ftp://ftp.microsoft.com/Softlib/MSLFILES/Nmake15.exe

Please download the file manually, save it to a directory in %PATH% (e.g.
C:\WINDOWS\COMMAND\), then launch the MS-DOS command line shell, "cd" to
that directory, and run "Nmake15.exe" from there; that will create the
'nmake.exe' file needed by this module.

You may then resume the installation process described in README.

-------------------------------------------------------------------------------
END_MESSAGE

}
62 | ||
63 | 1; |
0 | #line 1 | |
1 | package Module::Install::WriteAll; | |
2 | ||
3 | use strict; | |
4 | use Module::Install::Base (); | |
5 | ||
6 | use vars qw{$VERSION @ISA $ISCORE}; | |
7 | BEGIN { | |
8 | $VERSION = '0.91';; | |
9 | @ISA = qw{Module::Install::Base}; | |
10 | $ISCORE = 1; | |
11 | } | |
12 | ||
# Top-level entry point: write the Makefile (or Inline stub) plus the
# META.yml / MYMETA files.  Options (all with defaults): meta, sign,
# inline, check_nmake.  The ordering below is deliberate.
sub WriteAll {
	my $self = shift;
	my %args = (
		meta        => 1,
		sign        => 0,
		inline      => 0,
		check_nmake => 1,
		@_,
	);

	$self->sign(1)                if $args{sign};
	$self->admin->WriteAll(%args) if $self->is_admin;

	$self->check_nmake if $args{check_nmake};
	# Suppress MakeMaker's default *.PL processing unless the user has
	# configured PL_FILES explicitly.
	unless ( $self->makemaker_args->{PL_FILES} ) {
		$self->makemaker_args( PL_FILES => {} );
	}

	# Until ExtUtils::MakeMaker support MYMETA.yml, make sure
	# we clean it up properly ourself.
	$self->realclean_files('MYMETA.yml');

	if ( $args{inline} ) {
		$self->Inline->write;
	} else {
		$self->Makefile->write;
	}

	# The Makefile write process adds a couple of dependencies,
	# so write the META.yml files after the Makefile.
	if ( $args{meta} ) {
		$self->Meta->write;
	}

	# Experimental support for MYMETA
	if ( $ENV{X_MYMETA} ) {
		if ( $ENV{X_MYMETA} eq 'JSON' ) {
			$self->Meta->write_mymeta_json;
		} else {
			$self->Meta->write_mymeta_yaml;
		}
	}

	return 1;
}
58 | ||
59 | 1; |
0 | #line 1 | |
1 | package Module::Install; | |
2 | ||
3 | # For any maintainers: | |
4 | # The load order for Module::Install is a bit magic. | |
5 | # It goes something like this... | |
6 | # | |
7 | # IF ( host has Module::Install installed, creating author mode ) { | |
8 | # 1. Makefile.PL calls "use inc::Module::Install" | |
9 | # 2. $INC{inc/Module/Install.pm} set to installed version of inc::Module::Install | |
10 | # 3. The installed version of inc::Module::Install loads | |
11 | # 4. inc::Module::Install calls "require Module::Install" | |
12 | # 5. The ./inc/ version of Module::Install loads | |
13 | # } ELSE { | |
14 | # 1. Makefile.PL calls "use inc::Module::Install" | |
15 | # 2. $INC{inc/Module/Install.pm} set to ./inc/ version of Module::Install | |
16 | # 3. The ./inc/ version of Module::Install loads | |
17 | # } | |
18 | ||
19 | use 5.005; | |
20 | use strict 'vars'; | |
21 | ||
22 | use vars qw{$VERSION $MAIN}; | |
23 | BEGIN { | |
24 | # All Module::Install core packages now require synchronised versions. | |
25 | # This will be used to ensure we don't accidentally load old or | |
26 | # different versions of modules. | |
27 | # This is not enforced yet, but will be some time in the next few | |
28 | # releases once we can make sure it won't clash with custom | |
29 | # Module::Install extensions. | |
30 | $VERSION = '0.91'; | |
31 | ||
32 | # Storage for the pseudo-singleton | |
33 | $MAIN = undef; | |
34 | ||
35 | *inc::Module::Install::VERSION = *VERSION; | |
36 | @inc::Module::Install::ISA = __PACKAGE__; | |
37 | ||
38 | } | |
39 | ||
40 | ||
41 | ||
42 | ||
43 | ||
44 | # Whether or not inc::Module::Install is actually loaded, the | |
45 | # $INC{inc/Module/Install.pm} is what will still get set as long as | |
46 | # the caller loaded module this in the documented manner. | |
47 | # If not set, the caller may NOT have loaded the bundled version, and thus | |
48 | # they may not have a MI version that works with the Makefile.PL. This would | |
49 | # result in false errors or unexpected behaviour. And we don't want that. | |
# Die loudly when loaded as plain Module::Install instead of through
# the bundled inc::Module::Install shim (detected via the %INC entry
# the shim sets).
my $file = join( '/', 'inc', split /::/, __PACKAGE__ ) . '.pm';
unless ( $INC{$file} ) { die <<"END_DIE" }

Please invoke ${\__PACKAGE__} with:

	use inc::${\__PACKAGE__};

not:

	use ${\__PACKAGE__};

END_DIE
62 | ||
63 | ||
64 | ||
65 | ||
66 | ||
# If the script that is loading Module::Install is from the future,
# then make will detect this and cause it to re-run over and over
# again. This is bad. Rather than taking action to touch it (which
# is unreliable on some platforms and requires write permissions)
# for now we should catch this and refuse to run.
if ( -f $0 ) {
	# Modification time of the running Makefile.PL.
	my $s = (stat($0))[9];

	# If the modification time is only slightly in the future,
	# sleep briefly to remove the problem.
	my $a = $s - time;
	if ( $a > 0 and $a < 5 ) { sleep 5 }

	# Too far in the future, throw an error.
	my $t = time;
	if ( $s > $t ) { die <<"END_DIE" }

Your installer $0 has a modification time in the future ($s > $t).

This is known to create infinite loops in make.

Please correct this, then run $0 again.

END_DIE
}
92 | ||
93 | ||
94 | ||
95 | ||
96 | ||
# Build.PL was formerly supported, but no longer is due to excessive
# difficulty in implementing every single feature twice.
# FIX: corrected the "duel backends" typo to "dual backends" in the
# user-facing error message.
if ( $0 =~ /Build.PL$/i ) { die <<"END_DIE" }

Module::Install no longer supports Build.PL.

It was impossible to maintain dual backends, and has been deprecated.

Please remove all Build.PL files and only use the Makefile.PL installer.

END_DIE
108 | ||
109 | ||
110 | ||
111 | ||
112 | ||
# To save some more typing in Module::Install installers, every...
#   use inc::Module::Install
# ...also acts as an implicit use strict.
# NOTE(review): this sets the compile-time hint bits ($^H) directly,
# which applies to the compilation unit currently being compiled —
# i.e. the Makefile.PL that is loading us.
$^H |= strict::bits(qw(refs subs vars));
117 | ||
118 | ||
119 | ||
120 | ||
121 | ||
122 | use Cwd (); | |
123 | use File::Find (); | |
124 | use File::Path (); | |
125 | use FindBin; | |
126 | ||
# Build and install an AUTOLOAD handler for the caller's package that
# dispatches unknown method calls to the appropriate Module::Install
# plugin.  Handlers are stored keyed by current directory (via a
# symbolic reference on the AUTOLOAD name) so nested Makefile.PLs can
# delegate back to their parent directory's handler.
sub autoload {
	my $self = shift;
	my $who  = $self->_caller;
	my $cwd  = Cwd::cwd();
	my $sym  = "${who}::AUTOLOAD";
	$sym->{$cwd} = sub {
		my $pwd = Cwd::cwd();
		if ( my $code = $sym->{$pwd} ) {
			# Delegate back to parent dirs
			goto &$code unless $cwd eq $pwd;
		}
		# Extract the bare method name from the fully-qualified
		# $AUTOLOAD value.
		$$sym =~ /([^:]+)$/ or die "Cannot autoload $who - $sym";
		my $method = $1;
		if ( uc($method) eq $method ) {
			# Do nothing
			return;
		} elsif ( $method =~ /^_/ and $self->can($method) ) {
			# Dispatch to the root M:I class
			return $self->$method(@_);
		}

		# Dispatch to the appropriate plugin
		unshift @_, ( $self, $1 );
		goto &{$self->can('call')};
	};
}
153 | ||
# Loading entry point.  In author mode (no bundled copy of ourselves
# under inc/ yet) this bootstraps the Admin backend and re-dispatches
# the import; otherwise it wires up the AUTOLOAD dispatcher, preloads
# plugin method stubs, and records the singleton.
sub import {
	my $class = shift;
	my $self  = $class->new(@_);
	my $who   = $self->_caller;

	unless ( -f $self->{file} ) {
		# No bundled copy of ourselves: author mode.  Load the Admin
		# dispatcher, create the author-mode marker directory, and
		# hand control to the admin class's import.
		require "$self->{path}/$self->{dispatch}.pm";
		File::Path::mkpath("$self->{prefix}/$self->{author}");
		$self->{admin} = "$self->{name}::$self->{dispatch}"->new( _top => $self );
		$self->{admin}->init;
		@_ = ($class, _self => $self);
		goto &{"$self->{name}::import"};
	}

	*{"${who}::AUTOLOAD"} = $self->autoload;
	$self->preload;

	# Unregister loader and worker packages so subdirs can use them again
	delete $INC{"$self->{file}"};
	delete $INC{"$self->{path}.pm"};

	# Save to the singleton
	$MAIN = $self;

	return 1;
}
180 | ||
# Pre-generate stub subs in the caller's package for every public
# method offered by any loaded extension, so direct calls route
# through the AUTOLOAD dispatcher installed by autoload().
sub preload {
	my $self = shift;
	unless ( $self->{extensions} ) {
		$self->load_extensions(
			"$self->{prefix}/$self->{path}", $self
		);
	}

	my @exts = @{$self->{extensions}};
	unless ( @exts ) {
		# Nothing bundled yet: in author mode, load everything.
		@exts = $self->{admin}->load_all_extensions;
	}

	# Collect the names of all public methods from every extension's
	# symbol table, skipping _private and ALL-CAPS names.
	my %seen;
	foreach my $obj ( @exts ) {
		while (my ($method, $glob) = each %{ref($obj) . '::'}) {
			next unless $obj->can($method);
			next if $method =~ /^_/;
			next if $method eq uc($method);
			$seen{$method}++;
		}
	}

	my $who = $self->_caller;
	foreach my $name ( sort keys %seen ) {
		*{"${who}::$name"} = sub {
			# Fake an AUTOLOAD call for this method name and let the
			# installed AUTOLOAD handler do the real dispatch.
			${"${who}::AUTOLOAD"} = "${who}::$name";
			goto &{"${who}::AUTOLOAD"};
		};
	}
}
212 | ||
# Constructor.  Fills in the default layout (inc/ prefix, Admin
# dispatcher, paths derived from the package name), and returns an
# already-built object unchanged when one is passed through via _self.
sub new {
	my ($class, %args) = @_;

	# ignore the prefix on extension modules built from top level.
	my $base_path = Cwd::abs_path($FindBin::Bin);
	unless ( Cwd::abs_path(Cwd::cwd()) eq $base_path ) {
		delete $args{prefix};
	}

	return $args{_self} if $args{_self};

	$args{dispatch} ||= 'Admin';
	$args{prefix}   ||= 'inc';
	$args{author}   ||= ($^O eq 'VMS' ? '_author' : '.author');
	$args{bundle}   ||= 'inc/BUNDLES';
	$args{base}     ||= $base_path;
	# Strip the 'inc::' prefix so name/version resolve to the real class.
	$class =~ s/^\Q$args{prefix}\E:://;
	$args{name}     ||= $class;
	$args{version}  ||= $class->VERSION;
	unless ( $args{path} ) {
		$args{path}  = $args{name};
		$args{path}  =~ s!::!/!g;
	}
	$args{file}     ||= "$args{base}/$args{prefix}/$args{path}.pm";
	$args{wrote}      = 0;

	bless( \%args, $class );
}
241 | ||
# Dispatch a plugin method call: find the extension object that
# provides $method, swap it in as the invocant, and tail-call the real
# method (goto &) so the callee sees the original call frame and
# argument list.
sub call {
	my ($self, $method) = @_;
	my $obj = $self->load($method) or return;
	splice(@_, 0, 2, $obj);
	goto &{$obj->can($method)};
}
248 | ||
# Find the extension object that provides $method, loading bundled
# extensions on first use.  Falls back to the admin backend (author
# mode) to install the extension; dies with instructions when no admin
# backend is available.
sub load {
	my ($self, $method) = @_;

	$self->load_extensions(
		"$self->{prefix}/$self->{path}", $self
	) unless $self->{extensions};

	foreach my $obj (@{$self->{extensions}}) {
		return $obj if $obj->can($method);
	}

	my $admin = $self->{admin} or die <<"END_DIE";
The '$method' method does not exist in the '$self->{prefix}' path!
Please remove the '$self->{prefix}' directory and run $0 again to load it.
END_DIE

	# Author mode: have the admin backend fetch and bundle the plugin.
	my $obj = $admin->load($method, 1);
	push @{$self->{extensions}}, $obj;

	$obj;
}
270 | ||
# Load every extension module found under $path, instantiating each
# one and collecting the objects in $self->{extensions}.
sub load_extensions {
	my ($self, $path, $top) = @_;

	# Make sure the bundled inc/ directory is on @INC (compared
	# case-insensitively, skipping coderef entries).
	unless ( grep { ! ref $_ and lc $_ eq lc $self->{prefix} } @INC ) {
		unshift @INC, $self->{prefix};
	}

	foreach my $rv ( $self->find_extensions($path) ) {
		my ($file, $pkg) = @{$rv};
		next if $self->{pathnames}{$pkg};

		local $@;
		my $new = eval { require $file; $pkg->can('new') };
		unless ( $new ) {
			warn $@ if $@;
			next;
		}
		# Remember where the package came from, and drop it from %INC
		# so nested Makefile.PLs can require it again.
		$self->{pathnames}{$pkg} = delete $INC{$file};
		push @{$self->{extensions}}, &{$new}($pkg, _top => $top );
	}

	$self->{extensions} ||= [];
}
294 | ||
# Search the extension directory for plugin .pm files, returning a
# list of [ $file, $package ] pairs.  The dispatch (Admin) module is
# skipped.
#
# FIX: the case-preservation scan below used 'split //, $content',
# which splits into individual CHARACTERS, so the /^=\w/, /^=cut/ and
# package-declaration regexes could never match and the scan was dead
# code.  Corrected to split on newlines.
sub find_extensions {
	my ($self, $path) = @_;

	my @found;
	File::Find::find( sub {
		my $file = $File::Find::name;
		return unless $file =~ m!^\Q$path\E/(.+)\.pm\Z!is;
		my $subpath = $1;
		return if lc($subpath) eq lc($self->{dispatch});

		$file = "$self->{path}/$subpath.pm";
		my $pkg = "$self->{name}::$subpath";
		$pkg =~ s!/!::!g;

		# If we have a mixed-case package name, assume case has been preserved
		# correctly.  Otherwise, root through the file to locate the case-preserved
		# version of the package name.
		if ( $subpath eq lc($subpath) || $subpath eq uc($subpath) ) {
			my $content = Module::Install::_read($subpath . '.pm');
			my $in_pod  = 0;
			foreach ( split /\n/, $content ) {
				$in_pod = 1 if /^=\w/;
				$in_pod = 0 if /^=cut/;
				next if ($in_pod || /^=cut/);  # skip pod text
				next if /^\s*#/;               # and comments
				if ( m/^\s*package\s+($pkg)\s*;/i ) {
					$pkg = $1;
					last;
				}
			}
		}

		push @found, [ $file, $pkg ];
	}, $path ) if -d $path;

	@found;
}
332 | ||
333 | ||
334 | ||
335 | ||
336 | ||
337 | ##################################################################### | |
338 | # Common Utility Functions | |
339 | ||
# Walk up the call stack and return the first calling package that is
# not this one.
sub _caller {
	my $depth  = 0;
	my $caller = caller($depth);
	$caller = caller(++$depth) while $caller eq __PACKAGE__;
	return $caller;
}
349 | ||
# Slurp the entire contents of a file, dying on any I/O failure.
# Keeps the 2-arg open fallback for perls older than 5.6.
sub _read {
	local *FH;
	my $file = $_[0];
	if ( $] >= 5.006 ) {
		open( FH, '<', $file ) or die "open($file): $!";
	} else {
		open( FH, "< $file" ) or die "open($file): $!";
	}
	local $/ = undef;
	my $contents = <FH>;
	close FH or die "close($file): $!";
	return $contents;
}
361 | ||
# Read a perl source file, normalise line endings to \n, then strip
# __END__/__DATA__ sections and inline POD so only code remains.
sub _readperl {
	my $source = Module::Install::_read($_[0]);
	$source =~ s/(?:\015{1,2}\012|\015|\012)/\n/sg;
	$source =~ s/(\n)\n*__(?:DATA|END)__\b.*\z/$1/s;
	$source =~ s/\n\n=\w+.+?\n\n=cut\b.+?\n+/\n\n/sg;
	return $source;
}
369 | ||
# Read a file and return only its POD, with line endings normalised.
# Files with a .pod extension are assumed to be pure POD already.
sub _readpod {
	my $text = Module::Install::_read($_[0]);
	$text =~ s/(?:\015{1,2}\012|\015|\012)/\n/sg;
	return $text if $_[0] =~ /\.pod\z/;
	# Drop code paragraphs, keeping only the POD around them
	$text =~ s/(^|\n=cut\b.+?\n+)[^=\s].+?\n(\n=\w+|\z)/$1$2/sg;
	$text =~ s/\n*=pod\b[^\n]*\n+/\n\n/sg;
	$text =~ s/\n*=cut\b[^\n]*\n+/\n\n/sg;
	$text =~ s/^\n+//s;
	return $text;
}
380 | ||
# Write one or more strings to the named file, replacing any previous
# contents. Dies on any open/print/close failure.
sub _write {
	my $file = $_[0];
	local *FH;
	my $opened = ( $] >= 5.006 )
		? open( FH, '>', $file )  # three-arg open on modern perls
		: open( FH, "> $file" );  # two-arg fallback for pre-5.6
	$opened or die "open($file): $!";
	foreach my $i ( 1 .. $#_ ) {
		print FH $_[$i] or die "print($file): $!";
	}
	close FH or die "close($file): $!";
}
393 | ||
# _version is for processing module versions (eg, 1.03_05) not
# Perl versions (eg, 5.8.1). Returns a number suitable for numeric
# comparison of version strings.
sub _version ($) {
	my $v = shift || 0;
	# Count the dots to detect multipart (x.y.z) versions
	my $dots =()= $v =~ /(\.)/g;
	# Normalise multipart versions so every part is three digits wide
	$v =~ s/(\.)(\d{1,3})/sprintf("$1%03d",$2)/eg if $dots >= 2;
	# Peel off the integer portion
	$v =~ s/^(\d+)\.?//;
	my $result = $1 || 0;
	# Pad each remaining digit group out to three digits
	my @parts = map {
		$_ . '0' x (3 - length $_)
	} $v =~ /(\d{1,3})\D?/g;
	$result = $result . '.' . join '', @parts if @parts;
	return $result + 0;
}
411 | ||
# Numerically compare two module version strings (see _version above).
sub _cmp ($$) {
	return _version($_[0]) <=> _version($_[1]);
}
415 | ||
# Cloned from Params::Util::_CLASS
# Returns the argument when it is a plain (non-reference) string that is
# a legal Perl class name, and undef otherwise.
sub _CLASS ($) {
	return undef if !defined $_[0] or ref $_[0];
	return $_[0] =~ m/^[^\W\d]\w*(?:::\w+)*\z/s ? $_[0] : undef;
}
426 | ||
427 | 1; | |
428 | ||
429 | # Copyright 2008 - 2009 Adam Kennedy. |
370 | 370 | |
371 | 371 | =item .. insert many rows of data efficiently? |
372 | 372 | |
373 | The C<populate> method in L<DBIx::Class::ResultSet> provides | |
374 | efficient bulk inserts. | |
375 | ||
373 | 376 | =item .. update a collection of rows at the same time? |
374 | 377 | |
375 | 378 | Create a resultset using a search, to filter the rows of data you |
155 | 155 | L<https://bugzilla.redhat.com/show_bug.cgi?id=460308> and |
156 | 156 | L<http://rhn.redhat.com/errata/RHBA-2008-0876.html> |
157 | 157 | |
158 | =head2 Excessive Memory Allocation with TEXT/BLOB/etc. Columns and Large LongReadLen | |
159 | ||
160 | It has been observed, using L<DBD::ODBC>, that a creating a L<DBIx::Class::Row> | |
161 | object which includes a column of data type TEXT/BLOB/etc. will allocate | |
162 | LongReadLen bytes. This allocation does not leak, but if LongReadLen | |
163 | is large in size, and many such row objects are created, e.g. as the | |
164 | output of a ResultSet query, the memory footprint of the Perl interpreter | |
165 | can grow very large. | |
166 | ||
167 | The solution is to use the smallest practical value for LongReadLen. | |
168 | ||
158 | 169 | =cut |
159 | 170 |
500 | 500 | } |
501 | 501 | else { |
502 | 502 | my $bumped_pos_val = $self->_position_value ($to_position); |
503 | my @between = ($to_position, $new_group_last_position); | |
503 | my @between = map { $self->_position_value ($_) } ($to_position, $new_group_last_position); | |
504 | 504 | $self->_shift_siblings (1, @between); #shift right |
505 | 505 | $self->set_column( $position_column => $bumped_pos_val ); |
506 | 506 | } |
681 | 681 | if you are working with preexisting non-normalised position data, |
682 | 682 | or if you need to work with materialized path columns. |
683 | 683 | |
684 | =head2 _position | |
685 | ||
686 | my $num_pos = $item->_position; | |
687 | ||
688 | Returns the B<absolute numeric position> of the current object, with the | |
689 | first object being at position 1, its sibling at position 2 and so on. | |
690 | By default simply returns the value of L</position_column>. | |
691 | ||
692 | =cut | |
693 | sub _position { | |
694 | my $self = shift; | |
695 | ||
696 | # #the right way to do this | |
697 | # return $self->previous_siblings->count + 1; | |
698 | ||
699 | return $self->get_column ($self->position_column); | |
700 | } | |
701 | ||
702 | 684 | =head2 _position_from_value |
703 | 685 | |
704 | my $num_pos = $item->_position_of_value ( $pos_value ) | |
686 | my $num_pos = $item->_position_from_value ( $pos_value ) | |
705 | 687 | |
706 | 688 | Returns the B<absolute numeric position> of an object with a B<position |
707 | 689 | value> set to C<$pos_value>. By default simply returns C<$pos_value>. |
861 | 843 | return $self->_group_rs->search( |
862 | 844 | { $position_column => { '!=' => $self->get_column($position_column) } }, |
863 | 845 | ); |
846 | } | |
847 | ||
=head2 _position

  my $num_pos = $item->_position;

Returns the B<absolute numeric position> of the current object, with the
first object being at position 1, its sibling at position 2 and so on.

=cut
sub _position {
  my ($self) = @_;
  # translate the stored position value into an absolute position
  my $pos_value = $self->get_column( $self->position_column );
  return $self->_position_from_value($pos_value);
}
865 | 860 | |
866 | 861 | =head2 _grouping_clause |
356 | 356 | } |
357 | 357 | |
358 | 358 | my $rs = (ref $self)->new($self->result_source, $new_attrs); |
359 | if ($rows) { | |
360 | $rs->set_cache($rows); | |
361 | } | |
359 | ||
360 | $rs->set_cache($rows) if ($rows); | |
361 | ||
362 | 362 | return $rs; |
363 | 363 | } |
364 | 364 | |
518 | 518 | # in ::Relationship::Base::search_related (the row method), and furthermore |
519 | 519 | # the relationship is of the 'single' type. This means that the condition |
520 | 520 | # provided by the relationship (already attached to $self) is sufficient, |
521 | # as there can be only one row in the databse that would satisfy the | |
521 | # as there can be only one row in the databse that would satisfy the | |
522 | 522 | # relationship |
523 | 523 | } |
524 | 524 | else { |
529 | 529 | } |
530 | 530 | |
531 | 531 | # Run the query |
532 | my $rs = $self->search ($query, $attrs); | |
532 | my $rs = $self->search ($query, {result_class => $self->result_class, %$attrs}); | |
533 | 533 | if (keys %{$rs->_resolved_attrs->{collapse}}) { |
534 | 534 | my $row = $rs->next; |
535 | 535 | carp "Query returned more than one row" if $rs->next; |
1239 | 1239 | |
1240 | 1240 | my $tmp_attrs = { %$attrs }; |
1241 | 1241 | |
1242 | # take off any limits, record_filter is cdbi, and no point of ordering a count | |
1242 | # take off any limits, record_filter is cdbi, and no point of ordering a count | |
1243 | 1243 | delete $tmp_attrs->{$_} for (qw/select as rows offset order_by record_filter/); |
1244 | 1244 | |
1245 | 1245 | # overwrite the selector (supplied by the storage) |
1494 | 1494 | |
1495 | 1495 | my $rsrc = $self->result_source; |
1496 | 1496 | |
1497 | # if a condition exists we need to strip all table qualifiers | |
1498 | # if this is not possible we'll force a subquery below | |
1499 | my $cond = $rsrc->schema->storage->_strip_cond_qualifiers ($self->{cond}); | |
1500 | ||
1497 | 1501 | my $needs_group_by_subq = $self->_has_resolved_attr (qw/collapse group_by -join/); |
1498 | my $needs_subq = $self->_has_resolved_attr (qw/row offset/); | |
1502 | my $needs_subq = (not defined $cond) || $self->_has_resolved_attr(qw/row offset/); | |
1499 | 1503 | |
1500 | 1504 | if ($needs_group_by_subq or $needs_subq) { |
1501 | 1505 | |
1543 | 1547 | return $rsrc->storage->$op( |
1544 | 1548 | $rsrc, |
1545 | 1549 | $op eq 'update' ? $values : (), |
1546 | $self->_cond_for_update_delete, | |
1550 | $cond, | |
1547 | 1551 | ); |
1548 | 1552 | } |
1549 | 1553 | } |
1550 | ||
1551 | ||
# _cond_for_update_delete
#
# update/delete require the condition to be modified to handle
# the differing SQL syntax available. This transforms the $self->{cond}
# appropriately, returning the new condition (with table qualifiers
# such as 'me.' stripped from the column names).

sub _cond_for_update_delete {
  my ($self, $full_cond) = @_;

  $full_cond ||= $self->{cond};

  # No condition at all - we're updating/deleting everything
  return {} unless ref $full_cond;

  if (ref $full_cond eq 'ARRAY') {
    # a list of hashrefs (implicit OR) - strip the qualifier off each key
    return [
      map {
        my $part = $_;
        my %stripped;
        for my $key (keys %$part) {
          $key =~ /([^.]+)$/;
          $stripped{$1} = $part->{$key};
        }
        \%stripped;
      } @$full_cond
    ];
  }

  if (ref $full_cond eq 'HASH') {
    if ((keys %$full_cond)[0] eq '-and') {
      # an -and list may interleave hashrefs with key/value pairs
      my @and_parts;
      my @entries = @{$full_cond->{-and}};
      for (my $i = 0; $i < @entries; $i++) {
        my $entry = $entries[$i];
        my $part;
        if (ref $entry eq 'HASH') {
          $part = $self->_cond_for_update_delete($entry);
        }
        else {
          # a plain "column => value" pair; the value follows the key
          $entry =~ /([^.]+)$/;
          $part->{$1} = $entries[++$i];
        }
        push @and_parts, $part;
      }
      return { -and => \@and_parts };
    }

    # simple hashref condition - strip the qualifier off each key
    my %stripped;
    for my $key (keys %$full_cond) {
      $key =~ /([^.]+)$/;
      $stripped{$1} = $full_cond->{$key};
    }
    return \%stripped;
  }

  $self->throw_exception("Can't update/delete on resultset with condition unless hash or array");
}
1608 | ||
1609 | 1554 | |
1610 | 1555 | =head2 update |
1611 | 1556 | |
1793 | 1738 | } |
1794 | 1739 | return wantarray ? @created : \@created; |
1795 | 1740 | } else { |
1796 | my ($first, @rest) = @$data; | |
1797 | ||
1798 | my @names = grep {!ref $first->{$_}} keys %$first; | |
1799 | my @rels = grep { $self->result_source->has_relationship($_) } keys %$first; | |
1741 | my $first = $data->[0]; | |
1742 | ||
1743 | # if a column is a registered relationship, and is a non-blessed hash/array, consider | |
1744 | # it relationship data | |
1745 | my (@rels, @columns); | |
1746 | for (keys %$first) { | |
1747 | my $ref = ref $first->{$_}; | |
1748 | $self->result_source->has_relationship($_) && ($ref eq 'ARRAY' or $ref eq 'HASH') | |
1749 | ? push @rels, $_ | |
1750 | : push @columns, $_ | |
1751 | ; | |
1752 | } | |
1753 | ||
1800 | 1754 | my @pks = $self->result_source->primary_columns; |
1801 | 1755 | |
1802 | 1756 | ## do the belongs_to relationships |
1825 | 1779 | delete $data->[$index]->{$rel}; |
1826 | 1780 | $data->[$index] = {%{$data->[$index]}, %$related}; |
1827 | 1781 | |
1828 | push @names, keys %$related if $index == 0; | |
1782 | push @columns, keys %$related if $index == 0; | |
1829 | 1783 | } |
1830 | 1784 | } |
1831 | 1785 | |
1832 | 1786 | ## do bulk insert on current row |
1833 | my @values = map { [ @$_{@names} ] } @$data; | |
1834 | ||
1835 | 1787 | $self->result_source->storage->insert_bulk( |
1836 | 1788 | $self->result_source, |
1837 | \@names, | |
1838 | \@values, | |
1789 | \@columns, | |
1790 | [ map { [ @$_{@columns} ] } @$data ], | |
1839 | 1791 | ); |
1840 | 1792 | |
1841 | 1793 | ## do the has_many relationships |
1844 | 1796 | foreach my $rel (@rels) { |
1845 | 1797 | next unless $item->{$rel} && ref $item->{$rel} eq "ARRAY"; |
1846 | 1798 | |
1847 | my $parent = $self->find(map {{$_=>$item->{$_}} } @pks) | |
1799 | my $parent = $self->find({map { $_ => $item->{$_} } @pks}) | |
1848 | 1800 | || $self->throw_exception('Cannot find the relating object.'); |
1849 | 1801 | |
1850 | 1802 | my $child = $parent->$rel; |
2561 | 2513 | |
2562 | 2514 | sub clear_cache { |
2563 | 2515 | shift->set_cache(undef); |
2516 | } | |
2517 | ||
=head2 is_paged

=over 4

=item Arguments: none

=item Return Value: true, if the resultset has been paginated

=back

=cut

sub is_paged {
  my ($self) = @_;
  # double negation boolean-ifies the 'page' attribute
  return !! $self->{attrs}{page};
}
2565 | 2534 | |
2566 | 2535 | =head2 related_resultset |
2710 | 2679 | }]; |
2711 | 2680 | |
2712 | 2681 | my $seen = { %{$attrs->{seen_join} || {} } }; |
2713 | my $jpath = ($attrs->{seen_join} && keys %{$attrs->{seen_join}}) | |
2714 | ? $from->[-1][0]{-join_path} | |
2682 | my $jpath = ($attrs->{seen_join} && keys %{$attrs->{seen_join}}) | |
2683 | ? $from->[-1][0]{-join_path} | |
2715 | 2684 | : []; |
2716 | 2685 | |
2717 | 2686 | |
2947 | 2916 | # even though it doesn't make much sense, this is what pre 081xx has |
2948 | 2917 | # been doing |
2949 | 2918 | if (my $page = delete $attrs->{page}) { |
2950 | $attrs->{offset} = | |
2919 | $attrs->{offset} = | |
2951 | 2920 | ($attrs->{rows} * ($page - 1)) |
2952 | 2921 | + |
2953 | 2922 | ($attrs->{offset} || 0) |
3142 | 3111 | |
3143 | 3112 | =back |
3144 | 3113 | |
3145 | Which column(s) to order the results by. | |
3114 | Which column(s) to order the results by. | |
3146 | 3115 | |
3147 | 3116 | [The full list of suitable values is documented in |
3148 | 3117 | L<SQL::Abstract/"ORDER BY CLAUSES">; the following is a summary of |
3236 | 3205 | When you use function/stored procedure names and do not supply an C<as> |
3237 | 3206 | attribute, the column names returned are storage-dependent. E.g. MySQL would |
3238 | 3207 | return a column named C<count(employeeid)> in the above example. |
3208 | ||
3209 | B<NOTE:> You will almost always need a corresponding 'as' entry when you use | |
3210 | 'select'. | |
3239 | 3211 | |
3240 | 3212 | =head2 +select |
3241 | 3213 | |
3434 | 3406 | |
3435 | 3407 | =over 4 |
3436 | 3408 | |
3437 | =item * | |
3409 | =item * | |
3438 | 3410 | |
3439 | 3411 | Prefetch uses the L</cache> to populate the prefetched relationships. This |
3440 | 3412 | may or may not be what you want. |
3441 | 3413 | |
3442 | =item * | |
3414 | =item * | |
3443 | 3415 | |
3444 | 3416 | If you specify a condition on a prefetched relationship, ONLY those |
3445 | 3417 | rows that match the prefetched condition will be fetched into that relationship. |
3551 | 3523 | # only return rows WHERE deleted IS NULL for all searches |
3552 | 3524 | __PACKAGE__->resultset_attributes({ where => { deleted => undef } }); ) |
3553 | 3525 | |
3554 | Can be overridden by passing C<{ where => undef }> as an attribute | |
3555 | to a resulset. | |
3526 | Can be overridden by passing C<< { where => undef } >> as an attribute | |
3527 | to a resultset. | |
3556 | 3528 | |
3557 | 3529 | =back |
3558 | 3530 |
388 | 388 | my $self = shift; |
389 | 389 | $self->throw_exception( |
390 | 390 | "columns() is a read-only accessor, did you mean add_columns()?" |
391 | ) if (@_ > 1); | |
391 | ) if @_; | |
392 | 392 | return @{$self->{_ordered_columns}||[]}; |
393 | 393 | } |
394 | 394 |
423 | 423 | sub in_storage { |
424 | 424 | my ($self, $val) = @_; |
425 | 425 | $self->{_in_storage} = $val if @_ > 1; |
426 | return $self->{_in_storage}; | |
426 | return $self->{_in_storage} ? 1 : 0; | |
427 | 427 | } |
428 | 428 | |
429 | 429 | =head2 update |
46 | 46 | $self; |
47 | 47 | } |
48 | 48 | |
49 | # Some databases (sqlite) do not handle multiple parenthesis | |
50 | # around in/between arguments. A tentative x IN ( (1, 2 ,3) ) | |
51 | # is interpreted as x IN 1 or something similar. | |
52 | # | |
53 | # Since we currently do not have access to the SQLA AST, resort | |
54 | # to barbaric mutilation of any SQL supplied in literal form | |
55 | sub _strip_outer_paren { | |
56 | my ($self, $arg) = @_; | |
57 | ||
58 | return $self->_SWITCH_refkind ($arg, { | |
59 | ARRAYREFREF => sub { | |
60 | $$arg->[0] = __strip_outer_paren ($$arg->[0]); | |
61 | return $arg; | |
62 | }, | |
63 | SCALARREF => sub { | |
64 | return \__strip_outer_paren( $$arg ); | |
65 | }, | |
66 | FALLBACK => sub { | |
67 | return $arg | |
68 | }, | |
69 | }); | |
70 | } | |
71 | ||
# Peel fully-enclosing parenthesis pairs off a literal SQL string, one
# pair at a time. Non-string and empty arguments pass through untouched.
sub __strip_outer_paren {
  my $sql = shift;

  if ( $sql and not ref $sql ) {
    $sql = $1 while $sql =~ /^ \s* \( (.*) \) \s* $/x;
  }

  return $sql;
}
83 | ||
84 | sub _where_field_IN { | |
85 | my ($self, $lhs, $op, $rhs) = @_; | |
86 | $rhs = $self->_strip_outer_paren ($rhs); | |
87 | return $self->SUPER::_where_field_IN ($lhs, $op, $rhs); | |
88 | } | |
89 | ||
90 | sub _where_field_BETWEEN { | |
91 | my ($self, $lhs, $op, $rhs) = @_; | |
92 | $rhs = $self->_strip_outer_paren ($rhs); | |
93 | return $self->SUPER::_where_field_BETWEEN ($lhs, $op, $rhs); | |
94 | } | |
95 | 49 | |
96 | 50 | # Slow but ANSI standard Limit/Offset support. DB2 uses this |
97 | 51 | sub _RowNumberOver { |
9 | 9 | |
10 | 10 | =head1 NAME |
11 | 11 | |
12 | DBIx::Class::Storage::DBI::AutoCast | |
12 | DBIx::Class::Storage::DBI::AutoCast - Storage component for RDBMS requiring explicit placeholder typing | |
13 | 13 | |
14 | 14 | =head1 SYNOPSIS |
15 | 15 | |
27 | 27 | converted to: |
28 | 28 | |
29 | 29 | CAST(? as $mapped_type) |
30 | ||
31 | This option can also be enabled in L<DBIx::Class::Storage::DBI/connect_info> as: | |
32 | ||
33 | on_connect_call => ['set_auto_cast'] | |
30 | 34 | |
31 | 35 | =cut |
32 | 36 | |
59 | 63 | return ($sql, $bind); |
60 | 64 | } |
61 | 65 | |
=head2 connect_call_set_auto_cast

Executes:

  $schema->storage->auto_cast(1);

on connection.

Used as:

  on_connect_call => ['set_auto_cast']

in L<DBIx::Class::Storage::DBI/connect_info>.

=cut

# Connection hook that simply switches the auto_cast attribute on.
sub connect_call_set_auto_cast {
  $_[0]->auto_cast(1);
}
62 | 86 | |
63 | 87 | =head1 AUTHOR |
64 | 88 |
60 | 60 | my $self = shift; |
61 | 61 | |
62 | 62 | if (ref($self->_dbi_connect_info->[0]) eq 'CODE') { |
63 | $self->throw_exception ('cannot set DBI attributes on a CODE ref connect_info'); | |
63 | $self->throw_exception ('Cannot set DBI attributes on a CODE ref connect_info'); | |
64 | 64 | } |
65 | 65 | |
66 | 66 | my $dbi_attrs = $self->_dbi_connect_info->[-1]; |
52 | 52 | |
53 | 53 | my $sth; |
54 | 54 | |
55 | my $source_name; | |
56 | if ( ref $source->name ne 'SCALAR' ) { | |
57 | $source_name = $source->name; | |
58 | } | |
59 | else { | |
60 | $source_name = ${$source->name}; | |
61 | } | |
62 | ||
55 | 63 | # check for fully-qualified name (eg. SCHEMA.TABLENAME) |
56 | if ( my ( $schema, $table ) = $source->name =~ /(\w+)\.(\w+)/ ) { | |
64 | if ( my ( $schema, $table ) = $source_name =~ /(\w+)\.(\w+)/ ) { | |
57 | 65 | $sql = q{ |
58 | 66 | SELECT trigger_body FROM ALL_TRIGGERS t |
59 | 67 | WHERE t.owner = ? AND t.table_name = ? |
65 | 73 | } |
66 | 74 | else { |
67 | 75 | $sth = $dbh->prepare($sql); |
68 | $sth->execute( uc( $source->name ) ); | |
76 | $sth->execute( uc( $source_name ) ); | |
69 | 77 | } |
70 | 78 | while (my ($insert_trigger) = $sth->fetchrow_array) { |
71 | 79 | return uc($1) if $insert_trigger =~ m!(\w+)\.nextval!i; # col name goes here??? |
222 | 230 | |
223 | 231 | =cut |
224 | 232 | |
225 | sub source_bind_attributes | |
233 | sub source_bind_attributes | |
226 | 234 | { |
227 | 235 | require DBD::Oracle; |
228 | 236 | my $self = shift; |
25 | 25 | |
26 | 26 | for my $col (@cols) { |
27 | 27 | my $seq = ( $source->column_info($col)->{sequence} ||= $self->dbh_do('_dbh_get_autoinc_seq', $source, $col) ) |
28 | or $self->throw_exception( "could not determine sequence for " | |
29 | . $source->name | |
30 | . ".$col, please consider adding a " | |
31 | . "schema-qualified sequence to its column info" | |
32 | ); | |
28 | or $self->throw_exception( sprintf( | |
29 | 'could not determine sequence for column %s.%s, please consider adding a schema-qualified sequence to its column info', | |
30 | $source->name, | |
31 | $col, | |
32 | )); | |
33 | 33 | |
34 | 34 | push @values, $self->_dbh_last_insert_id ($self->_dbh, $seq); |
35 | 35 | } |
60 | 60 | ( $schema, $table ) = ( $1, $2 ); |
61 | 61 | } |
62 | 62 | |
63 | # use DBD::Pg to fetch the column info if it is recent enough to | |
64 | # work. otherwise, use custom SQL | |
65 | my $seq_expr = $DBD::Pg::VERSION >= 2.015001 | |
66 | ? eval{ $dbh->column_info(undef,$schema,$table,$col)->fetchrow_hashref->{COLUMN_DEF} } | |
67 | : $self->_dbh_get_column_default( $dbh, $schema, $table, $col ); | |
63 | # get the column default using a Postgres-specific pg_catalog query | |
64 | my $seq_expr = $self->_dbh_get_column_default( $dbh, $schema, $table, $col ); | |
68 | 65 | |
69 | 66 | # if no default value is set on the column, or if we can't parse the |
70 | 67 | # default value as a sequence, throw. |
71 | unless ( defined $seq_expr and $seq_expr =~ /^nextval\(+'([^']+)'::(?:text|regclass)\)/i ){ | |
68 | unless ( defined $seq_expr and $seq_expr =~ /^nextval\(+'([^']+)'::(?:text|regclass)\)/i ) { | |
72 | 69 | $seq_expr = '' unless defined $seq_expr; |
73 | 70 | $schema = "$schema." if defined $schema && length $schema; |
74 | $self->throw_exception( "no sequence found for $schema$table.$col, check table definition, " | |
75 | . "or explicitly set the 'sequence' for this column in the " | |
76 | . $source->source_name | |
77 | . " class" | |
78 | ); | |
71 | $self->throw_exception( sprintf ( | |
72 | 'no sequence found for %s%s.%s, check the RDBMS table definition or explicitly set the '. | |
73 | "'sequence' for this column in %s", | |
74 | $schema ? "$schema." : '', | |
75 | $table, | |
76 | $col, | |
77 | $source->source_name, | |
78 | )); | |
79 | 79 | } |
80 | 80 | |
81 | 81 | return $1; |
46 | 46 | return $backupfile; |
47 | 47 | } |
48 | 48 | |
# Inject the version of the connected SQLite into the SQLT producer
# arguments before handing off to the generic implementation, so the
# generated DDL matches the capabilities of the running SQLite.
sub deployment_statements {
  my ($self, $schema, $type, $version, $dir, $sqltargs, @rest) = @_;

  $sqltargs ||= {};

  my $sqlite_version = $self->_get_dbh->{sqlite_version};

  # SQLT does a numeric comparison, so reduce x.y.z to a numeric x.y
  $sqlite_version =~ s/^(\d+) \. (\d+) (?: \. (\d+))? .*/${1}.${2}/x;

  $sqltargs->{producer_args}{sqlite_version} = $sqlite_version;

  $self->next::method($schema, $type, $version, $dir, $sqltargs, @rest);
}
64 | ||
49 | 65 | sub datetime_parser_type { return "DateTime::Format::SQLite"; } |
50 | 66 | |
51 | 67 | 1; |
package # hide from PAUSE
    DBIx::Class::Storage::DBI::Sybase::Base;

use strict;
use warnings;

use base qw/DBIx::Class::Storage::DBI/;
use mro 'c3';

=head1 NAME

DBIx::Class::Storage::DBI::Sybase::Base - Common functionality for drivers using
DBD::Sybase

=cut

# Liveness check for the current connection: issue a trivial query and
# report 1 on success, 0 on any failure or when no handle is present.
sub _ping {
  my $self = shift;

  my $dbh = $self->_dbh
    or return 0;

  local $dbh->{RaiseError} = 1;
  my $alive = eval { $dbh->do('select 1'); 1 };

  return $alive ? 1 : 0;
}

# Probe whether the connected server/libraries can handle real bind
# variables (placeholders). True when a non-string bind executes cleanly.
sub _placeholders_supported {
  my $self = shift;
  my $dbh  = $self->_get_dbh;

  return eval {
    # There's also $dbh->{syb_dynamic_supported}, but it can be
    # inaccurate for this purpose.
    local $dbh->{PrintError} = 0;
    local $dbh->{RaiseError} = 1;
    # this specifically tests a bind that is NOT a string
    $dbh->selectrow_array('select 1 where 1 = ?', {}, 1);
  };
}

1;

=head1 AUTHORS

See L<DBIx::Class/CONTRIBUTORS>.

=head1 LICENSE

You may distribute this code under the same terms as Perl itself.

=cut
package DBIx::Class::Storage::DBI::Sybase::Common;

use strict;
use warnings;

use base qw/DBIx::Class::Storage::DBI/;
use mro 'c3';

=head1 NAME

DBIx::Class::Storage::DBI::Sybase::Common - Common functionality for drivers using
DBD::Sybase

=head1 DESCRIPTION

This is the base class for L<DBIx::Class::Storage::DBI::Sybase> and
L<DBIx::Class::Storage::DBI::Sybase::Microsoft_SQL_Server>. It provides some
utility methods related to L<DBD::Sybase> and the supported functions of the
database you are connecting to.

=head1 METHODS

=cut

# Liveness check for the current connection. Returns 1 when a trivial
# query (or, with child connections disabled, a plain ping) succeeds,
# 0 otherwise or when there is no handle at all.
sub _ping {
  my $self = shift;

  my $dbh = $self->_dbh
    or return 0;

  local $dbh->{RaiseError} = 1;
  local $dbh->{PrintError} = 0;

  if ($dbh->{syb_no_child_con}) {
    # without child connections a ping is impossible while a statement
    # is active - report failure in that case
    my $alive = eval { $dbh->ping };
    return $@ ? 0 : $alive;
  }

  # XXX if the main connection goes stale, does opening another for this
  # statement really determine anything?
  eval { $dbh->do('select 1') };

  return $@ ? 0 : 1;
}

# Append a maxConnect=N setting to the DSN (default 256) and reconnect
# if we were already connected, so the new setting takes effect.
sub _set_max_connect {
  my ($self, $val) = @_;
  $val ||= 256;

  my $dsn = $self->_dbi_connect_info->[0];

  # nothing we can do about a coderef connector
  return if ref($dsn) eq 'CODE';

  unless ($dsn =~ /maxConnect=/) {
    $self->_dbi_connect_info->[0] = "$dsn;maxConnect=$val";
    my $was_connected = defined $self->_dbh;
    $self->disconnect;
    $self->ensure_connected if $was_connected;
  }
}

=head2 using_freetds

Whether or not L<DBD::Sybase> was compiled against FreeTDS. If false, it means
the Sybase OpenClient libraries were used.

=cut

sub using_freetds {
  my $self = shift;
  return $self->_get_dbh->{syb_oc_version} =~ /freetds/i;
}

=head2 set_textsize

When using FreeTDS and/or MSSQL, C<< $dbh->{LongReadLen} >> is not available,
use this function instead. It does:

  $dbh->do("SET TEXTSIZE $bytes");

Takes the number of bytes, or uses the C<LongReadLen> value from your
L<DBIx::Class/connect_info> if omitted, lastly falls back to the C<32768> which
is the L<DBD::Sybase> default.

=cut

sub set_textsize {
  my $self  = shift;
  my $bytes = shift
    || eval { $self->_dbi_connect_info->[-1]->{LongReadLen} }
    || 32768; # the DBD::Sybase default

  return unless defined $bytes;

  $self->_dbh->do("SET TEXTSIZE $bytes");
}

1;

=head1 AUTHORS

See L<DBIx::Class/CONTRIBUTORS>.

=head1 LICENSE

You may distribute this code under the same terms as Perl itself.

=cut
3 | 3 | use warnings; |
4 | 4 | |
5 | 5 | use base qw/ |
6 | DBIx::Class::Storage::DBI::Sybase::Base | |
6 | DBIx::Class::Storage::DBI::Sybase::Common | |
7 | 7 | DBIx::Class::Storage::DBI::MSSQL |
8 | 8 | /; |
9 | 9 | use mro 'c3'; |
17 | 17 | 'DBIx::Class::Storage::DBI::Sybase::Microsoft_SQL_Server::NoBindVars'; |
18 | 18 | $self->_rebless; |
19 | 19 | } |
20 | } | |
20 | 21 | |
21 | # LongReadLen doesn't work with MSSQL through DBD::Sybase, and the default is | |
22 | # huge on some versions of SQL server and can cause memory problems, so we | |
23 | # fix it up here. | |
24 | my $text_size = eval { $self->_dbi_connect_info->[-1]->{LongReadLen} } || | |
25 | 32768; # the DBD::Sybase default | |
22 | sub _init { | |
23 | my $self = shift; | |
26 | 24 | |
27 | $dbh->do("set textsize $text_size"); | |
25 | # LongReadLen doesn't work with MSSQL through DBD::Sybase, and the default is | |
26 | # huge on some versions of SQL server and can cause memory problems, so we | |
27 | # fix it up here (see Sybase/Common.pm) | |
28 | $self->set_textsize; | |
29 | } | |
30 | ||
31 | sub _dbh_begin_work { | |
32 | my $self = shift; | |
33 | ||
34 | $self->_get_dbh->do('BEGIN TRAN'); | |
35 | } | |
36 | ||
37 | sub _dbh_commit { | |
38 | my $self = shift; | |
39 | my $dbh = $self->_dbh | |
40 | or $self->throw_exception('cannot COMMIT on a disconnected handle'); | |
41 | $dbh->do('COMMIT'); | |
42 | } | |
43 | ||
44 | sub _dbh_rollback { | |
45 | my $self = shift; | |
46 | my $dbh = $self->_dbh | |
47 | or $self->throw_exception('cannot ROLLBACK on a disconnected handle'); | |
48 | $dbh->do('ROLLBACK'); | |
28 | 49 | } |
29 | 50 | |
30 | 51 | 1; |
package DBIx::Class::Storage::DBI::Sybase::NoBindVars;

# strictures were missing here, unlike the sibling Sybase storage classes
use strict;
use warnings;

use base qw/
  DBIx::Class::Storage::DBI::NoBindVars
  DBIx::Class::Storage::DBI::Sybase
/;
use mro 'c3';
use List::Util ();
use Scalar::Util ();

# Storage initialisation: statement caching is useless (and disabled)
# because every query has its values interpolated inline, and the last
# insert id must come from @@IDENTITY instead of a placeholder query.
sub _init {
  my $self = shift;
  $self->disable_sth_caching(1);
  $self->_identity_method('@@IDENTITY');
  $self->next::method (@_);
}

sub _fetch_identity_sql { 'SELECT ' . $_[0]->_identity_method }

my $number = sub { Scalar::Util::looks_like_number($_[0]) };

my $decimal = sub { $_[0] =~ /^ [-+]? \d+ (?:\.\d*)? \z/x };

# Map of datatype (regex fragment) to a predicate; values satisfying the
# predicate are safe to interpolate into SQL without quoting.
my %noquote = (
    int     => sub { $_[0] =~ /^ [-+]? \d+ \z/x },
    bit     => sub { $_[0] =~ /^[01]\z/ },
    money   => sub { $_[0] =~ /^\$ \d+ (?:\.\d*)? \z/x },
    float   => $number,
    real    => $number,
    double  => $number,
    decimal => $decimal,
    numeric => $decimal,
);

# Decide whether a value of the given datatype may be interpolated into
# the SQL unquoted. Falls back to the generic implementation when in doubt.
sub interpolate_unquoted {
  my $self = shift;
  my ($type, $value) = @_;

  return $self->next::method(@_) if not defined $value or not defined $type;

  if (my $key = List::Util::first { $type =~ /$_/i } keys %noquote) {
    return 1 if $noquote{$key}->($value);
  }
  elsif ($self->is_datatype_numeric($type) && $number->($value)) {
    return 1;
  }

  return $self->next::method(@_);
}

# Massage a value before interpolation: Sybase money literals must be
# prefixed with a dollar sign.
sub _prep_interpolated_value {
  my ($self, $type, $value) = @_;

  if ($type =~ /money/i && defined $value) {
    # change a ^ not followed by \$ to a \$
    $value =~ s/^ (?! \$) /\$/x;
  }

  return $value;
}

1;

=head1 NAME

DBIx::Class::Storage::DBI::Sybase::NoBindVars - Storage::DBI subclass for Sybase
without placeholder support

=head1 DESCRIPTION

If you're using this driver then your version of Sybase, or the libraries you
use to connect to it, do not support placeholders.

You can also enable this driver explicitly using:

  my $schema = SchemaClass->clone;
  $schema->storage_type('::DBI::Sybase::NoBindVars');
  $schema->connect($dsn, $user, $pass, \%opts);

See the discussion in L<< DBD::Sybase/Using ? Placeholders & bind parameters to
$sth->execute >> for details on the pros and cons of using placeholders.

One advantage of not using placeholders is that C<select @@identity> will work
for obtaining the last insert id of an C<IDENTITY> column, instead of having to
do C<select max(col)> in a transaction as the base Sybase driver does.

When using this driver, bind variables will be interpolated (properly quoted of
course) into the SQL query itself, without using placeholders.

The caching of prepared statements is also explicitly disabled, as the
interpolation renders it useless.

=head1 AUTHORS

See L<DBIx::Class/CONTRIBUTORS>.

=head1 LICENSE

You may distribute this code under the same terms as Perl itself.

=cut
# vim:sts=2 sw=2:
3 | 3 | use warnings; |
4 | 4 | |
5 | 5 | use base qw/ |
6 | DBIx::Class::Storage::DBI::Sybase::Base | |
7 | DBIx::Class::Storage::DBI::NoBindVars | |
6 | DBIx::Class::Storage::DBI::Sybase::Common | |
7 | DBIx::Class::Storage::DBI::AutoCast | |
8 | 8 | /; |
9 | 9 | use mro 'c3'; |
10 | use Carp::Clan qw/^DBIx::Class/; | |
11 | use List::Util(); | |
12 | use Sub::Name(); | |
13 | use Data::Dumper::Concise(); | |
14 | ||
# Sybase-specific storage state, all simple read/write accessors:
#   _identity           - identity value captured by the most recent insert
#   _blob_log_on_update - log_on_update flag for blob writes (see
#                         connect_call_blob_setup)
#   _writer_storage     - separate connection for insert/blob transactions
#   _is_extra_storage   - true on the auxiliary storages themselves
#   _bulk_storage       - bulkLogin=1 connection for the Sybase bulk API
#   _is_bulk_storage    - true on the bulk storage object
#   _began_bulk_work    - whether BEGIN TRAN was already issued on the bulk
#                         connection (it may only be issued once)
#   _identity_method    - identity retrieval override (e.g. '@@IDENTITY')
__PACKAGE__->mk_group_accessors('simple' =>
    qw/_identity _blob_log_on_update _writer_storage _is_extra_storage
       _bulk_storage _is_bulk_storage _began_bulk_work
       _bulk_disabled_due_to_coderef_connect_info_warned
       _identity_method/
);

# Methods whose calls must also be mirrored onto the writer and bulk
# storages so that all connections stay configured identically.
my @also_proxy_to_extra_storages = qw/
  connect_call_set_auto_cast auto_cast connect_call_blob_setup
  connect_call_datetime_setup

  disconnect _connect_info _sql_maker _sql_maker_opts disable_sth_caching
  auto_savepoint unsafe cursor_class debug debugobj schema
/;
29 | ||
30 | =head1 NAME | |
31 | ||
32 | DBIx::Class::Storage::DBI::Sybase - Sybase support for DBIx::Class | |
33 | ||
34 | =head1 SYNOPSIS | |
35 | ||
36 | This subclass supports L<DBD::Sybase> for real Sybase databases. If you are | |
37 | using an MSSQL database via L<DBD::Sybase>, your storage will be reblessed to | |
38 | L<DBIx::Class::Storage::DBI::Sybase::Microsoft_SQL_Server>. | |
39 | ||
40 | =head1 DESCRIPTION | |
41 | ||
42 | If your version of Sybase does not support placeholders, then your storage | |
43 | will be reblessed to L<DBIx::Class::Storage::DBI::Sybase::NoBindVars>. You can | |
44 | also enable that driver explicitly, see the documentation for more details. | |
45 | ||
46 | With this driver there is unfortunately no way to get the C<last_insert_id> | |
47 | without doing a C<SELECT MAX(col)>. This is done safely in a transaction | |
48 | (locking the table.) See L</INSERTS WITH PLACEHOLDERS>. | |
49 | ||
50 | A recommended L<DBIx::Class::Storage::DBI/connect_info> setting: | |
51 | ||
52 | on_connect_call => [['datetime_setup'], ['blob_setup', log_on_update => 0]] | |
53 | ||
54 | =head1 METHODS | |
55 | ||
56 | =cut | |
10 | 57 | |
# Rebless this generic Sybase storage into a more specific subclass based
# on the server type reported by sp_server_info (e.g. MSSQL reached via
# DBD::Sybase). For real Sybase, additionally pick a placeholder strategy:
#   - FreeTDS with typeless placeholder support: use as-is
#   - FreeTDS with typed placeholders only: enable auto_cast
#   - no placeholder support at all (FreeTDS or old OpenClient): rebless
#     to ::NoBindVars, which interpolates values into the SQL
sub _rebless {
  my $self = shift;

  if (ref($self) eq 'DBIx::Class::Storage::DBI::Sybase') {
    my $dbtype = eval {
      @{$self->_get_dbh->selectrow_arrayref(qq{sp_server_info \@attribute_id=1})}[2]
    } || '';
    # fixed typo in the error message ("estable" -> "establish")
    $self->throw_exception("Unable to establish connection to determine database type: $@")
      if $@;

    $dbtype =~ s/\W/_/gi;
    my $subclass = "DBIx::Class::Storage::DBI::Sybase::${dbtype}";

    if ($dbtype && $self->load_optional_class($subclass)) {
      bless $self, $subclass;
      $self->_rebless;
    } else { # real Sybase
      my $no_bind_vars = 'DBIx::Class::Storage::DBI::Sybase::NoBindVars';

      if ($self->using_freetds) {
        carp <<'EOF' unless $ENV{DBIC_SYBASE_FREETDS_NOWARN};

You are using FreeTDS with Sybase.

We will do our best to support this configuration, but please consider this
support experimental.

TEXT/IMAGE columns will definitely not work.

You are encouraged to recompile DBD::Sybase with the Sybase Open Client libraries
instead.

See perldoc DBIx::Class::Storage::DBI::Sybase for more details.

To turn off this warning set the DBIC_SYBASE_FREETDS_NOWARN environment
variable.
EOF
        if (not $self->_typeless_placeholders_supported) {
          if ($self->_placeholders_supported) {
            $self->auto_cast(1);
          } else {
            $self->ensure_class_loaded($no_bind_vars);
            bless $self, $no_bind_vars;
            $self->_rebless;
          }
        }
      }
      elsif (not $self->_get_dbh->{syb_dynamic_supported}) {
        # not necessarily FreeTDS, but no placeholders nevertheless
        $self->ensure_class_loaded($no_bind_vars);
        bless $self, $no_bind_vars;
        $self->_rebless;
      } elsif (not $self->_typeless_placeholders_supported) {
        # this is highly unlikely, but we check just in case
        $self->auto_cast(1);
      }
    }
  }
}
117 | ||
# Storage initialization hook.
#
# Caps the connection limit at 256, applies the FreeTDS text-size setting,
# and spawns two auxiliary storages sharing this one's connect_info:
#   - _writer_storage: runs insert/blob-update work in its own transaction
#     on a separate connection
#   - _bulk_storage:   a bulkLogin=1 connection for the Sybase bulk API
#     (skipped when connect_info is a coderef, since the DSN string cannot
#     be modified in that case)
sub _init {
  my $self = shift;
  $self->_set_max_connect(256);

  # based on LongReadLen in connect_info
  $self->set_textsize if $self->using_freetds;

  # create storage for insert/(update blob) transactions,
  # unless this is that storage
  return if $self->_is_extra_storage;

  my $writer_storage = (ref $self)->new;

  $writer_storage->_is_extra_storage(1);
  $writer_storage->connect_info($self->connect_info);
  $writer_storage->auto_cast($self->auto_cast);

  $self->_writer_storage($writer_storage);

  # create a bulk storage unless connect_info is a coderef
  return
    if (Scalar::Util::reftype($self->_dbi_connect_info->[0])||'') eq 'CODE';

  my $bulk_storage = (ref $self)->new;

  $bulk_storage->_is_extra_storage(1);
  $bulk_storage->_is_bulk_storage(1); # for special ->disconnect acrobatics
  $bulk_storage->connect_info($self->connect_info);

  # this is why a coderef connect_info cannot be used: we must be able to
  # append the bulkLogin option to the DSN string
  $bulk_storage->_dbi_connect_info->[0] .= ';bulkLogin=1';

  $self->_bulk_storage($bulk_storage);
}
152 | ||
153 | for my $method (@also_proxy_to_extra_storages) { | |
154 | no strict 'refs'; | |
155 | no warnings 'redefine'; | |
156 | ||
157 | my $replaced = __PACKAGE__->can($method); | |
158 | ||
159 | *{$method} = Sub::Name::subname $method => sub { | |
12 | 160 | my $self = shift; |
13 | ||
14 | my $dbtype = eval { | |
15 | @{$self->_get_dbh | |
16 | ->selectrow_arrayref(qq{sp_server_info \@attribute_id=1}) | |
17 | }[2] | |
161 | $self->_writer_storage->$replaced(@_) if $self->_writer_storage; | |
162 | $self->_bulk_storage->$replaced(@_) if $self->_bulk_storage; | |
163 | return $self->$replaced(@_); | |
164 | }; | |
165 | } | |
166 | ||
# Disconnect, with extra housekeeping for the bulk-API connection.
sub disconnect {
  my $self = shift;

  # Even though we call $sth->finish for uses of the bulk API, there's still an
  # "active statement" warning on disconnect, which we throw away here.
  # This is due to the bug described in insert_bulk.
  # Currently a noop because 'prepare' is used instead of 'prepare_cached'.
  local $SIG{__WARN__} = sub {
    warn $_[0] unless $_[0] =~ /active statement/i;
  } if $self->_is_bulk_storage;

  # so that next transaction gets a dbh
  $self->_began_bulk_work(0) if $self->_is_bulk_storage;

  $self->next::method;
}
183 | ||
# Make sure we have CHAINED mode turned on if AutoCommit is off in non-FreeTDS
# DBD::Sybase (since we don't know how DBD::Sybase was compiled.) If however
# we're using FreeTDS, CHAINED mode turns on an implicit transaction which we
# only want when AutoCommit is off.
sub _populate_dbh {
  my $self = shift;

  $self->next::method(@_);

  if ($self->_is_bulk_storage) {
    # this should be cleared on every reconnect
    $self->_began_bulk_work(0);
    return;
  }

  if ($self->using_freetds) {
    my $chained_stmt = $self->_dbh_autocommit
      ? 'SET CHAINED OFF'
      : 'SET CHAINED ON';
    $self->_dbh->do($chained_stmt);
  }
  else {
    $self->_dbh->{syb_chained_txn} = 1;
  }
}
209 | ||
210 | =head2 connect_call_blob_setup | |
211 | ||
212 | Used as: | |
213 | ||
214 | on_connect_call => [ [ 'blob_setup', log_on_update => 0 ] ] | |
215 | ||
216 | Does C<< $dbh->{syb_binary_images} = 1; >> to return C<IMAGE> data as raw binary | |
217 | instead of as a hex string. | |
218 | ||
219 | Recommended. | |
220 | ||
221 | Also sets the C<log_on_update> value for blob write operations. The default is | |
222 | C<1>, but C<0> is better if your database is configured for it. | |
223 | ||
224 | See | |
225 | L<DBD::Sybase/Handling_IMAGE/TEXT_data_with_syb_ct_get_data()/syb_ct_send_data()>. | |
226 | ||
227 | =cut | |
228 | ||
# on_connect_call hook: return IMAGE data as raw binary instead of a hex
# string, and optionally record the log_on_update flag for blob writes.
sub connect_call_blob_setup {
  my ($self, %args) = @_;

  $self->_dbh->{syb_binary_images} = 1;

  $self->_blob_log_on_update($args{log_on_update})
    if exists $args{log_on_update};
}
238 | ||
# True if the given data type name denotes a large-object (TEXT/IMAGE-ish)
# column type that must go through the blob API instead of bind parameters.
sub _is_lob_type {
  my ($self, $type) = @_;

  return $type && $type =~ m/(?: text | image | lob | bytea | binary | memo )/xi;
}
244 | ||
# True if the named column of $source has a large-object data type.
sub _is_lob_column {
  my ($self, $source, $column) = @_;

  my $data_type = $source->column_info($column)->{data_type};

  return $self->_is_lob_type($data_type);
}
250 | ||
# Post-process the SQL generated by the parent class:
#   - wrap inserts/updates that explicitly touch an identity column in
#     SET IDENTITY_INSERT/IDENTITY_UPDATE ON ... OFF statements
#   - for inserts that let the database assign the identity, append the
#     SELECT MAX(col) query whose result _execute captures for
#     last_insert_id (skipped during insert_bulk, which handles
#     identities itself)
sub _prep_for_execute {
  my $self = shift;
  my ($op, $extra_bind, $ident, $args) = @_;

  my ($sql, $bind) = $self->next::method (@_);

  my $table = Scalar::Util::blessed($ident) ? $ident->from : $ident;

  # column metadata for the bound values, used to spot a bound identity col
  my $bind_info = $self->_resolve_column_info(
    $ident, [map $_->[0], @{$bind}]
  );
  my $bound_identity_col = List::Util::first
    { $bind_info->{$_}{is_auto_increment} }
    (keys %$bind_info)
  ;
  my $identity_col = Scalar::Util::blessed($ident) &&
    List::Util::first
    { $ident->column_info($_)->{is_auto_increment} }
    $ident->columns
  ;

  if (($op eq 'insert' && $bound_identity_col) ||
      ($op eq 'update' && exists $args->[0]{$identity_col})) {
    $sql = join ("\n",
      $self->_set_table_identity_sql($op => $table, 'on'),
      $sql,
      $self->_set_table_identity_sql($op => $table, 'off'),
    );
  }

  if ($op eq 'insert' && (not $bound_identity_col) && $identity_col &&
      (not $self->{insert_bulk})) {
    $sql =
      "$sql\n" .
      $self->_fetch_identity_sql($ident, $identity_col);
  }

  return ($sql, $bind);
}
290 | ||
# Build a 'SET IDENTITY_INSERT <table> ON/OFF' (or IDENTITY_UPDATE)
# statement for the given operation.
sub _set_table_identity_sql {
  my ($self, $op, $table, $on_off) = @_;

  my $quoted_table = $self->sql_maker->_quote($table);

  return sprintf 'SET IDENTITY_%s %s %s', uc $op, $quoted_table, uc $on_off;
}
297 | ||
# Stolen from SQLT, with some modifications. This is a makeshift
# solution before a sane type-mapping library is available, thus
# the 'our' for easy overrides.
our %TYPE_MAPPING = (
  number    => 'numeric',
  money     => 'money',
  varchar   => 'varchar',
  varchar2  => 'varchar',
  timestamp => 'datetime',
  text      => 'varchar',
  real      => 'double precision',
  comment   => 'text',
  bit       => 'bit',
  tinyint   => 'smallint',
  float     => 'double precision',
  serial    => 'numeric',
  bigserial => 'numeric',
  boolean   => 'varchar',
  long      => 'varchar',
);

# Map a generic type name to its native Sybase equivalent, upper-cased.
# A trailing IDENTITY qualifier is stripped first; unknown types come
# back unchanged apart from the casing.
sub _native_data_type {
  my ($self, $type) = @_;

  my $key = lc $type;
  $key =~ s/\s*identity//;

  my $native = $TYPE_MAPPING{$key} || $key;

  return uc $native;
}
327 | ||
# SQL used to retrieve the last identity value via the SELECT MAX(col)
# workaround (appended to inserts by _prep_for_execute).
sub _fetch_identity_sql {
  my ($self, $source, $col) = @_;

  my $quoted_col   = $self->sql_maker->_quote($col);
  my $quoted_table = $self->sql_maker->_quote($source->from);

  return "SELECT MAX($quoted_col) FROM $quoted_table";
}
335 | ||
# Execute a prepared operation via the _dbh_execute driver. For inserts,
# the SQL built by _prep_for_execute may end in an identity-fetching
# SELECT; grab its row here and stash it for last_insert_id().
sub _execute {
  my $self = shift;
  my ($op) = @_;

  my ($rv, $sth, @bind) = $self->dbh_do($self->can('_dbh_execute'), @_);

  if ($op eq 'insert') {
    $self->_identity($sth->fetchrow_array);
    $sth->finish;
  }

  return wantarray ? ($rv, $sth, @bind) : $rv;
}
349 | ||
# Return the identity value captured by the most recent insert.
sub last_insert_id {
  my $self = shift;
  return $self->_identity;
}
351 | ||
352 | # handles TEXT/IMAGE and transaction for last_insert_id | |
# handles TEXT/IMAGE and transaction for last_insert_id
#
# Strips blob columns out of the row first (they cannot be bound), then
# decides whether the SELECT MAX(col) identity workaround is needed. If it
# is, and we are not already inside a transaction, the insert plus any blob
# writes run atomically on the separate _writer_storage connection so the
# identity retrieval cannot race with other inserts.
sub insert {
  my $self = shift;
  my ($source, $to_insert) = @_;

  my $identity_col = (List::Util::first
    { $source->column_info($_)->{is_auto_increment} }
    $source->columns) || '';

  # check for empty insert
  # INSERT INTO foo DEFAULT VALUES -- does not work with Sybase
  # try to insert explicit 'DEFAULT's instead (except for identity)
  if (not %$to_insert) {
    for my $col ($source->columns) {
      next if $col eq $identity_col;
      $to_insert->{$col} = \'DEFAULT';
    }
  }

  my $blob_cols = $self->_remove_blob_cols($source, $to_insert);

  # do we need the horrific SELECT MAX(COL) hack?
  my $dumb_last_insert_id =
       $identity_col
    && (not exists $to_insert->{$identity_col})
    && ($self->_identity_method||'') ne '@@IDENTITY';

  my $next = $self->next::can;

  # we are already in a transaction, or there are no blobs
  # and we don't need the PK - just (try to) do it
  if ($self->{transaction_depth}
        || (!$blob_cols && !$dumb_last_insert_id)
  ) {
    return $self->_insert (
      $next, $source, $to_insert, $blob_cols, $identity_col
    );
  }

  # otherwise use the _writer_storage to do the insert+transaction on another
  # connection
  my $guard = $self->_writer_storage->txn_scope_guard;

  my $updated_cols = $self->_writer_storage->_insert (
    $next, $source, $to_insert, $blob_cols, $identity_col
  );

  # propagate the identity captured on the writer connection back to us
  $self->_identity($self->_writer_storage->_identity);

  $guard->commit;

  return $updated_cols;
}
405 | ||
# Shared insert worker: runs the parent-class insert ($next), reconstructs
# the complete row - including the freshly retrieved identity value - and
# then writes any blob columns through the TEXT/IMAGE API.
sub _insert {
  my ($self, $next, $source, $to_insert, $blob_cols, $identity_col) = @_;

  my $updated_cols = $self->$next ($source, $to_insert);

  # full row image, needed by _insert_blobs to locate the row by PK
  my $final_row = {
    ($identity_col ?
      ($identity_col => $self->last_insert_id($source, $identity_col)) : ()),
    %$to_insert,
    %$updated_cols,
  };

  $self->_insert_blobs ($source, $blob_cols, $final_row) if $blob_cols;

  return $updated_cols;
}
422 | ||
# Update handler with TEXT/IMAGE support.
#
# Plain updates are delegated to the parent class. When blob columns are
# present, the non-blob update and the blob writes are done atomically on
# the separate _writer_storage connection; blobs are written before the
# remaining columns in case updating those columns would invalidate the
# search condition.
sub update {
  my $self = shift;
  my ($source, $fields, $where, @rest) = @_;

  my $wantarray = wantarray;

  my $blob_cols = $self->_remove_blob_cols($source, $fields);

  my $table = $source->name;

  my $identity_col = List::Util::first
    { $source->column_info($_)->{is_auto_increment} }
    $source->columns;

  my $is_identity_update = $identity_col && defined $fields->{$identity_col};

  return $self->next::method(@_) unless $blob_cols;

  # If there are any blobs in $where, Sybase will return a descriptive error
  # message.
  # XXX blobs can still be used with a LIKE query, and this should be handled.

  # update+blob update(s) done atomically on separate connection
  $self = $self->_writer_storage;

  my $guard = $self->txn_scope_guard;

  # First update the blob columns to be updated to '' (taken from $fields, where
  # it is originally put by _remove_blob_cols .)
  my %blobs_to_empty = map { ($_ => delete $fields->{$_}) } keys %$blob_cols;

  # We can't only update NULL blobs, because blobs cannot be in the WHERE clause.

  $self->next::method($source, \%blobs_to_empty, $where, @rest);

  # Now update the blobs before the other columns in case the update of other
  # columns makes the search condition invalid.
  $self->_update_blobs($source, $blob_cols, $where);

  my @res;
  if (%$fields) {
    # preserve the caller's original calling context across next::method
    if ($wantarray) {
      @res = $self->next::method(@_);
    }
    elsif (defined $wantarray) {
      $res[0] = $self->next::method(@_);
    }
    else {
      $self->next::method(@_);
    }
  }

  $guard->commit;

  return $wantarray ? @res : $res[0];
}
479 | ||
480 | sub insert_bulk { | |
481 | my $self = shift; | |
482 | my ($source, $cols, $data) = @_; | |
483 | ||
484 | my $identity_col = List::Util::first | |
485 | { $source->column_info($_)->{is_auto_increment} } | |
486 | $source->columns; | |
487 | ||
488 | my $is_identity_insert = (List::Util::first | |
489 | { $_ eq $identity_col } | |
490 | @{$cols} | |
491 | ) ? 1 : 0; | |
492 | ||
493 | my @source_columns = $source->columns; | |
494 | ||
495 | my $use_bulk_api = | |
496 | $self->_bulk_storage && | |
497 | $self->_get_dbh->{syb_has_blk}; | |
498 | ||
499 | if ((not $use_bulk_api) && | |
500 | (Scalar::Util::reftype($self->_dbi_connect_info->[0])||'') eq 'CODE' && | |
501 | (not $self->_bulk_disabled_due_to_coderef_connect_info_warned)) { | |
502 | carp <<'EOF'; | |
503 | Bulk API support disabled due to use of a CODEREF connect_info. Reverting to | |
504 | regular array inserts. | |
505 | EOF | |
506 | $self->_bulk_disabled_due_to_coderef_connect_info_warned(1); | |
507 | } | |
508 | ||
509 | if (not $use_bulk_api) { | |
510 | my $blob_cols = $self->_remove_blob_cols_array($source, $cols, $data); | |
511 | ||
512 | # _execute_array uses a txn anyway, but it ends too early in case we need to | |
513 | # select max(col) to get the identity for inserting blobs. | |
514 | ($self, my $guard) = $self->{transaction_depth} == 0 ? | |
515 | ($self->_writer_storage, $self->_writer_storage->txn_scope_guard) | |
516 | : | |
517 | ($self, undef); | |
518 | ||
519 | local $self->{insert_bulk} = 1; | |
520 | ||
521 | $self->next::method(@_); | |
522 | ||
523 | if ($blob_cols) { | |
524 | if ($is_identity_insert) { | |
525 | $self->_insert_blobs_array ($source, $blob_cols, $cols, $data); | |
526 | } | |
527 | else { | |
528 | my @cols_with_identities = (@$cols, $identity_col); | |
529 | ||
530 | ## calculate identities | |
531 | # XXX This assumes identities always increase by 1, which may or may not | |
532 | # be true. | |
533 | my ($last_identity) = | |
534 | $self->_dbh->selectrow_array ( | |
535 | $self->_fetch_identity_sql($source, $identity_col) | |
536 | ); | |
537 | my @identities = (($last_identity - @$data + 1) .. $last_identity); | |
538 | ||
539 | my @data_with_identities = map [@$_, shift @identities], @$data; | |
540 | ||
541 | $self->_insert_blobs_array ( | |
542 | $source, $blob_cols, \@cols_with_identities, \@data_with_identities | |
543 | ); | |
544 | } | |
545 | } | |
546 | ||
547 | $guard->commit if $guard; | |
548 | ||
549 | return; | |
550 | } | |
551 | ||
552 | # otherwise, use the bulk API | |
553 | ||
554 | # rearrange @$data so that columns are in database order | |
555 | my %orig_idx; | |
556 | @orig_idx{@$cols} = 0..$#$cols; | |
557 | ||
558 | my %new_idx; | |
559 | @new_idx{@source_columns} = 0..$#source_columns; | |
560 | ||
561 | my @new_data; | |
562 | for my $datum (@$data) { | |
563 | my $new_datum = []; | |
564 | for my $col (@source_columns) { | |
565 | # identity data will be 'undef' if not $is_identity_insert | |
566 | # columns with defaults will also be 'undef' | |
567 | $new_datum->[ $new_idx{$col} ] = | |
568 | exists $orig_idx{$col} ? $datum->[ $orig_idx{$col} ] : undef; | |
569 | } | |
570 | push @new_data, $new_datum; | |
571 | } | |
572 | ||
573 | # bcp identity index is 1-based | |
574 | my $identity_idx = exists $new_idx{$identity_col} ? | |
575 | $new_idx{$identity_col} + 1 : 0; | |
576 | ||
577 | ## Set a client-side conversion error handler, straight from DBD::Sybase docs. | |
578 | # This ignores any data conversion errors detected by the client side libs, as | |
579 | # they are usually harmless. | |
580 | my $orig_cslib_cb = DBD::Sybase::set_cslib_cb( | |
581 | Sub::Name::subname insert_bulk => sub { | |
582 | my ($layer, $origin, $severity, $errno, $errmsg, $osmsg, $blkmsg) = @_; | |
583 | ||
584 | return 1 if $errno == 36; | |
585 | ||
586 | carp | |
587 | "Layer: $layer, Origin: $origin, Severity: $severity, Error: $errno" . | |
588 | ($errmsg ? "\n$errmsg" : '') . | |
589 | ($osmsg ? "\n$osmsg" : '') . | |
590 | ($blkmsg ? "\n$blkmsg" : ''); | |
591 | ||
592 | return 0; | |
593 | }); | |
594 | ||
595 | eval { | |
596 | my $bulk = $self->_bulk_storage; | |
597 | ||
598 | my $guard = $bulk->txn_scope_guard; | |
599 | ||
600 | ## XXX get this to work instead of our own $sth | |
601 | ## will require SQLA or *Hacks changes for ordered columns | |
602 | # $bulk->next::method($source, \@source_columns, \@new_data, { | |
603 | # syb_bcp_attribs => { | |
604 | # identity_flag => $is_identity_insert, | |
605 | # identity_column => $identity_idx, | |
606 | # } | |
607 | # }); | |
608 | my $sql = 'INSERT INTO ' . | |
609 | $bulk->sql_maker->_quote($source->name) . ' (' . | |
610 | # colname list is ignored for BCP, but does no harm | |
611 | (join ', ', map $bulk->sql_maker->_quote($_), @source_columns) . ') '. | |
612 | ' VALUES ('. (join ', ', ('?') x @source_columns) . ')'; | |
613 | ||
614 | ## XXX there's a bug in the DBD::Sybase bulk support that makes $sth->finish for | |
615 | ## a prepare_cached statement ineffective. Replace with ->sth when fixed, or | |
616 | ## better yet the version above. Should be fixed in DBD::Sybase . | |
617 | my $sth = $bulk->_get_dbh->prepare($sql, | |
618 | # 'insert', # op | |
619 | { | |
620 | syb_bcp_attribs => { | |
621 | identity_flag => $is_identity_insert, | |
622 | identity_column => $identity_idx, | |
623 | } | |
624 | } | |
625 | ); | |
626 | ||
627 | my @bind = do { | |
628 | my $idx = 0; | |
629 | map [ $_, $idx++ ], @source_columns; | |
18 | 630 | }; |
19 | unless ( $@ ) { | |
20 | $dbtype =~ s/\W/_/gi; | |
21 | my $subclass = "DBIx::Class::Storage::DBI::Sybase::${dbtype}"; | |
22 | if ($self->load_optional_class($subclass) && !$self->isa($subclass)) { | |
23 | bless $self, $subclass; | |
24 | $self->_rebless; | |
631 | ||
632 | $self->_execute_array( | |
633 | $source, $sth, \@bind, \@source_columns, \@new_data, sub { | |
634 | $guard->commit | |
635 | } | |
636 | ); | |
637 | ||
638 | $bulk->_query_end($sql); | |
639 | }; | |
640 | ||
641 | my $exception = $@; | |
642 | DBD::Sybase::set_cslib_cb($orig_cslib_cb); | |
643 | ||
644 | if ($exception =~ /-Y option/) { | |
645 | carp <<"EOF"; | |
646 | ||
647 | Sybase bulk API operation failed due to character set incompatibility, reverting | |
648 | to regular array inserts: | |
649 | ||
650 | *** Try unsetting the LANG environment variable. | |
651 | ||
652 | $exception | |
653 | EOF | |
654 | $self->_bulk_storage(undef); | |
655 | unshift @_, $self; | |
656 | goto \&insert_bulk; | |
657 | } | |
658 | elsif ($exception) { | |
659 | # rollback makes the bulkLogin connection unusable | |
660 | $self->_bulk_storage->disconnect; | |
661 | $self->throw_exception($exception); | |
662 | } | |
663 | } | |
664 | ||
# Run the parent-class array execute, then invoke the optional callback
# (used by insert_bulk to commit the bulk transaction at the right moment).
sub _dbh_execute_array {
  my ($self, $sth, $tuple_status, $cb) = @_;

  my $rows = $self->next::method($sth, $tuple_status);

  $cb->() if $cb;

  return $rows;
}
673 | ||
# Make sure blobs are not bound as placeholders: pull every large-object
# column out of $fields, replacing it with a literal NULL (undef value) or
# '' (defined value) so the regular statement stays valid. Returns a
# hashref of the non-empty blob values keyed by column, or undef if none.
sub _remove_blob_cols {
  my ($self, $source, $fields) = @_;

  my %extracted;

  for my $col (grep { $self->_is_lob_column($source, $_) } keys %$fields) {
    my $blob_val = delete $fields->{$col};

    if (defined $blob_val) {
      $fields->{$col} = \"''";
      # empty-string blobs need no follow-up write
      $extracted{$col} = $blob_val unless $blob_val eq '';
    }
    else {
      $fields->{$col} = \'NULL';
    }
  }

  return %extracted ? \%extracted : undef;
}
696 | ||
697 | # same for insert_bulk | |
698 | sub _remove_blob_cols_array { | |
699 | my ($self, $source, $cols, $data) = @_; | |
700 | ||
701 | my @blob_cols; | |
702 | ||
703 | for my $i (0..$#$cols) { | |
704 | my $col = $cols->[$i]; | |
705 | ||
706 | if ($self->_is_lob_column($source, $col)) { | |
707 | for my $j (0..$#$data) { | |
708 | my $blob_val = delete $data->[$j][$i]; | |
709 | if (not defined $blob_val) { | |
710 | $data->[$j][$i] = \'NULL'; | |
25 | 711 | } |
26 | } | |
27 | } | |
28 | ||
29 | sub _dbh_last_insert_id { | |
30 | my ($self, $dbh, $source, $col) = @_; | |
31 | return ($dbh->selectrow_array('select @@identity'))[0]; | |
712 | else { | |
713 | $data->[$j][$i] = \"''"; | |
714 | $blob_cols[$j][$i] = $blob_val | |
715 | unless $blob_val eq ''; | |
716 | } | |
717 | } | |
718 | } | |
719 | } | |
720 | ||
721 | return @blob_cols ? \@blob_cols : undef; | |
722 | } | |
723 | ||
# Write new TEXT/IMAGE values for every row matched by $where.
# Requires a primary key: if $where pins every PK column the single target
# row is used directly, otherwise the PKs of all matching rows are fetched
# and each row's blobs are written individually.
sub _update_blobs {
  my ($self, $source, $blob_cols, $where) = @_;

  my (@primary_cols) = $source->primary_columns;

  $self->throw_exception('Cannot update TEXT/IMAGE column(s) without a primary key')
    unless @primary_cols;

  # check if we're updating a single row by PK
  my $pk_cols_in_where = 0;
  for my $col (@primary_cols) {
    $pk_cols_in_where++ if defined $where->{$col};
  }
  my @rows;

  if ($pk_cols_in_where == @primary_cols) {
    my %row_to_update;
    @row_to_update{@primary_cols} = @{$where}{@primary_cols};
    @rows = \%row_to_update;
  } else {
    my $cursor = $self->select ($source, \@primary_cols, $where, {});
    @rows = map {
      my %row; @row{@primary_cols} = @$_; \%row
    } $cursor->all;
  }

  for my $row (@rows) {
    $self->_insert_blobs($source, $blob_cols, $row);
  }
}
754 | ||
# Write blob values for one row using the DBD::Sybase ct_* TEXT/IMAGE API.
# $row must contain defined values for all primary-key columns; they are
# used to locate the target row.
sub _insert_blobs {
  my ($self, $source, $blob_cols, $row) = @_;
  my $dbh = $self->_get_dbh;

  my $table = $source->name;

  my %row = %$row;
  my (@primary_cols) = $source->primary_columns;

  $self->throw_exception('Cannot update TEXT/IMAGE column(s) without a primary key')
    unless @primary_cols;

  $self->throw_exception('Cannot update TEXT/IMAGE column(s) without primary key values')
    if ((grep { defined $row{$_} } @primary_cols) != @primary_cols);

  for my $col (keys %$blob_cols) {
    my $blob = $blob_cols->{$col};

    my %where = map { ($_, $row{$_}) } @primary_cols;

    # select the blob column itself so the statement handle can be used
    # for the ct_data_info calls below
    my $cursor = $self->select ($source, [$col], \%where, {});
    $cursor->next;
    my $sth = $cursor->sth;

    if (not $sth) {

      $self->throw_exception(
          "Could not find row in table '$table' for blob update:\n"
        . Data::Dumper::Concise::Dumper (\%where)
      );
    }

    eval {
      # NOTE(review): presumably CS_GET/ct_data_info retrieves the text
      # pointer for the column - confirm against the DBD::Sybase docs
      do {
        $sth->func('CS_GET', 1, 'ct_data_info') or die $sth->errstr;
      } while $sth->fetch;

      $sth->func('ct_prepare_send') or die $sth->errstr;

      # blob writes are logged unless explicitly disabled via
      # connect_call_blob_setup
      my $log_on_update = $self->_blob_log_on_update;
      $log_on_update = 1 if not defined $log_on_update;

      $sth->func('CS_SET', 1, {
        total_txtlen => length($blob),
        log_on_update => $log_on_update
      }, 'ct_data_info') or die $sth->errstr;

      $sth->func($blob, length($blob), 'ct_send_data') or die $sth->errstr;

      $sth->func('ct_finish_send') or die $sth->errstr;
    };
    my $exception = $@;
    $sth->finish if $sth;
    if ($exception) {
      if ($self->using_freetds) {
        $self->throw_exception (
          'TEXT/IMAGE operation failed, probably because you are using FreeTDS: '
          . $exception
        );
      } else {
        $self->throw_exception($exception);
      }
    }
  }
}
820 | ||
# Row-by-row blob writer for insert_bulk: rebuild each row as a hash and
# delegate to _insert_blobs with just that row's non-empty blob values.
sub _insert_blobs_array {
  my ($self, $source, $blob_cols, $cols, $data) = @_;

  for my $row_idx (0 .. $#$data) {
    my %row;
    @row{ @$cols } = @{ $data->[$row_idx] };

    my %blob_vals;
    for my $col_idx (0 .. $#$cols) {
      $blob_vals{ $cols->[$col_idx] } = $blob_cols->[$row_idx][$col_idx]
        if exists $blob_cols->[$row_idx][$col_idx];
    }

    $self->_insert_blobs ($source, \%blob_vals, \%row);
  }
}
840 | ||
841 | =head2 connect_call_datetime_setup | |
842 | ||
843 | Used as: | |
844 | ||
845 | on_connect_call => 'datetime_setup' | |
846 | ||
847 | In L<DBIx::Class::Storage::DBI/connect_info> to set: | |
848 | ||
849 | $dbh->syb_date_fmt('ISO_strict'); # output fmt: 2004-08-21T14:36:48.080Z | |
850 | $dbh->do('set dateformat mdy'); # input fmt: 08/13/1979 18:08:55.080 | |
851 | ||
852 | On connection for use with L<DBIx::Class::InflateColumn::DateTime>, using | |
853 | L<DateTime::Format::Sybase>, which you will need to install. | |
854 | ||
855 | This works for both C<DATETIME> and C<SMALLDATETIME> columns, although | |
856 | C<SMALLDATETIME> columns only have minute precision. | |
857 | ||
858 | =cut | |
859 | ||
{
  # warn about an outdated DBD::Sybase at most once per process
  my $old_dbd_warned = 0;

  # on_connect_call hook: set ISO_strict output format and mdy input
  # format for DATETIME/SMALLDATETIME columns (see the POD above).
  sub connect_call_datetime_setup {
    my $self = shift;
    my $dbh = $self->_get_dbh;

    if ($dbh->can('syb_date_fmt')) {
      # amazingly, this works with FreeTDS
      $dbh->syb_date_fmt('ISO_strict');
    } elsif (not $old_dbd_warned) {
      carp "Your DBD::Sybase is too old to support ".
      "DBIx::Class::InflateColumn::DateTime, please upgrade!";
      $old_dbd_warned = 1;
    }

    $dbh->do('SET DATEFORMAT mdy');

    1;
  }
}
881 | ||
# DateTime parser class used by InflateColumn::DateTime for this storage.
sub datetime_parser_type { return "DateTime::Format::Sybase" }
883 | ||
884 | # ->begin_work and such have no effect with FreeTDS but we run them anyway to | |
885 | # let the DBD keep any state it needs to. | |
886 | # | |
887 | # If they ever do start working, the extra statements will do no harm (because | |
888 | # Sybase supports nested transactions.) | |
889 | ||
# Begin a transaction, with an explicit BEGIN TRAN under FreeTDS where the
# DBD-level begin_work has no effect.
sub _dbh_begin_work {
  my $self = shift;

  # A bulkLogin=1 connection is permanently inside a transaction and may
  # issue BEGIN TRAN only once - skip if bulk work has already begun, but
  # still fall through when there is no $dbh yet so one gets created.
  return if $self->_is_bulk_storage && $self->_dbh && $self->_began_bulk_work;

  $self->next::method(@_);

  $self->_get_dbh->do('BEGIN TRAN') if $self->using_freetds;

  $self->_began_bulk_work(1) if $self->_is_bulk_storage;
}
905 | ||
# Commit, issuing an explicit COMMIT first under FreeTDS where the
# DBD-level commit has no effect.
sub _dbh_commit {
  my $self = shift;

  $self->_dbh->do('COMMIT') if $self->using_freetds;

  return $self->next::method(@_);
}
913 | ||
# Roll back, issuing an explicit ROLLBACK first under FreeTDS where the
# DBD-level rollback has no effect.
sub _dbh_rollback {
  my $self = shift;

  $self->_dbh->do('ROLLBACK') if $self->using_freetds;

  return $self->next::method(@_);
}
921 | ||
922 | # savepoint support using ASE syntax | |
923 | ||
924 | sub _svp_begin { | |
925 | my ($self, $name) = @_; | |
926 | ||
927 | $self->_get_dbh->do("SAVE TRANSACTION $name"); | |
928 | } | |
929 | ||
930 | # A new SAVE TRANSACTION with the same name releases the previous one. | |
931 | sub _svp_release { 1 } | |
932 | ||
933 | sub _svp_rollback { | |
934 | my ($self, $name) = @_; | |
935 | ||
936 | $self->_get_dbh->do("ROLLBACK TRANSACTION $name"); | |
32 | 937 | } |
33 | 938 | |
34 | 939 | 1; |
35 | 940 | |
36 | =head1 NAME | |
37 | ||
38 | DBIx::Class::Storage::DBI::Sybase - Storage::DBI subclass for Sybase | |
39 | ||
40 | =head1 SYNOPSIS | |
41 | ||
42 | This subclass supports L<DBD::Sybase> for real Sybase databases. If | |
43 | you are using an MSSQL database via L<DBD::Sybase>, see | |
44 | L<DBIx::Class::Storage::DBI::Sybase::MSSQL>. | |
45 | ||
46 | =head1 CAVEATS | |
47 | ||
48 | This storage driver uses L<DBIx::Class::Storage::DBI::NoBindVars> as a base. | |
49 | This means that bind variables will be interpolated (properly quoted of course) | |
50 | into the SQL query itself, without using bind placeholders. | |
51 | ||
52 | More importantly this means that caching of prepared statements is explicitly | |
53 | disabled, as the interpolation renders it useless. | |
54 | ||
55 | =head1 AUTHORS | |
56 | ||
57 | Brandon L Black <blblack@gmail.com> | |
58 | ||
59 | Justin Hunter <justin.d.hunter@gmail.com> | |
941 | =head1 Schema::Loader Support | |
942 | ||
943 | There is an experimental branch of L<DBIx::Class::Schema::Loader> that will | |
944 | allow you to dump a schema from most (if not all) versions of Sybase. | |
945 | ||
946 | It is available via subversion from: | |
947 | ||
948 | http://dev.catalyst.perl.org/repos/bast/branches/DBIx-Class-Schema-Loader/current/ | |
949 | ||
950 | =head1 FreeTDS | |
951 | ||
952 | This driver supports L<DBD::Sybase> compiled against FreeTDS | |
953 | (L<http://www.freetds.org/>) to the best of our ability, however it is | |
954 | recommended that you recompile L<DBD::Sybase> against the Sybase Open Client | |
955 | libraries. They are a part of the Sybase ASE distribution: | |
956 | ||
957 | The Open Client FAQ is here: | |
958 | L<http://www.isug.com/Sybase_FAQ/ASE/section7.html>. | |
959 | ||
960 | Sybase ASE for Linux (which comes with the Open Client libraries) may be | |
961 | downloaded here: L<http://response.sybase.com/forms/ASE_Linux_Download>. | |
962 | ||
963 | To see if you're using FreeTDS check C<< $schema->storage->using_freetds >>, or run: | |
964 | ||
965 | perl -MDBI -le 'my $dbh = DBI->connect($dsn, $user, $pass); print $dbh->{syb_oc_version}' | |
966 | ||
967 | Some versions of the libraries involved will not support placeholders, in which | |
968 | case the storage will be reblessed to | |
969 | L<DBIx::Class::Storage::DBI::Sybase::NoBindVars>. | |
970 | ||
971 | In some configurations, placeholders will work but will throw implicit type | |
972 | conversion errors for anything that's not expecting a string. In such a case, | |
973 | the C<auto_cast> option from L<DBIx::Class::Storage::DBI::AutoCast> is | |
974 | automatically set, which you may enable on connection with | |
975 | L<DBIx::Class::Storage::DBI::AutoCast/connect_call_set_auto_cast>. The type info | |
976 | for the C<CAST>s is taken from the L<DBIx::Class::ResultSource/data_type> | |
977 | definitions in your Result classes, and are mapped to a Sybase type (if it isn't | |
978 | already) using a mapping based on L<SQL::Translator>. | |
979 | ||
980 | In other configurations, placeholders will work just as they do with the Sybase | |
981 | Open Client libraries. | |
982 | ||
983 | Inserts or updates of TEXT/IMAGE columns will B<NOT> work with FreeTDS. | |
984 | ||
985 | =head1 INSERTS WITH PLACEHOLDERS | |
986 | ||
987 | With placeholders enabled, inserts are done in a transaction so that there are | |
988 | no concurrency issues with getting the inserted identity value using | |
989 | C<SELECT MAX(col)>, which is the only way to get the C<IDENTITY> value in this | |
990 | mode. | |
991 | ||
992 | In addition, they are done on a separate connection so that it's possible to | |
993 | have active cursors when doing an insert. | |
994 | ||
995 | When using C<DBIx::Class::Storage::DBI::Sybase::NoBindVars> transactions are | |
996 | disabled, as there are no concurrency issues with C<SELECT @@IDENTITY> as it's a | |
997 | session variable. | |
998 | ||
999 | =head1 TRANSACTIONS | |
1000 | ||
1001 | Due to limitations of the TDS protocol, L<DBD::Sybase>, or both; you cannot | |
1002 | begin a transaction while there are active cursors; nor can you use multiple | |
1003 | active cursors within a transaction. An active cursor is, for example, a | |
1004 | L<ResultSet|DBIx::Class::ResultSet> that has been executed using C<next> or | |
1005 | C<first> but has not been exhausted or L<reset|DBIx::Class::ResultSet/reset>. | |
1006 | ||
1007 | For example, this will not work: | |
1008 | ||
1009 | $schema->txn_do(sub { | |
1010 | my $rs = $schema->resultset('Book'); | |
1011 | while (my $row = $rs->next) { | |
1012 | $schema->resultset('MetaData')->create({ | |
1013 | book_id => $row->id, | |
1014 | ... | |
1015 | }); | |
1016 | } | |
1017 | }); | |
1018 | ||
1019 | This won't either: | |
1020 | ||
1021 | my $first_row = $large_rs->first; | |
1022 | $schema->txn_do(sub { ... }); | |
1023 | ||
1024 | Transactions done for inserts in C<AutoCommit> mode when placeholders are in use | |
1025 | are not affected, as they are done on an extra database handle. | |
1026 | ||
1027 | Some workarounds: | |
1028 | ||
1029 | =over 4 | |
1030 | ||
1031 | =item * use L<DBIx::Class::Storage::DBI::Replicated> | |
1032 | ||
1033 | =item * L<connect|DBIx::Class::Schema/connect> another L<Schema|DBIx::Class::Schema> | |
1034 | ||
1035 | =item * load the data from your cursor with L<DBIx::Class::ResultSet/all> | |
1036 | ||
1037 | =back | |
1038 | ||
1039 | =head1 MAXIMUM CONNECTIONS | |
1040 | ||
1041 | The TDS protocol makes separate connections to the server for active statements | |
1042 | in the background. By default the number of such connections is limited to 25, | |
1043 | on both the client side and the server side. | |
1044 | ||
1045 | This is a bit too low for a complex L<DBIx::Class> application, so on connection | |
1046 | the client side setting is set to C<256> (see L<DBD::Sybase/maxConnect>.) You | |
1047 | can override it to whatever setting you like in the DSN. | |
1048 | ||
1049 | See | |
1050 | L<http://infocenter.sybase.com/help/index.jsp?topic=/com.sybase.help.ase_15.0.sag1/html/sag1/sag1272.htm> | |
1051 | for information on changing the setting on the server side. | |
1052 | ||
1053 | =head1 DATES | |
1054 | ||
1055 | See L</connect_call_datetime_setup> to setup date formats | |
1056 | for L<DBIx::Class::InflateColumn::DateTime>. | |
1057 | ||
1058 | =head1 TEXT/IMAGE COLUMNS | |
1059 | ||
1060 | L<DBD::Sybase> compiled with FreeTDS will B<NOT> allow you to insert or update | |
1061 | C<TEXT/IMAGE> columns. | |
1062 | ||
1063 | Setting C<< $dbh->{LongReadLen} >> will also not work with FreeTDS; use either: | |
1064 | ||
1065 | $schema->storage->dbh->do("SET TEXTSIZE $bytes"); | |
1066 | ||
1067 | or | |
1068 | ||
1069 | $schema->storage->set_textsize($bytes); | |
1070 | ||
1071 | instead. | |
1072 | ||
1073 | However, the C<LongReadLen> you pass in | |
1074 | L<DBIx::Class::Storage::DBI/connect_info> is used to execute the equivalent | |
1075 | C<SET TEXTSIZE> command on connection. | |
1076 | ||
1077 | See L</connect_call_blob_setup> for a L<DBIx::Class::Storage::DBI/connect_info> | |
1078 | setting you need to work with C<IMAGE> columns. | |
1079 | ||
1080 | =head1 BULK API | |
1081 | ||
1082 | The experimental L<DBD::Sybase> Bulk API support is used for | |
1083 | L<populate|DBIx::Class::ResultSet/populate> in B<void> context, in a transaction | |
1084 | on a separate connection. | |
1085 | ||
1086 | To use this feature effectively, use a large number of rows for each | |
1087 | L<populate|DBIx::Class::ResultSet/populate> call, eg.: | |
1088 | ||
1089 | while (my $rows = $data_source->get_100_rows()) { | |
1090 | $rs->populate($rows); | |
1091 | } | |
1092 | ||
1093 | B<NOTE:> the L<add_columns|DBIx::Class::ResultSource/add_columns> | |
1094 | calls in your C<Result> classes B<must> list columns in database order for this | |
1095 | to work. Also, you may have to unset the C<LANG> environment variable before | |
1096 | loading your app, if it doesn't match the character set of your database. | |
1097 | ||
1098 | When inserting IMAGE columns using this method, you'll need to use | |
1099 | L</connect_call_blob_setup> as well. | |
1100 | ||
1101 | =head1 TODO | |
1102 | ||
1103 | =over | |
1104 | ||
1105 | =item * | |
1106 | ||
1107 | Transitions to AutoCommit=0 (starting a transaction) mode by exhausting | |
1108 | any active cursors, using eager cursors. | |
1109 | ||
1110 | =item * | |
1111 | ||
1112 | Real limits and limited counts using stored procedures deployed on startup. | |
1113 | ||
1114 | =item * | |
1115 | ||
1116 | Adaptive Server Anywhere (ASA) support, with possible SQLA::Limit support. | |
1117 | ||
1118 | =item * | |
1119 | ||
1120 | Blob update with a LIKE query on a blob, without invalidating the WHERE condition. | |
1121 | ||
1122 | =item * | |
1123 | ||
1124 | bulk_insert using prepare_cached (see comments.) | |
1125 | ||
1126 | =back | |
1127 | ||
1128 | =head1 AUTHOR | |
1129 | ||
1130 | See L<DBIx::Class/CONTRIBUTORS>. | |
60 | 1131 | |
61 | 1132 | =head1 LICENSE |
62 | 1133 | |
63 | 1134 | You may distribute this code under the same terms as Perl itself. |
64 | 1135 | |
65 | 1136 | =cut |
1137 | # vim:sts=2 sw=2: |
12 | 12 | use DBIx::Class::Storage::Statistics; |
13 | 13 | use Scalar::Util(); |
14 | 14 | use List::Util(); |
15 | use Data::Dumper::Concise(); | |
15 | 16 | |
16 | 17 | # what version of sqlt do we require if deploy() without a ddl_dir is invoked |
17 | 18 | # when changing also adjust the corresponding author_require in Makefile.PL |
37 | 38 | |
38 | 39 | __PACKAGE__->mk_group_accessors('inherited' => qw/sql_maker_class/); |
39 | 40 | __PACKAGE__->sql_maker_class('DBIx::Class::SQLAHacks'); |
41 | ||
42 | ||
43 | # Each of these methods need _determine_driver called before itself | |
44 | # in order to function reliably. This is a purely DRY optimization | |
45 | my @rdbms_specific_methods = qw/ | |
46 | sqlt_type | |
47 | build_datetime_parser | |
48 | datetime_parser_type | |
49 | ||
50 | insert | |
51 | insert_bulk | |
52 | update | |
53 | delete | |
54 | select | |
55 | select_single | |
56 | /; | |
57 | ||
58 | for my $meth (@rdbms_specific_methods) { | |
59 | ||
60 | my $orig = __PACKAGE__->can ($meth) | |
61 | or next; | |
62 | ||
63 | no strict qw/refs/; | |
64 | no warnings qw/redefine/; | |
65 | *{__PACKAGE__ ."::$meth"} = sub { | |
66 | if (not $_[0]->_driver_determined) { | |
67 | $_[0]->_determine_driver; | |
68 | goto $_[0]->can($meth); | |
69 | } | |
70 | $orig->(@_); | |
71 | }; | |
72 | } | |
40 | 73 | |
41 | 74 | |
42 | 75 | =head1 NAME |
711 | 744 | # Storage subclasses should override this |
712 | 745 | sub with_deferred_fk_checks { |
713 | 746 | my ($self, $sub) = @_; |
714 | ||
715 | 747 | $sub->(); |
716 | 748 | } |
717 | 749 | |
877 | 909 | my ($self) = @_; |
878 | 910 | |
879 | 911 | if ((not $self->_driver_determined) && (not $self->{_in_determine_driver})) { |
880 | my $started_unconnected = 0; | |
912 | my $started_connected = 0; | |
881 | 913 | local $self->{_in_determine_driver} = 1; |
882 | 914 | |
883 | 915 | if (ref($self) eq __PACKAGE__) { |
884 | 916 | my $driver; |
885 | 917 | if ($self->_dbh) { # we are connected |
886 | 918 | $driver = $self->_dbh->{Driver}{Name}; |
919 | $started_connected = 1; | |
887 | 920 | } else { |
888 | 921 | # if connect_info is a CODEREF, we have no choice but to connect |
889 | 922 | if (ref $self->_dbi_connect_info->[0] && |
895 | 928 | # try to use dsn to not require being connected, the driver may still |
896 | 929 | # force a connection in _rebless to determine version |
897 | 930 | ($driver) = $self->_dbi_connect_info->[0] =~ /dbi:([^:]+):/i; |
898 | $started_unconnected = 1; | |
899 | 931 | } |
900 | 932 | } |
901 | 933 | |
912 | 944 | $self->_init; # run driver-specific initializations |
913 | 945 | |
914 | 946 | $self->_run_connection_actions |
915 | if $started_unconnected && defined $self->_dbh; | |
947 | if !$started_connected && defined $self->_dbh; | |
916 | 948 | } |
917 | 949 | } |
918 | 950 | |
1143 | 1175 | sub txn_commit { |
1144 | 1176 | my $self = shift; |
1145 | 1177 | if ($self->{transaction_depth} == 1) { |
1146 | my $dbh = $self->_dbh; | |
1147 | 1178 | $self->debugobj->txn_commit() |
1148 | 1179 | if ($self->debug); |
1149 | 1180 | $self->_dbh_commit; |
1159 | 1190 | |
1160 | 1191 | sub _dbh_commit { |
1161 | 1192 | my $self = shift; |
1162 | $self->_dbh->commit; | |
1193 | my $dbh = $self->_dbh | |
1194 | or $self->throw_exception('cannot COMMIT on a disconnected handle'); | |
1195 | $dbh->commit; | |
1163 | 1196 | } |
1164 | 1197 | |
1165 | 1198 | sub txn_rollback { |
1196 | 1229 | |
1197 | 1230 | sub _dbh_rollback { |
1198 | 1231 | my $self = shift; |
1199 | $self->_dbh->rollback; | |
1232 | my $dbh = $self->_dbh | |
1233 | or $self->throw_exception('cannot ROLLBACK on a disconnected handle'); | |
1234 | $dbh->rollback; | |
1200 | 1235 | } |
1201 | 1236 | |
1202 | 1237 | # This used to be the top-half of _execute. It was split out to make it |
1299 | 1334 | sub insert { |
1300 | 1335 | my ($self, $source, $to_insert) = @_; |
1301 | 1336 | |
1302 | # redispatch to insert method of storage we reblessed into, if necessary | |
1303 | if (not $self->_driver_determined) { | |
1304 | $self->_determine_driver; | |
1305 | goto $self->can('insert'); | |
1306 | } | |
1307 | ||
1308 | 1337 | my $ident = $source->from; |
1309 | 1338 | my $bind_attributes = $self->source_bind_attributes($source); |
1310 | 1339 | |
1336 | 1365 | sub insert_bulk { |
1337 | 1366 | my ($self, $source, $cols, $data) = @_; |
1338 | 1367 | |
1339 | # redispatch to insert_bulk method of storage we reblessed into, if necessary | |
1340 | if (not $self->_driver_determined) { | |
1341 | $self->_determine_driver; | |
1342 | goto $self->can('insert_bulk'); | |
1343 | } | |
1344 | ||
1345 | 1368 | my %colvalues; |
1346 | my $table = $source->from; | |
1347 | 1369 | @colvalues{@$cols} = (0..$#$cols); |
1348 | my ($sql, @bind) = $self->sql_maker->insert($table, \%colvalues); | |
1349 | ||
1350 | $self->_query_start( $sql, @bind ); | |
1370 | ||
1371 | for my $i (0..$#$cols) { | |
1372 | my $first_val = $data->[0][$i]; | |
1373 | next unless ref $first_val eq 'SCALAR'; | |
1374 | ||
1375 | $colvalues{ $cols->[$i] } = $first_val; | |
1376 | } | |
1377 | ||
1378 | # check for bad data and stringify stringifiable objects | |
1379 | my $bad_slice = sub { | |
1380 | my ($msg, $col_idx, $slice_idx) = @_; | |
1381 | $self->throw_exception(sprintf "%s for column '%s' in populate slice:\n%s", | |
1382 | $msg, | |
1383 | $cols->[$col_idx], | |
1384 | do { | |
1385 | local $Data::Dumper::Maxdepth = 1; # don't dump objects, if any | |
1386 | Data::Dumper::Concise::Dumper({ | |
1387 | map { $cols->[$_] => $data->[$slice_idx][$_] } (0 .. $#$cols) | |
1388 | }), | |
1389 | } | |
1390 | ); | |
1391 | }; | |
1392 | ||
1393 | for my $datum_idx (0..$#$data) { | |
1394 | my $datum = $data->[$datum_idx]; | |
1395 | ||
1396 | for my $col_idx (0..$#$cols) { | |
1397 | my $val = $datum->[$col_idx]; | |
1398 | my $sqla_bind = $colvalues{ $cols->[$col_idx] }; | |
1399 | my $is_literal_sql = (ref $sqla_bind) eq 'SCALAR'; | |
1400 | ||
1401 | if ($is_literal_sql) { | |
1402 | if (not ref $val) { | |
1403 | $bad_slice->('bind found where literal SQL expected', $col_idx, $datum_idx); | |
1404 | } | |
1405 | elsif ((my $reftype = ref $val) ne 'SCALAR') { | |
1406 | $bad_slice->("$reftype reference found where literal SQL expected", | |
1407 | $col_idx, $datum_idx); | |
1408 | } | |
1409 | elsif ($$val ne $$sqla_bind){ | |
1410 | $bad_slice->("inconsistent literal SQL value, expecting: '$$sqla_bind'", | |
1411 | $col_idx, $datum_idx); | |
1412 | } | |
1413 | } | |
1414 | elsif (my $reftype = ref $val) { | |
1415 | require overload; | |
1416 | if (overload::Method($val, '""')) { | |
1417 | $datum->[$col_idx] = "".$val; | |
1418 | } | |
1419 | else { | |
1420 | $bad_slice->("$reftype reference found where bind expected", | |
1421 | $col_idx, $datum_idx); | |
1422 | } | |
1423 | } | |
1424 | } | |
1425 | } | |
1426 | ||
1427 | my ($sql, $bind) = $self->_prep_for_execute ( | |
1428 | 'insert', undef, $source, [\%colvalues] | |
1429 | ); | |
1430 | my @bind = @$bind; | |
1431 | ||
1432 | my $empty_bind = 1 if (not @bind) && | |
1433 | (grep { ref $_ eq 'SCALAR' } values %colvalues) == @$cols; | |
1434 | ||
1435 | if ((not @bind) && (not $empty_bind)) { | |
1436 | $self->throw_exception( | |
1437 | 'Cannot insert_bulk without support for placeholders' | |
1438 | ); | |
1439 | } | |
1440 | ||
1441 | $self->_query_start( $sql, ['__BULK__'] ); | |
1351 | 1442 | my $sth = $self->sth($sql); |
1352 | 1443 | |
1353 | # @bind = map { ref $_ ? ''.$_ : $_ } @bind; # stringify args | |
1444 | my $rv = do { | |
1445 | if ($empty_bind) { | |
1446 | # bind_param_array doesn't work if there are no binds | |
1447 | $self->_dbh_execute_inserts_with_no_binds( $sth, scalar @$data ); | |
1448 | } | |
1449 | else { | |
1450 | # @bind = map { ref $_ ? ''.$_ : $_ } @bind; # stringify args | |
1451 | $self->_execute_array( $source, $sth, \@bind, $cols, $data ); | |
1452 | } | |
1453 | }; | |
1454 | ||
1455 | $self->_query_end( $sql, ['__BULK__'] ); | |
1456 | ||
1457 | return (wantarray ? ($rv, $sth, @bind) : $rv); | |
1458 | } | |
1459 | ||
1460 | sub _execute_array { | |
1461 | my ($self, $source, $sth, $bind, $cols, $data, @extra) = @_; | |
1462 | ||
1463 | my $guard = $self->txn_scope_guard unless $self->{transaction_depth} != 0; | |
1354 | 1464 | |
1355 | 1465 | ## This must be an arrayref, else nothing works! |
1356 | 1466 | my $tuple_status = []; |
1361 | 1471 | ## Bind the values and execute |
1362 | 1472 | my $placeholder_index = 1; |
1363 | 1473 | |
1364 | foreach my $bound (@bind) { | |
1474 | foreach my $bound (@$bind) { | |
1365 | 1475 | |
1366 | 1476 | my $attributes = {}; |
1367 | 1477 | my ($column_name, $data_index) = @$bound; |
1376 | 1486 | $sth->bind_param_array( $placeholder_index, [@data], $attributes ); |
1377 | 1487 | $placeholder_index++; |
1378 | 1488 | } |
1379 | my $rv = eval { $sth->execute_array({ArrayTupleStatus => $tuple_status}) }; | |
1380 | if (my $err = $@) { | |
1489 | ||
1490 | my $rv = eval { | |
1491 | $self->_dbh_execute_array($sth, $tuple_status, @extra); | |
1492 | }; | |
1493 | my $err = $@ || $sth->errstr; | |
1494 | ||
1495 | # Statement must finish even if there was an exception. | |
1496 | eval { $sth->finish }; | |
1497 | $err = $@ unless $err; | |
1498 | ||
1499 | if ($err) { | |
1381 | 1500 | my $i = 0; |
1382 | 1501 | ++$i while $i <= $#$tuple_status && !ref $tuple_status->[$i]; |
1383 | 1502 | |
1384 | $self->throw_exception($sth->errstr || "Unexpected populate error: $err") | |
1503 | $self->throw_exception("Unexpected populate error: $err") | |
1385 | 1504 | if ($i > $#$tuple_status); |
1386 | 1505 | |
1387 | require Data::Dumper; | |
1388 | local $Data::Dumper::Terse = 1; | |
1389 | local $Data::Dumper::Indent = 1; | |
1390 | local $Data::Dumper::Useqq = 1; | |
1391 | local $Data::Dumper::Quotekeys = 0; | |
1392 | local $Data::Dumper::Sortkeys = 1; | |
1393 | ||
1394 | 1506 | $self->throw_exception(sprintf "%s for populate slice:\n%s", |
1395 | $tuple_status->[$i][1], | |
1396 | Data::Dumper::Dumper( | |
1397 | { map { $cols->[$_] => $data->[$i][$_] } (0 .. $#$cols) } | |
1398 | ), | |
1507 | ($tuple_status->[$i][1] || $err), | |
1508 | Data::Dumper::Concise::Dumper({ | |
1509 | map { $cols->[$_] => $data->[$i][$_] } (0 .. $#$cols) | |
1510 | }), | |
1399 | 1511 | ); |
1400 | 1512 | } |
1401 | $self->throw_exception($sth->errstr) if !$rv; | |
1402 | ||
1403 | $self->_query_end( $sql, @bind ); | |
1404 | return (wantarray ? ($rv, $sth, @bind) : $rv); | |
1513 | ||
1514 | $guard->commit if $guard; | |
1515 | ||
1516 | return $rv; | |
1517 | } | |
1518 | ||
1519 | sub _dbh_execute_array { | |
1520 | my ($self, $sth, $tuple_status, @extra) = @_; | |
1521 | ||
1522 | return $sth->execute_array({ArrayTupleStatus => $tuple_status}); | |
1523 | } | |
1524 | ||
1525 | sub _dbh_execute_inserts_with_no_binds { | |
1526 | my ($self, $sth, $count) = @_; | |
1527 | ||
1528 | my $guard = $self->txn_scope_guard unless $self->{transaction_depth} != 0; | |
1529 | ||
1530 | eval { | |
1531 | my $dbh = $self->_get_dbh; | |
1532 | local $dbh->{RaiseError} = 1; | |
1533 | local $dbh->{PrintError} = 0; | |
1534 | ||
1535 | $sth->execute foreach 1..$count; | |
1536 | }; | |
1537 | my $exception = $@; | |
1538 | ||
1539 | # Make sure statement is finished even if there was an exception. | |
1540 | eval { $sth->finish }; | |
1541 | $exception = $@ unless $exception; | |
1542 | ||
1543 | $self->throw_exception($exception) if $exception; | |
1544 | ||
1545 | $guard->commit if $guard; | |
1546 | ||
1547 | return $count; | |
1405 | 1548 | } |
1406 | 1549 | |
1407 | 1550 | sub update { |
1408 | 1551 | my ($self, $source, @args) = @_; |
1409 | 1552 | |
1410 | # redispatch to update method of storage we reblessed into, if necessary | |
1411 | if (not $self->_driver_determined) { | |
1412 | $self->_determine_driver; | |
1413 | goto $self->can('update'); | |
1414 | } | |
1415 | ||
1416 | my $bind_attributes = $self->source_bind_attributes($source); | |
1417 | ||
1418 | return $self->_execute('update' => [], $source, $bind_attributes, @args); | |
1553 | my $bind_attrs = $self->source_bind_attributes($source); | |
1554 | ||
1555 | return $self->_execute('update' => [], $source, $bind_attrs, @args); | |
1419 | 1556 | } |
1420 | 1557 | |
1421 | 1558 | |
1422 | 1559 | sub delete { |
1423 | my $self = shift @_; | |
1424 | my $source = shift @_; | |
1425 | $self->_determine_driver; | |
1560 | my ($self, $source, @args) = @_; | |
1561 | ||
1426 | 1562 | my $bind_attrs = $self->source_bind_attributes($source); |
1427 | 1563 | |
1428 | return $self->_execute('delete' => [], $source, $bind_attrs, @_); | |
1564 | return $self->_execute('delete' => [], $source, $bind_attrs, @args); | |
1565 | } | |
1566 | ||
1567 | # Most databases do not allow aliasing of tables in UPDATE/DELETE. Thus | |
1568 | # a condition containing 'me' or other table prefixes will not work | |
1569 | # at all. What this code tries to do (badly) is introspect the condition | |
1570 | # and remove all column qualifiers. If it bails out early (returns undef) | |
1571 | # the calling code should try another approach (e.g. a subquery) | |
1572 | sub _strip_cond_qualifiers { | |
1573 | my ($self, $where) = @_; | |
1574 | ||
1575 | my $cond = {}; | |
1576 | ||
1577 | # No-op. No condition, we're updating/deleting everything | |
1578 | return $cond unless $where; | |
1579 | ||
1580 | if (ref $where eq 'ARRAY') { | |
1581 | $cond = [ | |
1582 | map { | |
1583 | my %hash; | |
1584 | foreach my $key (keys %{$_}) { | |
1585 | $key =~ /([^.]+)$/; | |
1586 | $hash{$1} = $_->{$key}; | |
1587 | } | |
1588 | \%hash; | |
1589 | } @$where | |
1590 | ]; | |
1591 | } | |
1592 | elsif (ref $where eq 'HASH') { | |
1593 | if ( (keys %$where) == 1 && ( (keys %{$where})[0] eq '-and' )) { | |
1594 | $cond->{-and} = []; | |
1595 | my @cond = @{$where->{-and}}; | |
1596 | for (my $i = 0; $i < @cond; $i++) { | |
1597 | my $entry = $cond[$i]; | |
1598 | my $hash; | |
1599 | if (ref $entry eq 'HASH') { | |
1600 | $hash = $self->_strip_cond_qualifiers($entry); | |
1601 | } | |
1602 | else { | |
1603 | $entry =~ /([^.]+)$/; | |
1604 | $hash->{$1} = $cond[++$i]; | |
1605 | } | |
1606 | push @{$cond->{-and}}, $hash; | |
1607 | } | |
1608 | } | |
1609 | else { | |
1610 | foreach my $key (keys %$where) { | |
1611 | $key =~ /([^.]+)$/; | |
1612 | $cond->{$1} = $where->{$key}; | |
1613 | } | |
1614 | } | |
1615 | } | |
1616 | else { | |
1617 | return undef; | |
1618 | } | |
1619 | ||
1620 | return $cond; | |
1429 | 1621 | } |
1430 | 1622 | |
1431 | 1623 | # We were sent here because the $rs contains a complex search |
1432 | 1624 | # which will require a subquery to select the correct rows |
1433 | # (i.e. joined or limited resultsets) | |
1625 | # (i.e. joined or limited resultsets, or non-introspectable conditions) | |
1434 | 1626 | # |
1435 | 1627 | # Generating a single PK column subquery is trivial and supported
1436 | 1628 | # by all RDBMS. However if we have a multicolumn PK, things get ugly. |
1441 | 1633 | |
1442 | 1634 | my $rsrc = $rs->result_source; |
1443 | 1635 | |
1444 | # we already check this, but double check naively just in case. Should be removed soon | |
1445 | my $sel = $rs->_resolved_attrs->{select}; | |
1446 | $sel = [ $sel ] unless ref $sel eq 'ARRAY'; | |
1447 | 1636 | my @pcols = $rsrc->primary_columns; |
1448 | if (@$sel != @pcols) { | |
1449 | $self->throw_exception ( | |
1450 | 'Subquery update/delete can not be called on resultsets selecting a' | |
1451 | .' number of columns different than the number of primary keys' | |
1452 | ); | |
1453 | } | |
1454 | 1637 | |
1455 | 1638 | if (@pcols == 1) { |
1456 | 1639 | return $self->$op ( |
1992 | 2175 | return @pcols ? \@pcols : [ 1 ]; |
1993 | 2176 | } |
1994 | 2177 | |
1995 | ||
1996 | 2178 | sub source_bind_attributes { |
1997 | 2179 | my ($self, $source) = @_; |
1998 | 2180 | |
2226 | 2408 | =cut |
2227 | 2409 | |
2228 | 2410 | sub sqlt_type { |
2229 | my ($self) = @_; | |
2230 | ||
2231 | if (not $self->_driver_determined) { | |
2232 | $self->_determine_driver; | |
2233 | goto $self->can ('sqlt_type'); | |
2234 | } | |
2235 | ||
2236 | $self->_get_dbh->{Driver}->{Name}; | |
2411 | shift->_get_dbh->{Driver}->{Name}; | |
2237 | 2412 | } |
2238 | 2413 | |
2239 | 2414 | =head2 bind_attribute_by_data_type |
2507 | 2682 | parser => 'SQL::Translator::Parser::DBIx::Class', |
2508 | 2683 | data => $schema, |
2509 | 2684 | ); |
2510 | return $tr->translate; | |
2685 | ||
2686 | my $ret = $tr->translate | |
2687 | or $self->throw_exception( 'Unable to produce deployment statements: ' . $tr->error); | |
2688 | ||
2689 | return $ret; | |
2511 | 2690 | } |
2512 | 2691 | |
2513 | 2692 | sub deploy { |
2573 | 2752 | =cut |
2574 | 2753 | |
2575 | 2754 | sub build_datetime_parser { |
2576 | if (not $_[0]->_driver_determined) { | |
2577 | $_[0]->_determine_driver; | |
2578 | goto $_[0]->can('build_datetime_parser'); | |
2579 | } | |
2580 | ||
2581 | 2755 | my $self = shift; |
2582 | 2756 | my $type = $self->datetime_parser_type(@_); |
2583 | 2757 | $self->ensure_class_loaded ($type); |
2610 | 2784 | return; |
2611 | 2785 | } |
2612 | 2786 | |
2613 | # SQLT version handling | |
2787 | # SQLT version handling | |
2614 | 2788 | { |
2615 | my $_sqlt_version_ok; # private | |
2616 | my $_sqlt_version_error; # private | |
2789 | my $_sqlt_version_ok; # private | |
2790 | my $_sqlt_version_error; # private | |
2617 | 2791 | |
2618 | 2792 | sub _sqlt_version_ok { |
2619 | 2793 | if (!defined $_sqlt_version_ok) { |
23 | 23 | # Always remember to do all digits for the version even if they're 0 |
24 | 24 | # i.e. first release of 0.XX *must* be 0.XX000. This avoids fBSD ports |
25 | 25 | # brain damage and presumably various other packaging systems too |
26 | $VERSION = '0.08112'; | |
26 | $VERSION = '0.08113'; | |
27 | 27 | |
28 | 28 | $VERSION = eval $VERSION; # numify for warning-free dev releases |
29 | 29 | |
114 | 114 | my $all_artists_rs = $schema->resultset('Artist'); |
115 | 115 | |
116 | 116 | # Output all artists names |
117 | # $artist here is a DBIx::Class::Row, which has accessors | |
117 | # $artist here is a DBIx::Class::Row, which has accessors | |
118 | 118 | # for all its columns. Rows are also subclasses of your Result class. |
119 | 119 | foreach $artist (@artists) { |
120 | 120 | print $artist->name, "\n"; |
340 | 340 | |
341 | 341 | Tom Hukins |
342 | 342 | |
343 | triode: Pete Gamache <gamache@cpan.org> | |
344 | ||
343 | 345 | typester: Daisuke Murase <typester@cpan.org> |
344 | 346 | |
345 | 347 | victori: Victor Igumnov <victori@cpan.org> |
183 | 183 | if ($fk_constraint) { |
184 | 184 | $cascade->{$c} = $rel_info->{attrs}{"on_$c"}; |
185 | 185 | } |
186 | else { | |
186 | elsif ( $rel_info->{attrs}{"on_$c"} ) { | |
187 | 187 | carp "SQLT attribute 'on_$c' was supplied for relationship '$moniker/$rel', which does not appear to be a foreign constraint. " |
188 | 188 | . "If you are sure that SQLT must generate a constraint for this relationship, add 'is_foreign_key_constraint => 1' to the attributes.\n"; |
189 | 189 | } |
0 | #!/usr/bin/perl | |
1 | ||
2 | # | |
3 | # So you wrote a new mk_hash implementation which passed all tests (particularly | |
4 | # t/68inflate_resultclass_hashrefinflator) and would like to see how it holds up | |
5 | # against older versions of the same. Just add your coderef to the HRI::Bench | |
6 | # namespace and add a name/ref pair to the %bench_list hash. Happy testing. | |
7 | ||
8 | package DBIx::Class::ResultClass::HashRefInflator::Bench; | |
9 | ||
10 | use warnings; | |
11 | use strict; | |
12 | ||
13 | my $current_mk_hash; | |
14 | $current_mk_hash = sub { | |
15 | if (ref $_[0] eq 'ARRAY') { # multi relationship | |
16 | return [ map { $current_mk_hash->(@$_) || () } (@_) ]; | |
17 | } | |
18 | else { | |
19 | my $hash = { | |
20 | # the main hash could be an undef if we are processing a skipped-over join | |
21 | $_[0] ? %{$_[0]} : (), | |
22 | ||
23 | # the second arg is a hash of arrays for each prefetched relation | |
24 | map | |
25 | { $_ => $current_mk_hash->( @{$_[1]->{$_}} ) } | |
26 | ( $_[1] ? (keys %{$_[1]}) : () ) | |
27 | }; | |
28 | ||
29 | # if there is at least one defined column consider the resultset real | |
30 | # (and not an empty has_many rel containing one empty hashref) | |
31 | for (values %$hash) { | |
32 | return $hash if defined $_; | |
33 | } | |
34 | ||
35 | return undef; | |
36 | } | |
37 | }; | |
38 | ||
39 | # the (incomplete, fails a test) implementation before svn:4760 | |
40 | my $old_mk_hash; | |
41 | $old_mk_hash = sub { | |
42 | my ($me, $rest) = @_; | |
43 | ||
44 | # $me is the hashref of cols/data from the immediate resultsource | |
45 | # $rest is a deep hashref of all the data from the prefetched | |
46 | # related sources. | |
47 | ||
48 | # to avoid empty has_many rels containing one empty hashref | |
49 | return undef if (not keys %$me); | |
50 | ||
51 | my $def; | |
52 | ||
53 | foreach (values %$me) { | |
54 | if (defined $_) { | |
55 | $def = 1; | |
56 | last; | |
57 | } | |
58 | } | |
59 | return undef unless $def; | |
60 | ||
61 | return { %$me, | |
62 | map { | |
63 | ( $_ => | |
64 | ref($rest->{$_}[0]) eq 'ARRAY' | |
65 | ? [ grep defined, map $old_mk_hash->(@$_), @{$rest->{$_}} ] | |
66 | : $old_mk_hash->( @{$rest->{$_}} ) | |
67 | ) | |
68 | } keys %$rest | |
69 | }; | |
70 | }; | |
71 | ||
72 | ||
73 | our %bench_list = ( | |
74 | current_implementation => $current_mk_hash, | |
75 | old_implementation => $old_mk_hash, | |
76 | ); | |
77 | ||
78 | 1; | |
79 | ||
package benchmark_hashrefinflator;

use warnings;
use strict;

use FindBin;
use lib ("$FindBin::Bin/../lib", "$FindBin::Bin/../t/lib");

use Benchmark qw/timethis cmpthese/;
use DBICTest;

# Run relative to the distribution root so the test schema can find its
# fixtures.
chdir ("$FindBin::Bin/..");
my $schema = DBICTest->init_schema();

# One benchmark iteration: inflate a two-level prefetch into hashrefs
# via the Bench result class.
my $exercise = sub {
    my $rs = $schema->resultset ('Artist')->search ({}, {
        prefetch => { cds => 'tracks' },
    });
    $rs->result_class('DBIx::Class::ResultClass::HashRefInflator::Bench');
    my @stuff = $rs->all;
};


my $results;
for my $impl (keys %DBIx::Class::ResultClass::HashRefInflator::Bench::bench_list) {

    print "Timing $impl... ";

    # Point inflate_result at the implementation under test; the local
    # override lasts only for this loop iteration.
    no warnings qw/redefine once/;
    no strict qw/refs/;
    local *DBIx::Class::ResultClass::HashRefInflator::Bench::inflate_result = sub {
        return $DBIx::Class::ResultClass::HashRefInflator::Bench::bench_list{$impl}->(@_[2,3]);
    };

    $results->{$impl} = timethis (-2, $exercise);
}
cmpthese ($results);
0 | #!/usr/bin/perl -w | |
1 | ||
2 | # Originally by: Zbigniew Lukasiak, C<zz bb yy@gmail.com> | |
3 | # but refactored and modified to our nefarious purposes | |
4 | ||
5 | # XXX I'm not done refactoring this yet --blblack | |
6 | ||
7 | use strict; | |
8 | use warnings; | |
9 | ||
10 | use Pod::Coverage; | |
11 | use Data::Dumper; | |
12 | use File::Find::Rule; | |
13 | use File::Slurp; | |
14 | use Path::Class; | |
15 | use Template; | |
16 | ||
17 | # Convert filename to package name | |
# Convert filename to package name.
#
# Returns the package declared by the first "package ...;" statement in
# $file (whitespace-trimmed), or nothing when the file declares no
# package or can't be read as expected.
sub getpac {
    my $file = shift;

    # Slurp with a lexical filehandle; no need for File::Slurp here.
    open my $fh, '<', $file or die "Can't read $file: $!";
    my $filecont = do { local $/; <$fh> };
    close $fh;

    $filecont =~ /package\s*(.*?);/s or return;
    my $pac = $1;

    # The old code did  $pac =~ /\s+(.*)$/; return $1;  which only
    # worked by accident: on a failed match $1 still held the capture
    # from the previous regex.  Trim surrounding whitespace explicitly.
    $pac =~ s/^\s+//;
    $pac =~ s/\s+$//;
    return $pac;
}
26 | ||
# Scan every module/POD file under lib/ and record, per documented
# symbol, which packages actually define it and in which file.
my %documented;   # symbol => { package => file }

for my $pm_file ( File::Find::Rule->file()->name('*.pm', '*.pod')->in('lib') ) {
    my $pkg = getpac($pm_file) or next;

    my $coverage = Pod::Coverage->new(package => $pkg);
    my %is_sym   = map { ($_ => 1) } $coverage->_get_syms($pkg);

    my $pod_syms = $coverage->_get_pods()
        or next;

    for my $sym (@$pod_syms) {
        $documented{$sym}{$pkg} = $pm_file if $is_sym{$sym};
    }
}

# Flatten into sorted (symbol, package) pairs for the template below.
my @lines = map {
    my $sym = $_;
    map { { symbol => $sym, package => $_ } } sort keys %{ $documented{$sym} };
} sort keys %documented;

my $tt = Template->new({})
    || die Template->error(), "\n";

$tt->process(\*DATA, { lines => \@lines })
    || die $tt->error(), "\n";


__DATA__

=head1 NAME

Method Index

[% FOR line = lines %]
L<[% line.symbol %] ([% line.package %])|[% line.package %]/[% line.symbol %]>
[% END %]
#!/usr/bin/perl

use strict;
use warnings;
use lib qw(lib t/lib);

use DBICTest::Schema;
use SQL::Translator;

# Dump the SQLite deployment DDL for the test schema to STDOUT.  No
# live database connection is required for this.
my $schema = DBICTest::Schema->connect;
my $ddl = scalar $schema->storage->deployment_statements($schema, 'SQLite');
print $ddl;
#!/usr/bin/perl

# Regenerate the per-variant test wrappers under t/basicrels and
# t/helperrels from the shared *.tl templates in t/run.

use strict;
use warnings;

die "must be run from DBIx::Class root dir" unless -d 't/run';

gen_tests($_) for qw/BasicRels HelperRels/;

# gen_tests($variant) - write a t/<lc variant>/<name>.t wrapper for
# every t/run/<name>.tl template, loading DBICTest::<variant>.
sub gen_tests {
  my $variant = shift;
  my $dir = lc $variant;

  # Remove stale generated tests.  unlink+glob is portable and avoids
  # shelling out to "rm -f".
  unlink glob "t/$dir/*.t";

  # Iterate the templates via glob instead of parsing `ls` output; the
  # old map used $1 even when its match failed (stale-capture hazard).
  foreach my $tl_file (glob 't/run/*.tl') {
    my ($test) = $tl_file =~ m[^t/run/(.+)\.tl$]
      or next;

    open(my $fh, '>', "t/$dir/${test}.t") or die $!;
    print $fh <<"EOF";
use Test::More;
use lib qw(t/lib);
use DBICTest;
use DBICTest::$variant;

require "t/run/${test}.tl";
run_tests(DBICTest->schema);
EOF
    # Buffered write errors surface at close - check it.
    close $fh or die $!;
  }
}
#!/usr/bin/perl
use strict;
use warnings;
use lib qw(lib t/lib);

# USAGE:
# maint/inheritance_pod.pl Some::Module
#
# Prints a POD "INHERITED METHODS" section: for every ancestor in the
# C3 MRO of the given module, the lower-case subs it defines, rendered
# as L<> links into that ancestor's documentation.

my $module = $ARGV[0]
  or die "usage: $0 Some::Module\n";

# Fail loudly if the module can't be loaded; the old code ignored the
# eval's result and crashed later with a confusing error.
eval "require $module; 1"
  or die "Unable to load $module: $@";

# Class::C3 was never loaded explicitly before - it only worked when
# $module happened to pull it in.
require Class::C3;

my @modules = Class::C3::calculateMRO($module);
shift(@modules);    # drop $module itself; we only want ancestors

print "=head1 INHERITED METHODS\n\n";

foreach my $ancestor (@modules) {
  print "=head2 $ancestor\n\n";
  print "=over 4\n\n";

  # Module name -> relative .pm path.
  (my $file = "$ancestor.pm") =~ s{::}{/}g;

  # Scan the first copy of the module found on @INC for sub definitions
  # that look like public methods (lower-case names).
  foreach my $path (@INC) {
    next unless -e "$path/$file";

    open(my $mod_fh, '<', "$path/$file") or die "Can't read $path/$file: $!";
    while (my $line = <$mod_fh>) {
      if ($line =~ /^\s*sub ([a-z][a-z_]+) \{/) {
        my $method = $1;
        print "=item *\n\n";
        print "L<$method|$ancestor/$method>\n\n";
      }
    }
    close($mod_fh);
    last;
  }
  print "=back\n\n";
}

1;
#!/bin/sh
# Refresh maint/svn-log.perl from the upstream repo-tools repository.

# Abort on the first failure (e.g. cd into a missing directory or a
# failed download) instead of blundering on.
set -e

cd maint
# -f: don't fail when the file isn't there yet.
rm -f svn-log.perl
wget https://thirdlobe.com/svn/repo-tools/trunk/svn-log.perl
#!/usr/bin/env perl
# $Id$

# This program is Copyright 2005 by Rocco Caputo. All rights are
# reserved. This program is free software. It may be modified, used,
# and redistributed under the same terms as Perl itself.

# Generate a nice looking change log from the subversion logs for a
# Perl project. The log is also easy for machines to parse.

use warnings;
use strict;

use Getopt::Long;
use Text::Wrap qw(wrap fill $columns $huge);
use POSIX qw(strftime);
use XML::Parser;

# Month-name => month-number lookup.  NOTE(review): appears unused in
# this script - possibly a leftover; kept as-is.
my %month = qw(
  Jan 01 Feb 02 Mar 03 Apr 04 May 05 Jun 06
  Jul 07 Aug 08 Sep 09 Oct 10 Nov 11 Dec 12
);

# Wrap over-long "words" rather than leaving them intact, at 74 cols.
$Text::Wrap::huge = "wrap";
$Text::Wrap::columns = 74;

my $days_back = 365; # Go back a year by default.
my $send_help = 0;   # Display help and exit.
my $svn_repo;        # Where to log from.

# Field indices of a single trunk log record.
use constant LOG_REV     => 0;
use constant LOG_DATE    => 1;
use constant LOG_WHO     => 2;
use constant LOG_MESSAGE => 3;
use constant LOG_PATHS   => 4;

# Field indices of a changed-path record inside a log record.
use constant PATH_PATH     => 0;
use constant PATH_ACTION   => 1;
use constant PATH_CPF_PATH => 2;
use constant PATH_CPF_REV  => 3;

# Field indices of a tag record.
use constant TAG_REV => 0;
use constant TAG_TAG => 1;
use constant TAG_LOG => 2;

# Sorts after any real "YYYY-MM-DD hh:mm:ss" timestamp; used for the
# fictitious HEAD tag below.
use constant MAX_TIMESTAMP => "9999-99-99 99:99:99";

GetOptions(
  "age=s"  => \$days_back,
  "repo=s" => \$svn_repo,
  "help"   => \$send_help,
) or exit;
52 | ||
# No --repo given: derive the repository root from the working copy in
# the current directory by stripping everything from /trunk onward off
# the "URL:" line of `svn info`.
if (!defined $svn_repo) {
  $svn_repo = `svn info . | grep '^URL: '`;
  if (length $svn_repo) {
    chomp $svn_repo;
    $svn_repo =~ s{^URL\:\s+(.+?)/trunk/?.*$}{$1};
  }
  else {
    # Not inside a working copy; fall through to the usage message.
    $send_help = 1;
  }
}

die(
  "$0 usage:\n",
  " --repo REPOSITORY\n",
  " [--age DAYS]\n",
  "\n",
  "REPOSITORY must have a trunk subdirectory and a tags directory where\n",
  "release tags are kept.\n",
) if $send_help;
73 | ||
my $earliest_date = strftime "%F", gmtime(time() - $days_back * 86400);

### 1. Gather a list of tags for the repository, their revisions and
### dates.

my %tag;

# List-form piped open runs svn directly (no shell), so the repository
# URL can't be mangled by shell metacharacters; a lexical filehandle
# replaces the old bareword TAG.
open my $tag_fh, '-|', 'svn', '-v', 'list', "$svn_repo/tags" or die $!;
while (<$tag_fh>) {
  # The date is unused, however.
  next unless (
    my ($rev, $date, $tag) = m{
      (\d+).*?(\S\S\S\s+\d\d\s+(?:\d\d\d\d|\d\d:\d\d))\s+(v[0-9_.]+)
    }x
  );

  # A tag copied from trunk must have exactly one log entry; more means
  # someone committed to the tag after tagging.
  my @tag_log = gather_log("$svn_repo/tags/$tag", "--stop-on-copy");
  die "Tag $tag has changes after tagging!\n" if @tag_log > 1;

  my $timestamp = $tag_log[0][LOG_DATE];
  $tag{$timestamp} = [
    $rev, # TAG_REV
    $tag, # TAG_TAG
    [ ],  # TAG_LOG
  ];
}
close $tag_fh;

# Fictitious "HEAD" tag for revisions that came after the last tag.

$tag{+MAX_TIMESTAMP} = [
  "HEAD",       # TAG_REV
  "(untagged)", # TAG_TAG
  undef,        # TAG_LOG
];
109 | ||
### 2. Gather the log for the trunk. Place log entries under their
### proper tags.

# Walk the tags newest-first; for each tag, pull the trunk log between
# it and the next-older tag (or revision 0 for the oldest).
my @tag_dates = sort keys %tag;
while (my $date = pop(@tag_dates)) {

  # We're done if this date's before our earliest date.
  if ($date lt $earliest_date) {
    delete $tag{$date};
    next;
  }

  my $tag = $tag{$date}[TAG_TAG];
  #warn "Gathering information for tag $tag...\n";

  my $this_rev = $tag{$date}[TAG_REV];
  my $prev_rev;
  if (@tag_dates) {
    $prev_rev = $tag{$tag_dates[-1]}[TAG_REV];
  }
  else {
    $prev_rev = 0;
  }

  my @log = gather_log("$svn_repo/trunk", "-r", "$this_rev:$prev_rev");

  $tag{$date}[TAG_LOG] = \@log;
}

### 3. PROFIT! No, wait... generate the nice log file.

# Emit sections newest tag first (reverse timestamp order).
foreach my $timestamp (sort { $b cmp $a } keys %tag) {
  my $tag_rec = $tag{$timestamp};

  # Skip this tag if there are no log entries.
  next unless @{$tag_rec->[TAG_LOG]};

  my $tag_line = "$timestamp $tag_rec->[TAG_TAG]";
  my $tag_bar = "=" x length($tag_line);
  print $tag_bar, "\n", $tag_line, "\n", $tag_bar, "\n\n";

  foreach my $log_rec (@{$tag_rec->[TAG_LOG]}) {

    # Drop the noise "/trunk M" entry when the commit also touched
    # other paths.
    my @paths = @{$log_rec->[LOG_PATHS]};
    if (@paths > 1) {
      @paths = grep {
        $_->[PATH_PATH] ne "/trunk" or $_->[PATH_ACTION] ne "M"
      } @paths;
    }

    # First attempt: date, revision, author and all paths on one
    # wrapped header line.
    my $time_line = wrap(
      " ", " ",
      join(
        "; ",
        "$log_rec->[LOG_DATE] (r$log_rec->[LOG_REV]) by $log_rec->[LOG_WHO]",
        map { "$_->[PATH_PATH] $_->[PATH_ACTION]" } @paths
      )
    );

    # If that wrapped, fall back to two separately-wrapped parts:
    # date/rev/author first, then the path list.
    if ($time_line =~ /\n/) {
      $time_line = wrap(
        " ", " ",
        "$log_rec->[LOG_DATE] (r$log_rec->[LOG_REV]) by $log_rec->[LOG_WHO]\n"
      ) .
      wrap(
        " ", " ",
        join(
          "; ",
          map { "$_->[PATH_PATH] $_->[PATH_ACTION]" } @paths
        )
      );
    }

    print $time_line, "\n\n";

    # Blank lines should have the indent level of whitespace. This
    # makes it easier for other utilities to parse them.

    my @paragraphs = split /\n\s*\n/, $log_rec->[LOG_MESSAGE];
    foreach my $paragraph (@paragraphs) {

      # Trim off identical leading space from every line.
      my ($whitespace) = $paragraph =~ /^(\s*)/;
      if (length $whitespace) {
        $paragraph =~ s/^$whitespace//mg;
      }

      # Re-flow the paragraph if it isn't indented from the norm.
      # This should preserve indented quoted text, wiki-style.
      unless ($paragraph =~ /^\s/) {
        $paragraph = fill(" ", " ", $paragraph);
      }
    }

    print join("\n \n", @paragraphs), "\n\n";
  }
}

print(
  "==============\n",
  "End of Excerpt\n",
  "==============\n",
);
213 | ||
214 | ### Z. Helper functions. | |
215 | ||
# gather_log($url, @flags) - run "svn -v --xml ... log $url" and parse
# the XML output into a list of log records, each an array ref indexed
# by the LOG_* constants.  @flags is passed through to svn verbatim
# (e.g. "--stop-on-copy" or "-r", "N:M").
sub gather_log {
  my ($url, @flags) = @_;

  my (@log, @stack);

  my $parser = XML::Parser->new(
    Handlers => {
      Start => sub {
        my ($self, $tag, %att) = @_;
        push @stack, [ $tag, \%att ];
        if ($tag eq "logentry") {
          push @log, [ ];
          $log[-1][LOG_WHO] = "(nobody)";
        }
      },
      Char => sub {
        my ($self, $text) = @_;
        # Accumulate character data under the fake attribute key "0" of
        # the innermost open element; End picks it up from there.
        $stack[-1][1]{0} .= $text;
      },
      End => sub {
        my ($self, $tag) = @_;
        die "close $tag w/out open" unless @stack;
        my ($pop_tag, $att) = @{pop @stack};

        die "$tag ne $pop_tag" if $tag ne $pop_tag;

        if ($tag eq "date") {
          # "YYYY-MM-DDThh:mm:ss.uuuuuuZ" -> "YYYY-MM-DD hh:mm:ss"
          my $timestamp = $att->{0};
          my ($date, $time) = split /[T.]/, $timestamp;
          $log[-1][LOG_DATE] = "$date $time";
          return;
        }

        if ($tag eq "logentry") {
          $log[-1][LOG_REV] = $att->{revision};
          return;
        }

        if ($tag eq "msg") {
          $log[-1][LOG_MESSAGE] = $att->{0};
          return;
        }

        if ($tag eq "author") {
          $log[-1][LOG_WHO] = $att->{0};
          return;
        }

        if ($tag eq "path") {
          my $path = $att->{0};
          $path =~ s{^/trunk/}{};
          push(
            @{$log[-1][LOG_PATHS]}, [
              $path,          # PATH_PATH
              $att->{action}, # PATH_ACTION
            ]
          );

          $log[-1][LOG_PATHS][-1][PATH_CPF_PATH] = $att->{"copyfrom-path"} if (
            exists $att->{"copyfrom-path"}
          );

          $log[-1][LOG_PATHS][-1][PATH_CPF_REV] = $att->{"copyfrom-rev"} if (
            exists $att->{"copyfrom-rev"}
          );
          return;
        }

      }
    }
  );

  # List-form piped open runs svn directly - no shell - so $url and
  # @flags are passed as-is (the old 2-arg open interpolated them into
  # a shell command line).  Lexical handle replaces the bareword LOG.
  open my $svn_fh, '-|', 'svn', '-v', '--xml', @flags, 'log', $url
    or die $!;
  $parser->parse($svn_fh);
  close $svn_fh;

  return @log;
}
4 | 4 | use Test::Exception; |
5 | 5 | use lib qw(t/lib); |
6 | 6 | use DBICTest; |
7 | ||
8 | plan tests => 23; | |
7 | use Path::Class::File (); | |
9 | 8 | |
10 | 9 | my $schema = DBICTest->init_schema(); |
11 | 10 | |
115 | 114 | is($link7->url, undef, 'Link 7 url'); |
116 | 115 | is($link7->title, 'gtitle', 'Link 7 title'); |
117 | 116 | |
117 | my $rs = $schema->resultset('Artist'); | |
118 | $rs->delete; | |
119 | ||
120 | # test _execute_array_empty (insert_bulk with all literal sql) | |
121 | ||
122 | $rs->populate([ | |
123 | (+{ | |
124 | name => \"'DT'", | |
125 | rank => \500, | |
126 | charfield => \"'mtfnpy'", | |
127 | }) x 5 | |
128 | ]); | |
129 | ||
130 | is((grep { | |
131 | $_->name eq 'DT' && | |
132 | $_->rank == 500 && | |
133 | $_->charfield eq 'mtfnpy' | |
134 | } $rs->all), 5, 'populate with all literal SQL'); | |
135 | ||
136 | $rs->delete; | |
137 | ||
138 | # test mixed binds with literal sql | |
139 | ||
140 | $rs->populate([ | |
141 | (+{ | |
142 | name => \"'DT'", | |
143 | rank => 500, | |
144 | charfield => \"'mtfnpy'", | |
145 | }) x 5 | |
146 | ]); | |
147 | ||
148 | is((grep { | |
149 | $_->name eq 'DT' && | |
150 | $_->rank == 500 && | |
151 | $_->charfield eq 'mtfnpy' | |
152 | } $rs->all), 5, 'populate with all literal SQL'); | |
153 | ||
154 | $rs->delete; | |
155 | ||
156 | ### | |
157 | ||
158 | throws_ok { | |
159 | $rs->populate([ | |
160 | { | |
161 | artistid => 1, | |
162 | name => 'foo1', | |
163 | }, | |
164 | { | |
165 | artistid => 'foo', # this dies | |
166 | name => 'foo2', | |
167 | }, | |
168 | { | |
169 | artistid => 3, | |
170 | name => 'foo3', | |
171 | }, | |
172 | ]); | |
173 | } qr/slice/, 'bad slice'; | |
174 | ||
175 | is($rs->count, 0, 'populate is atomic'); | |
176 | ||
177 | # Trying to use a column marked as a bind in the first slice with literal sql in | |
178 | # a later slice should throw. | |
179 | ||
180 | throws_ok { | |
181 | $rs->populate([ | |
182 | { | |
183 | artistid => 1, | |
184 | name => \"'foo'", | |
185 | }, | |
186 | { | |
187 | artistid => \2, | |
188 | name => \"'foo'", | |
189 | } | |
190 | ]); | |
191 | } qr/bind expected/, 'literal sql where bind expected throws'; | |
192 | ||
193 | # ... and vice-versa. | |
194 | ||
195 | throws_ok { | |
196 | $rs->populate([ | |
197 | { | |
198 | artistid => \1, | |
199 | name => \"'foo'", | |
200 | }, | |
201 | { | |
202 | artistid => 2, | |
203 | name => \"'foo'", | |
204 | } | |
205 | ]); | |
206 | } qr/literal SQL expected/i, 'bind where literal sql expected throws'; | |
207 | ||
208 | throws_ok { | |
209 | $rs->populate([ | |
210 | { | |
211 | artistid => 1, | |
212 | name => \"'foo'", | |
213 | }, | |
214 | { | |
215 | artistid => 2, | |
216 | name => \"'bar'", | |
217 | } | |
218 | ]); | |
219 | } qr/inconsistent/, 'literal sql must be the same in all slices'; | |
220 | ||
221 | # the stringification has nothing to do with the artist name | |
222 | # this is solely for testing consistency | |
223 | my $fn = Path::Class::File->new ('somedir/somefilename.tmp'); | |
224 | my $fn2 = Path::Class::File->new ('somedir/someotherfilename.tmp'); | |
225 | ||
226 | lives_ok { | |
227 | $rs->populate([ | |
228 | { | |
229 | name => 'supplied before stringifying object', | |
230 | }, | |
231 | { | |
232 | name => $fn, | |
233 | } | |
234 | ]); | |
235 | } 'stringifying objects pass through'; | |
236 | ||
237 | # ... and vice-versa. | |
238 | ||
239 | lives_ok { | |
240 | $rs->populate([ | |
241 | { | |
242 | name => $fn2, | |
243 | }, | |
244 | { | |
245 | name => 'supplied after stringifying object', | |
246 | }, | |
247 | ]); | |
248 | } 'stringifying objects pass through'; | |
249 | ||
250 | for ( | |
251 | $fn, | |
252 | $fn2, | |
253 | 'supplied after stringifying object', | |
254 | 'supplied before stringifying object' | |
255 | ) { | |
256 | my $row = $rs->find ({name => $_}); | |
257 | ok ($row, "Stringification test row '$_' properly inserted"); | |
258 | } | |
259 | ||
260 | $rs->delete; | |
261 | ||
262 | # test stringification with ->create rather than Storage::insert_bulk as well | |
263 | ||
264 | lives_ok { | |
265 | my @dummy = $rs->populate([ | |
266 | { | |
267 | name => 'supplied before stringifying object', | |
268 | }, | |
269 | { | |
270 | name => $fn, | |
271 | } | |
272 | ]); | |
273 | } 'stringifying objects pass through'; | |
274 | ||
275 | # ... and vice-versa. | |
276 | ||
277 | lives_ok { | |
278 | my @dummy = $rs->populate([ | |
279 | { | |
280 | name => $fn2, | |
281 | }, | |
282 | { | |
283 | name => 'supplied after stringifying object', | |
284 | }, | |
285 | ]); | |
286 | } 'stringifying objects pass through'; | |
287 | ||
288 | for ( | |
289 | $fn, | |
290 | $fn2, | |
291 | 'supplied after stringifying object', | |
292 | 'supplied before stringifying object' | |
293 | ) { | |
294 | my $row = $rs->find ({name => $_}); | |
295 | ok ($row, "Stringification test row '$_' properly inserted"); | |
296 | } | |
297 | ||
298 | lives_ok { | |
299 | $schema->resultset('TwoKeys')->populate([{ | |
300 | artist => 1, | |
301 | cd => 5, | |
302 | fourkeys_to_twokeys => [{ | |
303 | f_foo => 1, | |
304 | f_bar => 1, | |
305 | f_hello => 1, | |
306 | f_goodbye => 1, | |
307 | autopilot => 'a', | |
308 | },{ | |
309 | f_foo => 2, | |
310 | f_bar => 2, | |
311 | f_hello => 2, | |
312 | f_goodbye => 2, | |
313 | autopilot => 'b', | |
314 | }] | |
315 | }]) | |
316 | } 'multicol-PK has_many populate works'; | |
317 | ||
318 | done_testing; |
64 | 64 | |
65 | 65 | is(@art, 2, 'And then there were two'); |
66 | 66 | |
67 | ok(!$art->in_storage, "It knows it's dead"); | |
67 | is($art->in_storage, 0, "It knows it's dead"); | |
68 | 68 | |
69 | 69 | dies_ok ( sub { $art->delete }, "Can't delete twice"); |
70 | 70 | |
143 | 143 | }); |
144 | 144 | |
145 | 145 | is($new_obj->name, 'find_or_new', 'find_or_new: instantiated a new artist'); |
146 | ok(! $new_obj->in_storage, 'new artist is not in storage'); | |
146 | is($new_obj->in_storage, 0, 'new artist is not in storage'); | |
147 | 147 | } |
148 | 148 | |
149 | 149 | my $cd = $schema->resultset("CD")->find(1); |
226 | 226 | => 'Nothing Found!'; |
227 | 227 | } |
228 | 228 | |
229 | ||
230 | ## If find() is the first query after connect() | |
231 | ## DBI::Storage::sql_maker() will be called before | |
232 | ## _determine_driver() and so the ::SQLHacks class for MySQL | |
233 | ## will not be used | |
234 | ||
235 | my $schema2 = DBICTest::Schema->connect($dsn, $user, $pass); | |
236 | $schema2->resultset("Artist")->find(4); | |
237 | isa_ok($schema2->storage->sql_maker, 'DBIx::Class::SQLAHacks::MySQL'); | |
238 | ||
229 | 239 | done_testing; |
471 | 471 | |
472 | 472 | my @eapk_schemas; |
473 | 473 | BEGIN{ @eapk_schemas = map "dbic_apk_$_", 0..5 } |
474 | my %seqs; #< hash of schema.table.col => currval of its (DBIC) primary key sequence | |
474 | 475 | |
475 | 476 | sub run_extended_apk_tests { |
476 | 477 | my $schema = shift; |
488 | 489 | for @eapk_schemas; |
489 | 490 | |
490 | 491 | $dbh->do("CREATE SEQUENCE $eapk_schemas[5].fooseq"); |
492 | $dbh->do("SELECT setval('$eapk_schemas[5].fooseq',400)"); | |
493 | $seqs{"$eapk_schemas[1].apk.id2"} = 400; | |
494 | ||
491 | 495 | $dbh->do("CREATE SEQUENCE $eapk_schemas[4].fooseq"); |
496 | $dbh->do("SELECT setval('$eapk_schemas[4].fooseq',300)"); | |
497 | $seqs{"$eapk_schemas[3].apk.id2"} = 300; | |
498 | ||
492 | 499 | $dbh->do("CREATE SEQUENCE $eapk_schemas[3].fooseq"); |
493 | ||
494 | $dbh->do("SET search_path = ".join ',', @eapk_schemas ); | |
500 | $dbh->do("SELECT setval('$eapk_schemas[3].fooseq',200)"); | |
501 | $seqs{"$eapk_schemas[4].apk.id2"} = 200; | |
502 | ||
503 | $dbh->do("SET search_path = ".join ',', reverse @eapk_schemas ); | |
495 | 504 | }); |
496 | 505 | |
497 | 506 | # clear our search_path cache |
518 | 527 | qualify_table => 4, |
519 | 528 | ); |
520 | 529 | |
530 | eapk_poke( $schema ); | |
521 | 531 | eapk_poke( $schema, 0 ); |
522 | 532 | eapk_poke( $schema, 2 ); |
523 | 533 | eapk_poke( $schema, 4 ); |
524 | 534 | eapk_poke( $schema, 1 ); |
525 | 535 | eapk_poke( $schema, 0 ); |
526 | 536 | eapk_poke( $schema, 1 ); |
537 | eapk_poke( $schema ); | |
527 | 538 | eapk_poke( $schema, 4 ); |
528 | 539 | eapk_poke( $schema, 3 ); |
529 | 540 | eapk_poke( $schema, 1 ); |
537 | 548 | # do a DBIC create on the apk table in the given schema number (which is an |
538 | 549 | # index of @eapk_schemas) |
539 | 550 | |
540 | my %seqs; #< sanity-check hash of schema.table.col => currval of its sequence | |
541 | ||
542 | 551 | sub eapk_poke { |
543 | 552 | my ($s, $schema_num) = @_; |
544 | 553 | |
546 | 555 | ? $eapk_schemas[$schema_num] |
547 | 556 | : ''; |
548 | 557 | |
549 | my $schema_name_actual = $schema_name || eapk_get_search_path($s)->[0]; | |
558 | my $schema_name_actual = $schema_name || eapk_find_visible_schema($s); | |
550 | 559 | |
551 | 560 | $s->source('ExtAPK')->name($schema_name ? $schema_name.'.apk' : 'apk'); |
552 | 561 | #< clear sequence name cache |
557 | 566 | lives_ok { |
558 | 567 | my $new; |
559 | 568 | for my $inc (1,2,3) { |
560 | $new = $schema->resultset('ExtAPK')->create({}); | |
569 | $new = $schema->resultset('ExtAPK')->create({ id1 => 1}); | |
561 | 570 | my $proper_seqval = ++$seqs{"$schema_name_actual.apk.id2"}; |
562 | 571 | is( $new->id2, $proper_seqval, "$schema_name_actual.apk.id2 correct inc $inc" ) |
563 | 572 | or eapk_seq_diag($s,$schema_name); |
564 | 573 | $new->discard_changes; |
565 | for my $id (grep $_ ne 'id2', @eapk_id_columns) { | |
574 | is( $new->id1, 1 ); | |
575 | for my $id ('id3','id4') { | |
566 | 576 | my $proper_seqval = ++$seqs{"$schema_name_actual.apk.$id"}; |
567 | 577 | is( $new->$id, $proper_seqval, "$schema_name_actual.apk.$id correct inc $inc" ) |
568 | 578 | or eapk_seq_diag($s,$schema_name); |
576 | 586 | # class |
577 | 587 | sub eapk_seq_diag { |
578 | 588 | my $s = shift; |
579 | my $schema = shift || eapk_get_search_path($s)->[0]; | |
589 | my $schema = shift || eapk_find_visible_schema($s); | |
580 | 590 | |
581 | 591 | diag "$schema.apk sequences: ", |
582 | 592 | join(', ', |
632 | 642 | local $_[1]->{Warn} = 0; |
633 | 643 | |
634 | 644 | my $id_def = $a{nextval} |
635 | ? "integer primary key not null default nextval('$a{nextval}'::regclass)" | |
636 | : 'serial primary key'; | |
645 | ? "integer not null default nextval('$a{nextval}'::regclass)" | |
646 | : 'serial'; | |
637 | 647 | $dbh->do(<<EOS); |
638 | 648 | CREATE TABLE $table_name ( |
639 | 649 | id1 serial |
640 | 650 | , id2 $id_def |
641 | , id3 serial | |
651 | , id3 serial primary key | |
642 | 652 | , id4 serial |
643 | 653 | ) |
644 | 654 | EOS |
666 | 676 | |
667 | 677 | }); |
668 | 678 | } |
679 | ||
680 | sub eapk_find_visible_schema { | |
681 | my ($s) = @_; | |
682 | ||
683 | my ($schema) = | |
684 | $s->storage->dbh_do(sub { | |
685 | $_[1]->selectrow_array(<<EOS); | |
686 | SELECT n.nspname | |
687 | FROM pg_catalog.pg_namespace n | |
688 | JOIN pg_catalog.pg_class c ON c.relnamespace = n.oid | |
689 | WHERE c.relname = 'apk' | |
690 | AND pg_catalog.pg_table_is_visible(c.oid) | |
691 | EOS | |
692 | }); | |
693 | return $schema; | |
694 | } |
25 | 25 | } |
26 | 26 | |
27 | 27 | use strict; |
28 | use warnings; | |
28 | use warnings; | |
29 | 29 | |
30 | 30 | use Test::Exception; |
31 | 31 | use Test::More; |
39 | 39 | ' as well as following sequences: \'pkid1_seq\', \'pkid2_seq\' and \'nonpkid_seq\'' |
40 | 40 | unless ($dsn && $user && $pass); |
41 | 41 | |
42 | plan tests => 35; | |
42 | plan tests => 36; | |
43 | 43 | |
44 | 44 | DBICTest::Schema->load_classes('ArtistFQN'); |
45 | 45 | my $schema = DBICTest::Schema->connect($dsn, $user, $pass); |
48 | 48 | |
49 | 49 | eval { |
50 | 50 | $dbh->do("DROP SEQUENCE artist_seq"); |
51 | $dbh->do("DROP SEQUENCE cd_seq"); | |
51 | 52 | $dbh->do("DROP SEQUENCE pkid1_seq"); |
52 | 53 | $dbh->do("DROP SEQUENCE pkid2_seq"); |
53 | 54 | $dbh->do("DROP SEQUENCE nonpkid_seq"); |
57 | 58 | $dbh->do("DROP TABLE track"); |
58 | 59 | }; |
59 | 60 | $dbh->do("CREATE SEQUENCE artist_seq START WITH 1 MAXVALUE 999999 MINVALUE 0"); |
61 | $dbh->do("CREATE SEQUENCE cd_seq START WITH 1 MAXVALUE 999999 MINVALUE 0"); | |
60 | 62 | $dbh->do("CREATE SEQUENCE pkid1_seq START WITH 1 MAXVALUE 999999 MINVALUE 0"); |
61 | 63 | $dbh->do("CREATE SEQUENCE pkid2_seq START WITH 10 MAXVALUE 999999 MINVALUE 0"); |
62 | 64 | $dbh->do("CREATE SEQUENCE nonpkid_seq START WITH 20 MAXVALUE 999999 MINVALUE 0"); |
66 | 68 | $dbh->do("CREATE TABLE track (trackid NUMBER(12), cd NUMBER(12), position NUMBER(12), title VARCHAR(255), last_updated_on DATE, last_updated_at DATE, small_dt DATE)"); |
67 | 69 | |
68 | 70 | $dbh->do("ALTER TABLE artist ADD (CONSTRAINT artist_pk PRIMARY KEY (artistid))"); |
71 | $dbh->do("ALTER TABLE cd ADD (CONSTRAINT cd_pk PRIMARY KEY (cdid))"); | |
69 | 72 | $dbh->do("ALTER TABLE sequence_test ADD (CONSTRAINT sequence_test_constraint PRIMARY KEY (pkid1, pkid2))"); |
70 | 73 | $dbh->do(qq{ |
71 | 74 | CREATE OR REPLACE TRIGGER artist_insert_trg |
79 | 82 | END IF; |
80 | 83 | END; |
81 | 84 | }); |
85 | $dbh->do(qq{ | |
86 | CREATE OR REPLACE TRIGGER cd_insert_trg | |
87 | BEFORE INSERT ON cd | |
88 | FOR EACH ROW | |
89 | BEGIN | |
90 | IF :new.cdid IS NULL THEN | |
91 | SELECT cd_seq.nextval | |
92 | INTO :new.cdid | |
93 | FROM DUAL; | |
94 | END IF; | |
95 | END; | |
96 | }); | |
82 | 97 | |
83 | 98 | { |
84 | 99 | # Swiped from t/bindtype_columns.t to avoid creating my own Resultset. |
87 | 102 | eval { $dbh->do('DROP TABLE bindtype_test') }; |
88 | 103 | |
89 | 104 | $dbh->do(qq[ |
90 | CREATE TABLE bindtype_test | |
105 | CREATE TABLE bindtype_test | |
91 | 106 | ( |
92 | 107 | id integer NOT NULL PRIMARY KEY, |
93 | 108 | bytea integer NULL, |
107 | 122 | my $new = $schema->resultset('Artist')->create({ name => 'foo' }); |
108 | 123 | is($new->artistid, 1, "Oracle Auto-PK worked"); |
109 | 124 | |
125 | my $cd = $schema->resultset('CD')->create({ artist => 1, title => 'EP C', year => '2003' }); | |
126 | is($new->artistid, 1, "Oracle Auto-PK worked - using scalar ref as table name"); | |
127 | ||
110 | 128 | # test again with fully-qualified table name |
111 | 129 | $new = $schema->resultset('ArtistFQN')->create( { name => 'bar' } ); |
112 | 130 | is( $new->artistid, 2, "Oracle Auto-PK worked with fully-qualified tablename" ); |
113 | 131 | |
114 | 132 | # test join with row count ambiguity |
115 | 133 | |
116 | my $cd = $schema->resultset('CD')->create({ cdid => 1, artist => 1, title => 'EP C', year => '2003' }); | |
117 | 134 | my $track = $schema->resultset('Track')->create({ trackid => 1, cd => 1, |
118 | 135 | position => 1, title => 'Track1' }); |
119 | 136 | my $tjoin = $schema->resultset('Track')->search({ 'me.title' => 'Track1'}, |
148 | 165 | |
149 | 166 | $tcount = $schema->resultset('Track')->search( |
150 | 167 | {}, |
151 | { | |
168 | { | |
152 | 169 | group_by => [ qw/position title/ ] |
153 | 170 | } |
154 | 171 | ); |
211 | 228 | END { |
212 | 229 | if($schema && ($dbh = $schema->storage->dbh)) { |
213 | 230 | $dbh->do("DROP SEQUENCE artist_seq"); |
231 | $dbh->do("DROP SEQUENCE cd_seq"); | |
214 | 232 | $dbh->do("DROP SEQUENCE pkid1_seq"); |
215 | 233 | $dbh->do("DROP SEQUENCE pkid2_seq"); |
216 | 234 | $dbh->do("DROP SEQUENCE nonpkid_seq"); |
142 | 142 | my ($storage, $dbh) = @_; |
143 | 143 | eval { $dbh->do("DROP TABLE money_test") }; |
144 | 144 | $dbh->do(<<'SQL'); |
145 | ||
146 | 145 | CREATE TABLE money_test ( |
147 | 146 | id INT IDENTITY PRIMARY KEY, |
148 | 147 | amount MONEY NULL |
149 | 148 | ) |
150 | ||
151 | 149 | SQL |
152 | ||
153 | 150 | }); |
154 | 151 | |
155 | 152 | my $rs = $schema->resultset('Money'); |
0 | 0 | use strict; |
1 | use warnings; | |
1 | use warnings; | |
2 | no warnings 'uninitialized'; | |
2 | 3 | |
3 | 4 | use Test::More; |
4 | 5 | use Test::Exception; |
5 | 6 | use lib qw(t/lib); |
6 | 7 | use DBICTest; |
7 | 8 | |
9 | require DBIx::Class::Storage::DBI::Sybase; | |
10 | require DBIx::Class::Storage::DBI::Sybase::NoBindVars; | |
11 | ||
8 | 12 | my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_SYBASE_${_}" } qw/DSN USER PASS/}; |
9 | 13 | |
10 | plan skip_all => 'Set $ENV{DBICTEST_SYBASE_DSN}, _USER and _PASS to run this test' | |
11 | unless ($dsn && $user); | |
12 | ||
13 | plan tests => 13; | |
14 | ||
15 | my $schema = DBICTest::Schema->connect($dsn, $user, $pass, {AutoCommit => 1}); | |
16 | ||
17 | # start disconnected to test reconnection | |
18 | $schema->storage->ensure_connected; | |
19 | $schema->storage->_dbh->disconnect; | |
20 | ||
21 | isa_ok( $schema->storage, 'DBIx::Class::Storage::DBI::Sybase' ); | |
22 | ||
23 | my $dbh; | |
24 | lives_ok (sub { | |
25 | $dbh = $schema->storage->dbh; | |
26 | }, 'reconnect works'); | |
27 | ||
28 | $schema->storage->dbh_do (sub { | |
29 | my ($storage, $dbh) = @_; | |
30 | eval { $dbh->do("DROP TABLE artist") }; | |
31 | $dbh->do(<<'SQL'); | |
32 | ||
14 | my $TESTS = 63 + 2; | |
15 | ||
16 | if (not ($dsn && $user)) { | |
17 | plan skip_all => | |
18 | 'Set $ENV{DBICTEST_SYBASE_DSN}, _USER and _PASS to run this test' . | |
19 | "\nWarning: This test drops and creates the tables " . | |
20 | "'artist', 'money_test' and 'bindtype_test'"; | |
21 | } else { | |
22 | plan tests => $TESTS*2 + 1; | |
23 | } | |
24 | ||
25 | my @storage_types = ( | |
26 | 'DBI::Sybase', | |
27 | 'DBI::Sybase::NoBindVars', | |
28 | ); | |
29 | my $schema; | |
30 | my $storage_idx = -1; | |
31 | ||
32 | sub get_schema { | |
33 | DBICTest::Schema->connect($dsn, $user, $pass, { | |
34 | on_connect_call => [ | |
35 | [ blob_setup => log_on_update => 1 ], # this is a safer option | |
36 | ], | |
37 | }); | |
38 | } | |
39 | ||
40 | my $ping_count = 0; | |
41 | { | |
42 | my $ping = DBIx::Class::Storage::DBI::Sybase->can('_ping'); | |
43 | *DBIx::Class::Storage::DBI::Sybase::_ping = sub { | |
44 | $ping_count++; | |
45 | goto $ping; | |
46 | }; | |
47 | } | |
48 | ||
49 | for my $storage_type (@storage_types) { | |
50 | $storage_idx++; | |
51 | ||
52 | unless ($storage_type eq 'DBI::Sybase') { # autodetect | |
53 | DBICTest::Schema->storage_type("::$storage_type"); | |
54 | } | |
55 | ||
56 | $schema = get_schema(); | |
57 | ||
58 | $schema->storage->ensure_connected; | |
59 | ||
60 | if ($storage_idx == 0 && | |
61 | $schema->storage->isa('DBIx::Class::Storage::DBI::Sybase::NoBindVars')) { | |
62 | # no placeholders in this version of Sybase or DBD::Sybase (or using FreeTDS) | |
63 | my $tb = Test::More->builder; | |
64 | $tb->skip('no placeholders') for 1..$TESTS; | |
65 | next; | |
66 | } | |
67 | ||
68 | isa_ok( $schema->storage, "DBIx::Class::Storage::$storage_type" ); | |
69 | ||
70 | $schema->storage->_dbh->disconnect; | |
71 | lives_ok (sub { $schema->storage->dbh }, 'reconnect works'); | |
72 | ||
73 | $schema->storage->dbh_do (sub { | |
74 | my ($storage, $dbh) = @_; | |
75 | eval { $dbh->do("DROP TABLE artist") }; | |
76 | $dbh->do(<<'SQL'); | |
33 | 77 | CREATE TABLE artist ( |
34 | artistid INT IDENTITY NOT NULL, | |
78 | artistid INT IDENTITY PRIMARY KEY, | |
35 | 79 | name VARCHAR(100), |
36 | 80 | rank INT DEFAULT 13 NOT NULL, |
37 | charfield CHAR(10) NULL, | |
38 | primary key(artistid) | |
81 | charfield CHAR(10) NULL | |
39 | 82 | ) |
40 | ||
41 | 83 | SQL |
42 | ||
43 | }); | |
44 | ||
45 | my %seen_id; | |
46 | ||
47 | # fresh $schema so we start unconnected | |
48 | $schema = DBICTest::Schema->connect($dsn, $user, $pass, {AutoCommit => 1}); | |
84 | }); | |
85 | ||
86 | my %seen_id; | |
87 | ||
88 | # so we start unconnected | |
89 | $schema->storage->disconnect; | |
49 | 90 | |
50 | 91 | # test primary key handling |
51 | my $new = $schema->resultset('Artist')->create({ name => 'foo' }); | |
52 | ok($new->artistid > 0, "Auto-PK worked"); | |
53 | ||
54 | $seen_id{$new->artistid}++; | |
55 | ||
56 | # test LIMIT support | |
57 | for (1..6) { | |
92 | my $new = $schema->resultset('Artist')->create({ name => 'foo' }); | |
93 | ok($new->artistid > 0, "Auto-PK worked"); | |
94 | ||
95 | $seen_id{$new->artistid}++; | |
96 | ||
97 | # check redispatch to storage-specific insert when auto-detected storage | |
98 | if ($storage_type eq 'DBI::Sybase') { | |
99 | DBICTest::Schema->storage_type('::DBI'); | |
100 | $schema = get_schema(); | |
101 | } | |
102 | ||
103 | $new = $schema->resultset('Artist')->create({ name => 'Artist 1' }); | |
104 | is ( $seen_id{$new->artistid}, undef, 'id for Artist 1 is unique' ); | |
105 | $seen_id{$new->artistid}++; | |
106 | ||
107 | # inserts happen in a txn, so we make sure it still works inside a txn too | |
108 | $schema->txn_begin; | |
109 | ||
110 | for (2..6) { | |
58 | 111 | $new = $schema->resultset('Artist')->create({ name => 'Artist ' . $_ }); |
59 | 112 | is ( $seen_id{$new->artistid}, undef, "id for Artist $_ is unique" ); |
60 | 113 | $seen_id{$new->artistid}++; |
61 | } | |
62 | ||
63 | my $it; | |
64 | ||
65 | $it = $schema->resultset('Artist')->search( {}, { | |
114 | } | |
115 | ||
116 | $schema->txn_commit; | |
117 | ||
118 | # test simple count | |
119 | is ($schema->resultset('Artist')->count, 7, 'count(*) of whole table ok'); | |
120 | ||
121 | # test LIMIT support | |
122 | my $it = $schema->resultset('Artist')->search({ | |
123 | artistid => { '>' => 0 } | |
124 | }, { | |
66 | 125 | rows => 3, |
67 | 126 | order_by => 'artistid', |
68 | }); | |
69 | ||
70 | TODO: { | |
71 | local $TODO = 'Sybase is very very fucked in the limit department'; | |
72 | ||
73 | is( $it->count, 3, "LIMIT count ok" ); | |
127 | }); | |
128 | ||
129 | is( $it->count, 3, "LIMIT count ok" ); | |
130 | ||
131 | is( $it->next->name, "foo", "iterator->next ok" ); | |
132 | $it->next; | |
133 | is( $it->next->name, "Artist 2", "iterator->next ok" ); | |
134 | is( $it->next, undef, "next past end of resultset ok" ); | |
135 | ||
136 | # now try with offset | |
137 | $it = $schema->resultset('Artist')->search({}, { | |
138 | rows => 3, | |
139 | offset => 3, | |
140 | order_by => 'artistid', | |
141 | }); | |
142 | ||
143 | is( $it->count, 3, "LIMIT with offset count ok" ); | |
144 | ||
145 | is( $it->next->name, "Artist 3", "iterator->next ok" ); | |
146 | $it->next; | |
147 | is( $it->next->name, "Artist 5", "iterator->next ok" ); | |
148 | is( $it->next, undef, "next past end of resultset ok" ); | |
149 | ||
150 | # now try a grouped count | |
151 | $schema->resultset('Artist')->create({ name => 'Artist 6' }) | |
152 | for (1..6); | |
153 | ||
154 | $it = $schema->resultset('Artist')->search({}, { | |
155 | group_by => 'name' | |
156 | }); | |
157 | ||
158 | is( $it->count, 7, 'COUNT of GROUP_BY ok' ); | |
159 | ||
160 | # do an IDENTITY_INSERT | |
161 | { | |
162 | no warnings 'redefine'; | |
163 | ||
164 | my @debug_out; | |
165 | local $schema->storage->{debug} = 1; | |
166 | local $schema->storage->debugobj->{callback} = sub { | |
167 | push @debug_out, $_[1]; | |
168 | }; | |
169 | ||
170 | my $txn_used = 0; | |
171 | my $txn_commit = \&DBIx::Class::Storage::DBI::txn_commit; | |
172 | local *DBIx::Class::Storage::DBI::txn_commit = sub { | |
173 | $txn_used = 1; | |
174 | goto &$txn_commit; | |
175 | }; | |
176 | ||
177 | $schema->resultset('Artist') | |
178 | ->create({ artistid => 999, name => 'mtfnpy' }); | |
179 | ||
180 | ok((grep /IDENTITY_INSERT/i, @debug_out), 'IDENTITY_INSERT used'); | |
181 | ||
182 | SKIP: { | |
183 | skip 'not testing lack of txn on IDENTITY_INSERT with NoBindVars', 1 | |
184 | if $storage_type =~ /NoBindVars/i; | |
185 | ||
186 | is $txn_used, 0, 'no txn on insert with IDENTITY_INSERT'; | |
187 | } | |
188 | } | |
189 | ||
190 | # do an IDENTITY_UPDATE | |
191 | { | |
192 | my @debug_out; | |
193 | local $schema->storage->{debug} = 1; | |
194 | local $schema->storage->debugobj->{callback} = sub { | |
195 | push @debug_out, $_[1]; | |
196 | }; | |
197 | ||
198 | lives_and { | |
199 | $schema->resultset('Artist') | |
200 | ->find(999)->update({ artistid => 555 }); | |
201 | ok((grep /IDENTITY_UPDATE/i, @debug_out)); | |
202 | } 'IDENTITY_UPDATE used'; | |
203 | $ping_count-- if $@; | |
204 | } | |
205 | ||
206 | my $bulk_rs = $schema->resultset('Artist')->search({ | |
207 | name => { -like => 'bulk artist %' } | |
208 | }); | |
209 | ||
210 | # test insert_bulk using populate. | |
211 | SKIP: { | |
212 | skip 'insert_bulk not supported', 4 | |
213 | unless $storage_type !~ /NoBindVars/i; | |
214 | ||
215 | lives_ok { | |
216 | $schema->resultset('Artist')->populate([ | |
217 | { | |
218 | name => 'bulk artist 1', | |
219 | charfield => 'foo', | |
220 | }, | |
221 | { | |
222 | name => 'bulk artist 2', | |
223 | charfield => 'foo', | |
224 | }, | |
225 | { | |
226 | name => 'bulk artist 3', | |
227 | charfield => 'foo', | |
228 | }, | |
229 | ]); | |
230 | } 'insert_bulk via populate'; | |
231 | ||
232 | is $bulk_rs->count, 3, 'correct number inserted via insert_bulk'; | |
233 | ||
234 | is ((grep $_->charfield eq 'foo', $bulk_rs->all), 3, | |
235 | 'column set correctly via insert_bulk'); | |
236 | ||
237 | my %bulk_ids; | |
238 | @bulk_ids{map $_->artistid, $bulk_rs->all} = (); | |
239 | ||
240 | is ((scalar keys %bulk_ids), 3, | |
241 | 'identities generated correctly in insert_bulk'); | |
242 | ||
243 | $bulk_rs->delete; | |
244 | } | |
245 | ||
246 | # make sure insert_bulk works a second time on the same connection | |
247 | SKIP: { | |
248 | skip 'insert_bulk not supported', 3 | |
249 | unless $storage_type !~ /NoBindVars/i; | |
250 | ||
251 | lives_ok { | |
252 | $schema->resultset('Artist')->populate([ | |
253 | { | |
254 | name => 'bulk artist 1', | |
255 | charfield => 'bar', | |
256 | }, | |
257 | { | |
258 | name => 'bulk artist 2', | |
259 | charfield => 'bar', | |
260 | }, | |
261 | { | |
262 | name => 'bulk artist 3', | |
263 | charfield => 'bar', | |
264 | }, | |
265 | ]); | |
266 | } 'insert_bulk via populate called a second time'; | |
267 | ||
268 | is $bulk_rs->count, 3, | |
269 | 'correct number inserted via insert_bulk'; | |
270 | ||
271 | is ((grep $_->charfield eq 'bar', $bulk_rs->all), 3, | |
272 | 'column set correctly via insert_bulk'); | |
273 | ||
274 | $bulk_rs->delete; | |
275 | } | |
276 | ||
277 | # test invalid insert_bulk (missing required column) | |
278 | # | |
279 | # There should be a rollback, reconnect and the next valid insert_bulk should | |
280 | # succeed. | |
281 | throws_ok { | |
282 | $schema->resultset('Artist')->populate([ | |
283 | { | |
284 | charfield => 'foo', | |
285 | } | |
286 | ]); | |
287 | } qr/no value or default|does not allow null|placeholders/i, | |
288 | # The second pattern is the error from fallback to regular array insert on | |
289 | # incompatible charset. | |
290 | # The third is for ::NoBindVars with no syb_has_blk. | |
291 | 'insert_bulk with missing required column throws error'; | |
292 | ||
293 | # now test insert_bulk with IDENTITY_INSERT | |
294 | SKIP: { | |
295 | skip 'insert_bulk not supported', 3 | |
296 | unless $storage_type !~ /NoBindVars/i; | |
297 | ||
298 | lives_ok { | |
299 | $schema->resultset('Artist')->populate([ | |
300 | { | |
301 | artistid => 2001, | |
302 | name => 'bulk artist 1', | |
303 | charfield => 'foo', | |
304 | }, | |
305 | { | |
306 | artistid => 2002, | |
307 | name => 'bulk artist 2', | |
308 | charfield => 'foo', | |
309 | }, | |
310 | { | |
311 | artistid => 2003, | |
312 | name => 'bulk artist 3', | |
313 | charfield => 'foo', | |
314 | }, | |
315 | ]); | |
316 | } 'insert_bulk with IDENTITY_INSERT via populate'; | |
317 | ||
318 | is $bulk_rs->count, 3, | |
319 | 'correct number inserted via insert_bulk with IDENTITY_INSERT'; | |
320 | ||
321 | is ((grep $_->charfield eq 'foo', $bulk_rs->all), 3, | |
322 | 'column set correctly via insert_bulk with IDENTITY_INSERT'); | |
323 | ||
324 | $bulk_rs->delete; | |
325 | } | |
326 | ||
327 | # test correlated subquery | |
328 | my $subq = $schema->resultset('Artist')->search({ artistid => { '>' => 3 } }) | |
329 | ->get_column('artistid') | |
330 | ->as_query; | |
331 | my $subq_rs = $schema->resultset('Artist')->search({ | |
332 | artistid => { -in => $subq } | |
333 | }); | |
334 | is $subq_rs->count, 11, 'correlated subquery'; | |
335 | ||
336 | # mostly stolen from the blob stuff Nniuq wrote for t/73oracle.t | |
337 | SKIP: { | |
338 | skip 'TEXT/IMAGE support does not work with FreeTDS', 22 | |
339 | if $schema->storage->using_freetds; | |
340 | ||
341 | my $dbh = $schema->storage->_dbh; | |
342 | { | |
343 | local $SIG{__WARN__} = sub {}; | |
344 | eval { $dbh->do('DROP TABLE bindtype_test') }; | |
345 | ||
346 | $dbh->do(qq[ | |
347 | CREATE TABLE bindtype_test | |
348 | ( | |
349 | id INT IDENTITY PRIMARY KEY, | |
350 | bytea IMAGE NULL, | |
351 | blob IMAGE NULL, | |
352 | clob TEXT NULL | |
353 | ) | |
354 | ],{ RaiseError => 1, PrintError => 0 }); | |
355 | } | |
356 | ||
357 | my %binstr = ( 'small' => join('', map { chr($_) } ( 1 .. 127 )) ); | |
358 | $binstr{'large'} = $binstr{'small'} x 1024; | |
359 | ||
360 | my $maxloblen = length $binstr{'large'}; | |
361 | ||
362 | if (not $schema->storage->using_freetds) { | |
363 | $dbh->{'LongReadLen'} = $maxloblen * 2; | |
364 | } else { | |
365 | $dbh->do("set textsize ".($maxloblen * 2)); | |
366 | } | |
367 | ||
368 | my $rs = $schema->resultset('BindType'); | |
369 | my $last_id; | |
370 | ||
371 | foreach my $type (qw(blob clob)) { | |
372 | foreach my $size (qw(small large)) { | |
373 | no warnings 'uninitialized'; | |
374 | ||
375 | my $created; | |
376 | lives_ok { | |
377 | $created = $rs->create( { $type => $binstr{$size} } ) | |
378 | } "inserted $size $type without dying"; | |
379 | ||
380 | $last_id = $created->id if $created; | |
381 | ||
382 | lives_and { | |
383 | ok($rs->find($last_id)->$type eq $binstr{$size}) | |
384 | } "verified inserted $size $type"; | |
385 | } | |
386 | } | |
387 | ||
388 | $rs->delete; | |
389 | ||
390 | # blob insert with explicit PK | |
391 | # also a good opportunity to test IDENTITY_INSERT | |
392 | lives_ok { | |
393 | $rs->create( { id => 1, blob => $binstr{large} } ) | |
394 | } 'inserted large blob without dying with manual PK'; | |
395 | ||
396 | lives_and { | |
397 | ok($rs->find(1)->blob eq $binstr{large}) | |
398 | } 'verified inserted large blob with manual PK'; | |
399 | ||
400 | # try a blob update | |
401 | my $new_str = $binstr{large} . 'mtfnpy'; | |
402 | ||
403 | # check redispatch to storage-specific update when auto-detected storage | |
404 | if ($storage_type eq 'DBI::Sybase') { | |
405 | DBICTest::Schema->storage_type('::DBI'); | |
406 | $schema = get_schema(); | |
407 | } | |
408 | ||
409 | lives_ok { | |
410 | $rs->search({ id => 1 })->update({ blob => $new_str }) | |
411 | } 'updated blob successfully'; | |
412 | ||
413 | lives_and { | |
414 | ok($rs->find(1)->blob eq $new_str) | |
415 | } 'verified updated blob'; | |
416 | ||
417 | # try a blob update with IDENTITY_UPDATE | |
418 | lives_and { | |
419 | $new_str = $binstr{large} . 'hlagh'; | |
420 | $rs->find(1)->update({ id => 999, blob => $new_str }); | |
421 | ok($rs->find(999)->blob eq $new_str); | |
422 | } 'verified updated blob with IDENTITY_UPDATE'; | |
423 | ||
424 | ## try multi-row blob update | |
425 | # first insert some blobs | |
426 | $new_str = $binstr{large} . 'foo'; | |
427 | lives_and { | |
428 | $rs->delete; | |
429 | $rs->create({ blob => $binstr{large} }) for (1..2); | |
430 | $rs->update({ blob => $new_str }); | |
431 | is((grep $_->blob eq $new_str, $rs->all), 2); | |
432 | } 'multi-row blob update'; | |
433 | ||
434 | $rs->delete; | |
435 | ||
436 | # now try insert_bulk with blobs and only blobs | |
437 | $new_str = $binstr{large} . 'bar'; | |
438 | lives_ok { | |
439 | $rs->populate([ | |
440 | { | |
441 | bytea => 1, | |
442 | blob => $binstr{large}, | |
443 | clob => $new_str, | |
444 | }, | |
445 | { | |
446 | bytea => 1, | |
447 | blob => $binstr{large}, | |
448 | clob => $new_str, | |
449 | }, | |
450 | ]); | |
451 | } 'insert_bulk with blobs does not die'; | |
452 | ||
453 | is((grep $_->blob eq $binstr{large}, $rs->all), 2, | |
454 | 'IMAGE column set correctly via insert_bulk'); | |
455 | ||
456 | is((grep $_->clob eq $new_str, $rs->all), 2, | |
457 | 'TEXT column set correctly via insert_bulk'); | |
458 | ||
459 | # now try insert_bulk with blobs and a non-blob which also happens to be an | |
460 | # identity column | |
461 | SKIP: { | |
462 | skip 'no insert_bulk without placeholders', 4 | |
463 | if $storage_type =~ /NoBindVars/i; | |
464 | ||
465 | $rs->delete; | |
466 | $new_str = $binstr{large} . 'bar'; | |
467 | lives_ok { | |
468 | $rs->populate([ | |
469 | { | |
470 | id => 1, | |
471 | bytea => 1, | |
472 | blob => $binstr{large}, | |
473 | clob => $new_str, | |
474 | }, | |
475 | { | |
476 | id => 2, | |
477 | bytea => 1, | |
478 | blob => $binstr{large}, | |
479 | clob => $new_str, | |
480 | }, | |
481 | ]); | |
482 | } 'insert_bulk with blobs and explicit identity does NOT die'; | |
483 | ||
484 | is((grep $_->blob eq $binstr{large}, $rs->all), 2, | |
485 | 'IMAGE column set correctly via insert_bulk with identity'); | |
486 | ||
487 | is((grep $_->clob eq $new_str, $rs->all), 2, | |
488 | 'TEXT column set correctly via insert_bulk with identity'); | |
489 | ||
490 | is_deeply [ map $_->id, $rs->all ], [ 1,2 ], | |
491 | 'explicit identities set correctly via insert_bulk with blobs'; | |
492 | } | |
493 | ||
494 | lives_and { | |
495 | $rs->delete; | |
496 | $rs->create({ blob => $binstr{large} }) for (1..2); | |
497 | $rs->update({ blob => undef }); | |
498 | is((grep !defined($_->blob), $rs->all), 2); | |
499 | } 'blob update to NULL'; | |
500 | } | |
501 | ||
502 | # test MONEY column support (and some other misc. stuff) | |
503 | $schema->storage->dbh_do (sub { | |
504 | my ($storage, $dbh) = @_; | |
505 | eval { $dbh->do("DROP TABLE money_test") }; | |
506 | $dbh->do(<<'SQL'); | |
507 | CREATE TABLE money_test ( | |
508 | id INT IDENTITY PRIMARY KEY, | |
509 | amount MONEY DEFAULT $999.99 NULL | |
510 | ) | |
511 | SQL | |
512 | }); | |
513 | ||
514 | my $rs = $schema->resultset('Money'); | |
515 | ||
516 | # test insert with defaults | |
517 | lives_and { | |
518 | $rs->create({}); | |
519 | is((grep $_->amount == 999.99, $rs->all), 1); | |
520 | } 'insert with all defaults works'; | |
521 | $rs->delete; | |
522 | ||
523 | # test insert transaction when there's an active cursor | |
524 | { | |
525 | my $artist_rs = $schema->resultset('Artist'); | |
526 | $artist_rs->first; | |
527 | lives_ok { | |
528 | my $row = $schema->resultset('Money')->create({ amount => 100 }); | |
529 | $row->delete; | |
530 | } 'inserted a row with an active cursor'; | |
531 | $ping_count-- if $@; # dbh_do calls ->connected | |
532 | } | |
533 | ||
534 | # test insert in an outer transaction when there's an active cursor | |
535 | TODO: { | |
536 | local $TODO = 'this should work once we have eager cursors'; | |
537 | ||
538 | # clear state, or we get a deadlock on $row->delete | |
539 | # XXX figure out why this happens | |
540 | $schema->storage->disconnect; | |
541 | ||
542 | lives_ok { | |
543 | $schema->txn_do(sub { | |
544 | my $artist_rs = $schema->resultset('Artist'); | |
545 | $artist_rs->first; | |
546 | my $row = $schema->resultset('Money')->create({ amount => 100 }); | |
547 | $row->delete; | |
548 | }); | |
549 | } 'inserted a row with an active cursor in outer txn'; | |
550 | $ping_count-- if $@; # dbh_do calls ->connected | |
551 | } | |
552 | ||
553 | # Now test money values. | |
554 | my $row; | |
555 | lives_ok { | |
556 | $row = $rs->create({ amount => 100 }); | |
557 | } 'inserted a money value'; | |
558 | ||
559 | is eval { $rs->find($row->id)->amount }, 100, 'money value round-trip'; | |
560 | ||
561 | lives_ok { | |
562 | $row->update({ amount => 200 }); | |
563 | } 'updated a money value'; | |
564 | ||
565 | is eval { $rs->find($row->id)->amount }, | |
566 | 200, 'updated money value round-trip'; | |
567 | ||
568 | lives_ok { | |
569 | $row->update({ amount => undef }); | |
570 | } 'updated a money value to NULL'; | |
571 | ||
572 | my $null_amount = eval { $rs->find($row->id)->amount }; | |
573 | ok( | |
574 | (($null_amount == undef) && (not $@)), | |
575 | 'updated money value to NULL round-trip' | |
576 | ); | |
577 | diag $@ if $@; | |
74 | 578 | } |
75 | 579 | |
76 | # The iterator still works correctly with rows => 3, even though the sql is | |
77 | # fucked, very interesting. | |
78 | ||
79 | is( $it->next->name, "foo", "iterator->next ok" ); | |
80 | $it->next; | |
81 | is( $it->next->name, "Artist 2", "iterator->next ok" ); | |
82 | is( $it->next, undef, "next past end of resultset ok" ); | |
83 | ||
580 | is $ping_count, 0, 'no pings'; | |
84 | 581 | |
85 | 582 | # clean up our mess |
86 | 583 | END { |
87 | my $dbh = eval { $schema->storage->_dbh }; | |
88 | $dbh->do('DROP TABLE artist') if $dbh; | |
584 | if (my $dbh = eval { $schema->storage->_dbh }) { | |
585 | eval { $dbh->do("DROP TABLE $_") } | |
586 | for qw/artist bindtype_test money_test/; | |
587 | } | |
89 | 588 | } |
90 |
17 | 17 | plan skip_all => 'Set $ENV{DBICTEST_MSSQL_DSN}, _USER and _PASS to run this test' |
18 | 18 | unless ($dsn); |
19 | 19 | |
20 | my $TESTS = 13; | |
20 | my $TESTS = 18; | |
21 | 21 | |
22 | 22 | plan tests => $TESTS * 2; |
23 | 23 | |
132 | 132 | |
133 | 133 | is $rs->find($row->id)->amount, |
134 | 134 | undef, 'updated money value to NULL round-trip'; |
135 | ||
136 | $rs->create({ amount => 300 }) for (1..3); | |
137 | ||
138 | # test multiple active statements | |
139 | lives_ok { | |
140 | my $artist_rs = $schema->resultset('Artist'); | |
141 | while (my $row = $rs->next) { | |
142 | my $artist = $artist_rs->next; | |
143 | } | |
144 | $rs->reset; | |
145 | } 'multiple active statements'; | |
146 | ||
147 | $rs->delete; | |
148 | ||
149 | # test simple transaction with commit | |
150 | lives_ok { | |
151 | $schema->txn_do(sub { | |
152 | $rs->create({ amount => 400 }); | |
153 | }); | |
154 | } 'simple transaction'; | |
155 | ||
156 | cmp_ok $rs->first->amount, '==', 400, 'committed'; | |
157 | $rs->reset; | |
158 | ||
159 | $rs->delete; | |
160 | ||
161 | # test rollback | |
162 | throws_ok { | |
163 | $schema->txn_do(sub { | |
164 | $rs->create({ amount => 400 }); | |
165 | die 'mtfnpy'; | |
166 | }); | |
167 | } qr/mtfnpy/, 'simple failed txn'; | |
168 | ||
169 | is $rs->first, undef, 'rolled back'; | |
170 | $rs->reset; | |
135 | 171 | } |
136 | 172 | |
137 | 173 | # clean up our mess |
51 | 51 | my $cd_rs = $schema->resultset('CD')->search({ 'artist.name' => 'Caterwauler McCrae' }, { join => 'artist' }); |
52 | 52 | |
53 | 53 | my $cd = $cd_rs->find_or_new({ title => 'Huh?', year => 2006 }); |
54 | ok(! $cd->in_storage, 'new CD not in storage yet'); | |
54 | is($cd->in_storage, 0, 'new CD not in storage yet'); | |
55 | 55 | is($cd->title, 'Huh?', 'new CD title is correct'); |
56 | 56 | is($cd->year, 2006, 'new CD year is correct'); |
57 | 57 | } |
194 | 194 | { key => 'cd_artist_title' } |
195 | 195 | ); |
196 | 196 | |
197 | ok(!$cd1->in_storage, 'CD is not in storage yet after update_or_new'); | |
197 | is($cd1->in_storage, 0, 'CD is not in storage yet after update_or_new'); | |
198 | 198 | $cd1->insert; |
199 | 199 | ok($cd1->in_storage, 'CD got added to strage after update_or_new && insert'); |
200 | 200 |
4 | 4 | use Test::More; |
5 | 5 | use File::Spec; |
6 | 6 | use File::Copy; |
7 | use Time::HiRes qw/time sleep/; | |
8 | 7 | |
9 | 8 | #warn "$dsn $user $pass"; |
10 | 9 | my ($dsn, $user, $pass); |
14 | 13 | |
15 | 14 | plan skip_all => 'Set $ENV{DBICTEST_MYSQL_DSN}, _USER and _PASS to run this test' |
16 | 15 | unless ($dsn); |
16 | ||
17 | eval { require Time::HiRes } | |
18 | || plan skip_all => 'Test needs Time::HiRes'; | |
19 | Time::HiRes->import(qw/time sleep/); | |
17 | 20 | |
18 | 21 | require DBIx::Class::Storage::DBI; |
19 | 22 | plan skip_all => |
0 | use strict; | |
1 | use warnings; | |
2 | ||
3 | use Test::More; | |
4 | use Test::Exception; | |
5 | use lib qw(t/lib); | |
6 | use DBICTest; | |
7 | ||
8 | my ($dsn, $user, $pass) = @ENV{map { "DBICTEST_SYBASE_${_}" } qw/DSN USER PASS/}; | |
9 | ||
10 | if (not ($dsn && $user)) { | |
11 | plan skip_all => | |
12 | 'Set $ENV{DBICTEST_SYBASE_DSN}, _USER and _PASS to run this test' . | |
13 | "\nWarning: This test drops and creates a table called 'track'"; | |
14 | } else { | |
15 | eval "use DateTime; use DateTime::Format::Sybase;"; | |
16 | if ($@) { | |
17 | plan skip_all => 'needs DateTime and DateTime::Format::Sybase for testing'; | |
18 | } | |
19 | else { | |
20 | plan tests => (4 * 2 * 2) + 2; # (tests * dt_types * storage_types) + storage_tests | |
21 | } | |
22 | } | |
23 | ||
24 | my @storage_types = ( | |
25 | 'DBI::Sybase', | |
26 | 'DBI::Sybase::NoBindVars', | |
27 | ); | |
28 | my $schema; | |
29 | ||
30 | for my $storage_type (@storage_types) { | |
31 | $schema = DBICTest::Schema->clone; | |
32 | ||
33 | unless ($storage_type eq 'DBI::Sybase') { # autodetect | |
34 | $schema->storage_type("::$storage_type"); | |
35 | } | |
36 | $schema->connection($dsn, $user, $pass, { | |
37 | AutoCommit => 1, | |
38 | on_connect_call => [ 'datetime_setup' ], | |
39 | }); | |
40 | ||
41 | $schema->storage->ensure_connected; | |
42 | ||
43 | isa_ok( $schema->storage, "DBIx::Class::Storage::$storage_type" ); | |
44 | ||
45 | # coltype, col, date | |
46 | my @dt_types = ( | |
47 | ['DATETIME', 'last_updated_at', '2004-08-21T14:36:48.080Z'], | |
48 | # minute precision | |
49 | ['SMALLDATETIME', 'small_dt', '2004-08-21T14:36:00.000Z'], | |
50 | ); | |
51 | ||
52 | for my $dt_type (@dt_types) { | |
53 | my ($type, $col, $sample_dt) = @$dt_type; | |
54 | ||
55 | eval { $schema->storage->dbh->do("DROP TABLE track") }; | |
56 | $schema->storage->dbh->do(<<"SQL"); | |
57 | CREATE TABLE track ( | |
58 | trackid INT IDENTITY PRIMARY KEY, | |
59 | cd INT, | |
60 | position INT, | |
61 | $col $type, | |
62 | ) | |
63 | SQL | |
64 | ok(my $dt = DateTime::Format::Sybase->parse_datetime($sample_dt)); | |
65 | ||
66 | my $row; | |
67 | ok( $row = $schema->resultset('Track')->create({ | |
68 | $col => $dt, | |
69 | cd => 1, | |
70 | })); | |
71 | ok( $row = $schema->resultset('Track') | |
72 | ->search({ trackid => $row->trackid }, { select => [$col] }) | |
73 | ->first | |
74 | ); | |
75 | is( $row->$col, $dt, 'DateTime roundtrip' ); | |
76 | } | |
77 | } | |
78 | ||
79 | # clean up our mess | |
80 | END { | |
81 | if (my $dbh = eval { $schema->storage->_dbh }) { | |
82 | $dbh->do('DROP TABLE track'); | |
83 | } | |
84 | } |
0 | 0 | use strict; |
1 | use warnings; | |
1 | use warnings; | |
2 | 2 | |
3 | use Test::More qw(no_plan); | |
3 | use Test::More; | |
4 | 4 | use lib qw(t/lib); |
5 | 5 | use DBICTest; |
6 | 6 | my $schema = DBICTest->init_schema(); |
8 | 8 | # Under some versions of SQLite if the $rs is left hanging around it will lock |
9 | 9 | # So we create a scope here cos I'm lazy |
10 | 10 | { |
11 | my $rs = $schema->resultset('CD'); | |
11 | my $rs = $schema->resultset('CD')->search ({}, { order_by => 'cdid' }); | |
12 | 12 | |
13 | 13 | # get the defined columns |
14 | 14 | my @dbic_cols = sort $rs->result_source->columns; |
22 | 22 | my @hashref_cols = sort keys %$datahashref1; |
23 | 23 | |
24 | 24 | is_deeply( \@dbic_cols, \@hashref_cols, 'returned columns' ); |
25 | ||
26 | my $cd1 = $rs->find ({cdid => 1}); | |
27 | is_deeply ( $cd1, $datahashref1, 'first/find return the same thing'); | |
25 | 28 | } |
26 | ||
27 | 29 | |
28 | 30 | sub check_cols_of { |
29 | 31 | my ($dbic_obj, $datahashref) = @_; |
134 | 136 | [{ $artist->get_columns, cds => [] }], |
135 | 137 | 'nested has_many prefetch without entries' |
136 | 138 | ); |
139 | ||
140 | done_testing; |
0 | -- | |
1 | -- Created by SQL::Translator::Producer::SQLite | |
2 | -- Created on Mon Sep 21 00:11:34 2009 | |
0 | -- Created on Tue Aug 25 12:34:34 2009 | |
3 | 1 | -- |
4 | 2 | |
5 | 3 |
4 | 4 | use Test::Exception; |
5 | 5 | use lib qw(t/lib); |
6 | 6 | use DBICTest; |
7 | use DBIC::SqlMakerTest; | |
7 | 8 | |
8 | 9 | my $schema = DBICTest->init_schema(); |
9 | 10 | my $sdebug = $schema->storage->debug; |
10 | ||
11 | plan tests => 79; | |
12 | 11 | |
13 | 12 | # has_a test |
14 | 13 | my $cd = $schema->resultset("CD")->find(4); |
133 | 132 | year => 2007, |
134 | 133 | } ); |
135 | 134 | is( $cd->title, 'Greatest Hits 2: Louder Than Ever', 'find_or_new_related new record ok' ); |
136 | ok( ! $cd->in_storage, 'find_or_new_related on a new record: not in_storage' ); | |
135 | is( $cd->in_storage, 0, 'find_or_new_related on a new record: not in_storage' ); | |
137 | 136 | |
138 | 137 | $cd->artist(undef); |
139 | 138 | my $newartist = $cd->find_or_new_related( 'artist', { |
259 | 258 | is($def_artist_cd->search_related('artist')->count, 0, 'closed search on null FK'); |
260 | 259 | |
261 | 260 | # test undirected many-to-many relationship (e.g. "related artists") |
262 | my $undir_maps = $schema->resultset("Artist")->find(1)->artist_undirected_maps; | |
261 | my $undir_maps = $schema->resultset("Artist") | |
262 | ->search ({artistid => 1}) | |
263 | ->search_related ('artist_undirected_maps'); | |
263 | 264 | is($undir_maps->count, 1, 'found 1 undirected map for artist 1'); |
265 | is_same_sql_bind ( | |
266 | $undir_maps->as_query, | |
267 | '( | |
268 | SELECT artist_undirected_maps.id1, artist_undirected_maps.id2 | |
269 | FROM artist me | |
270 | LEFT JOIN artist_undirected_map artist_undirected_maps | |
271 | ON artist_undirected_maps.id1 = me.artistid OR artist_undirected_maps.id2 = me.artistid | |
272 | WHERE ( artistid = ? ) | |
273 | )', | |
274 | [[artistid => 1]], | |
275 | 'expected join sql produced', | |
276 | ); | |
264 | 277 | |
265 | 278 | $undir_maps = $schema->resultset("Artist")->find(2)->artist_undirected_maps; |
266 | 279 | is($undir_maps->count, 1, 'found 1 undirected map for artist 2'); |
309 | 322 | |
310 | 323 | $cds = $schema->resultset("CD")->search({ 'me.cdid' => 5 }, { join => { single_track => { cd => {} } } }); |
311 | 324 | is($cds->count, 1, "subjoins under left joins force_left (hashref)"); |
325 | ||
326 | done_testing; |
75 | 75 | $schema->storage->debugcb(undef); |
76 | 76 | $schema->storage->debug ($sdebug); |
77 | 77 | |
78 | my ($search_sql) = $sql[0] =~ /^(SELECT .+?)\:/; | |
78 | 79 | is_same_sql ( |
79 | $sql[0], | |
80 | $search_sql, | |
80 | 81 | 'SELECT me.cdid, me.artist, me.title, me.year, me.genreid, me.single_track |
81 | 82 | FROM cd me |
82 | 83 | WHERE ( me.artist = ? AND me.title = ? AND me.genreid = ? ) |
0 | use strict; | |
1 | use warnings; | |
2 | ||
3 | use lib qw(t/lib); | |
4 | use Test::More; | |
5 | use Test::Exception; | |
6 | use DBICTest; | |
7 | ||
8 | my $schema = DBICTest->init_schema(); | |
9 | ||
10 | my $tkfks = $schema->resultset('Artist'); | |
11 | ||
12 | ok !$tkfks->is_paged, 'vanilla resultset is not paginated'; | |
13 | ||
14 | my $paginated = $tkfks->search(undef, { page => 5 }); | |
15 | ok $paginated->is_paged, 'resultset is paginated now'; | |
16 | ||
17 | done_testing; | |
18 |
78 | 78 | ); |
79 | 79 | |
80 | 80 | # grouping on PKs only should pass |
81 | $sub_rs->search ({}, { group_by => [ reverse $sub_rs->result_source->primary_columns ] }) # reverse to make sure the comaprison works | |
82 | ->update ({ pilot_sequence => \ 'pilot_sequence + 1' }); | |
81 | $sub_rs->search ( | |
82 | {}, | |
83 | { | |
84 | group_by => [ reverse $sub_rs->result_source->primary_columns ], # reverse to make sure the PK-list comaprison works | |
85 | }, | |
86 | )->update ({ pilot_sequence => \ 'pilot_sequence + 1' }); | |
83 | 87 | |
84 | 88 | is_deeply ( |
85 | 89 | [ $tkfks->search ({ autopilot => [qw/a b x y/]}, { order_by => 'autopilot' }) |
89 | 93 | 'Only two rows incremented', |
90 | 94 | ); |
91 | 95 | |
96 | # also make sure weird scalarref usage works (RT#51409) | |
97 | $tkfks->search ( | |
98 | \ 'pilot_sequence BETWEEN 11 AND 21', | |
99 | )->update ({ pilot_sequence => \ 'pilot_sequence + 1' }); | |
100 | ||
101 | is_deeply ( | |
102 | [ $tkfks->search ({ autopilot => [qw/a b x y/]}, { order_by => 'autopilot' }) | |
103 | ->get_column ('pilot_sequence')->all | |
104 | ], | |
105 | [qw/12 22 30 40/], | |
106 | 'Only two rows incremented (where => scalarref works)', | |
107 | ); | |
108 | ||
92 | 109 | $sub_rs->delete; |
93 | 110 | |
94 | 111 | is ($tkfks->count, $tkfk_cnt -= 2, 'Only two rows deleted'); |
2 | 2 | no warnings qw/once redefine/; |
3 | 3 | |
4 | 4 | use lib qw(t/lib); |
5 | use DBI; | |
5 | 6 | use DBICTest; |
7 | use DBICTest::Schema; | |
8 | use DBIx::Class::Storage::DBI; | |
6 | 9 | |
7 | use Test::More tests => 9; | |
10 | # !!! do not replace this with done_testing - tests reside in the callbacks | |
11 | # !!! number of calls is important | |
12 | use Test::More tests => 16; | |
13 | # !!! | |
8 | 14 | |
9 | use DBIx::Class::Storage::DBI; | |
10 | my $schema = DBICTest->init_schema( | |
11 | no_connect => 1, | |
12 | no_deploy => 1, | |
13 | ); | |
15 | my $schema = DBICTest::Schema->clone; | |
14 | 16 | |
15 | local *DBIx::Class::Storage::DBI::connect_call_foo = sub { | |
16 | isa_ok $_[0], 'DBIx::Class::Storage::DBI', | |
17 | 'got storage in connect_call method'; | |
18 | is $_[1], 'bar', 'got param in connect_call method'; | |
19 | }; | |
17 | { | |
18 | *DBIx::Class::Storage::DBI::connect_call_foo = sub { | |
19 | isa_ok $_[0], 'DBIx::Class::Storage::DBI', | |
20 | 'got storage in connect_call method'; | |
21 | is $_[1], 'bar', 'got param in connect_call method'; | |
22 | }; | |
20 | 23 | |
21 | local *DBIx::Class::Storage::DBI::disconnect_call_foo = sub { | |
22 | isa_ok $_[0], 'DBIx::Class::Storage::DBI', | |
23 | 'got storage in disconnect_call method'; | |
24 | }; | |
24 | *DBIx::Class::Storage::DBI::disconnect_call_foo = sub { | |
25 | isa_ok $_[0], 'DBIx::Class::Storage::DBI', | |
26 | 'got storage in disconnect_call method'; | |
27 | }; | |
25 | 28 | |
26 | ok $schema->connection( | |
27 | DBICTest->_database, | |
28 | { | |
29 | on_connect_call => [ | |
30 | [ do_sql => 'create table test1 (id integer)' ], | |
31 | [ do_sql => [ 'insert into test1 values (?)', {}, 1 ] ], | |
32 | [ do_sql => sub { ['insert into test1 values (2)'] } ], | |
33 | [ sub { $_[0]->dbh->do($_[1]) }, 'insert into test1 values (3)' ], | |
34 | # this invokes $storage->connect_call_foo('bar') (above) | |
35 | [ foo => 'bar' ], | |
36 | ], | |
37 | on_connect_do => 'insert into test1 values (4)', | |
38 | on_disconnect_call => 'foo', | |
39 | }, | |
40 | ), 'connection()'; | |
29 | ok $schema->connection( | |
30 | DBICTest->_database, | |
31 | { | |
32 | on_connect_call => [ | |
33 | [ do_sql => 'create table test1 (id integer)' ], | |
34 | [ do_sql => [ 'insert into test1 values (?)', {}, 1 ] ], | |
35 | [ do_sql => sub { ['insert into test1 values (2)'] } ], | |
36 | [ sub { $_[0]->dbh->do($_[1]) }, 'insert into test1 values (3)' ], | |
37 | # this invokes $storage->connect_call_foo('bar') (above) | |
38 | [ foo => 'bar' ], | |
39 | ], | |
40 | on_connect_do => 'insert into test1 values (4)', | |
41 | on_disconnect_call => 'foo', | |
42 | }, | |
43 | ), 'connection()'; | |
41 | 44 | |
42 | is_deeply ( | |
43 | $schema->storage->dbh->selectall_arrayref('select * from test1'), | |
44 | [ [ 1 ], [ 2 ], [ 3 ], [ 4 ] ], | |
45 | 'on_connect_call/do actions worked' | |
46 | ); | |
45 | ok (! $schema->storage->connected, 'start disconnected'); | |
47 | 46 | |
48 | local *DBIx::Class::Storage::DBI::connect_call_foo = sub { | |
49 | isa_ok $_[0], 'DBIx::Class::Storage::DBI', | |
50 | 'got storage in connect_call method'; | |
51 | }; | |
47 | is_deeply ( | |
48 | $schema->storage->dbh->selectall_arrayref('select * from test1'), | |
49 | [ [ 1 ], [ 2 ], [ 3 ], [ 4 ] ], | |
50 | 'on_connect_call/do actions worked' | |
51 | ); | |
52 | 52 | |
53 | local *DBIx::Class::Storage::DBI::connect_call_bar = sub { | |
54 | isa_ok $_[0], 'DBIx::Class::Storage::DBI', | |
55 | 'got storage in connect_call method'; | |
56 | }; | |
53 | $schema->storage->disconnect; | |
54 | } | |
57 | 55 | |
58 | $schema->storage->disconnect; | |
56 | { | |
57 | *DBIx::Class::Storage::DBI::connect_call_foo = sub { | |
58 | isa_ok $_[0], 'DBIx::Class::Storage::DBI', | |
59 | 'got storage in connect_call method'; | |
60 | }; | |
59 | 61 | |
60 | ok $schema->connection( | |
61 | DBICTest->_database, | |
62 | { | |
63 | # method list form | |
64 | on_connect_call => [ 'foo', sub { ok 1, "coderef in list form" }, 'bar' ], | |
65 | }, | |
66 | ), 'connection()'; | |
62 | *DBIx::Class::Storage::DBI::connect_call_bar = sub { | |
63 | isa_ok $_[0], 'DBIx::Class::Storage::DBI', | |
64 | 'got storage in connect_call method'; | |
65 | }; | |
67 | 66 | |
68 | $schema->storage->ensure_connected; | |
67 | ||
68 | ok $schema->connection( | |
69 | DBICTest->_database, | |
70 | { | |
71 | # method list form | |
72 | on_connect_call => [ 'foo', sub { ok 1, "coderef in list form" }, 'bar' ], | |
73 | }, | |
74 | ), 'connection()'; | |
75 | ||
76 | ok (! $schema->storage->connected, 'start disconnected'); | |
77 | $schema->storage->ensure_connected; | |
78 | $schema->storage->disconnect; # this should not fire any tests | |
79 | } | |
80 | ||
81 | { | |
82 | ok $schema->connection( | |
83 | sub { DBI->connect(DBICTest->_database) }, | |
84 | { | |
85 | # method list form | |
86 | on_connect_call => [ sub { ok 1, "on_connect_call after DT parser" }, ], | |
87 | on_disconnect_call => [ sub { ok 1, "on_disconnect_call after DT parser" }, ], | |
88 | }, | |
89 | ), 'connection()'; | |
90 | ||
91 | ok (! $schema->storage->connected, 'start disconnected'); | |
92 | ||
93 | $schema->storage->_determine_driver; # this should connect due to the coderef | |
94 | ||
95 | ok ($schema->storage->connected, 'determine driver connects'); | |
96 | $schema->storage->disconnect; | |
97 | } |
4 | 4 | |
5 | 5 | use lib qw(t/lib); |
6 | 6 | use base 'DBICTest'; |
7 | require DBI; | |
7 | 8 | |
8 | 9 | |
9 | 10 | my $schema = DBICTest->init_schema( |
27 | 28 | $schema->storage->disconnect; |
28 | 29 | |
29 | 30 | ok $schema->connection( |
30 | DBICTest->_database, | |
31 | sub { DBI->connect(DBICTest->_database) }, | |
31 | 32 | { |
32 | 33 | on_connect_do => [ |
33 | 34 | 'CREATE TABLE TEST_empty (id INTEGER)', |
65 | 65 | |
66 | 66 | sub init_schema { |
67 | 67 | # current SQLT SQLite producer does not handle DROP TABLE IF EXISTS, trap warnings here |
68 | local $SIG{__WARN__} = sub { warn @_ unless $_[0] =~ /no such table.+DROP TABLE/ }; | |
68 | local $SIG{__WARN__} = sub { warn @_ unless $_[0] =~ /no such table.+DROP TABLE/s }; | |
69 | 69 | |
70 | 70 | my ($class, $schema_method) = @_; |
71 | 71 |