Merge tag '0.5.3' into debian/unstable
Thomas Goirand
10 years ago
0 | 0 | *.pyc |
1 | build/ | |
1 | /build | |
2 | 2 | dist/ |
3 | 3 | docs/build/output/ |
4 | 4 | *.orig |
5 | 5 | alembic.ini |
6 | 6 | tox.ini |
7 | 7 | .venv |
8 | .egg-info | |
8 | *.egg | |
9 | *.egg-info | |
9 | 10 | .coverage |
0 | Copyright (c) 2011-2013 Mike Bayer | |
0 | Copyright (c) 2011-2014 Mike Bayer | |
1 | 1 | |
2 | 2 | All rights reserved. |
3 | 3 |
13 | 13 | ============= |
14 | 14 | |
15 | 15 | See the section :ref:`creating_backends` for details on how to |
16 | register new backends or :ref:`changing_backend_behavior` for details on | |
17 | how to alter the behavior of existing backends. | |
16 | register new backends or :ref:`changing_backend_behavior` for details on | |
17 | how to alter the behavior of existing backends. | |
18 | 18 | |
19 | 19 | .. automodule:: dogpile.cache.api |
20 | 20 | :members: |
21 | ||
21 | ||
22 | 22 | |
23 | 23 | Backends |
24 | 24 | ========== |
34 | 34 | |
35 | 35 | .. automodule:: dogpile.cache.backends.file |
36 | 36 | :members: |
37 | ||
37 | ||
38 | 38 | .. automodule:: dogpile.cache.proxy |
39 | 39 | :members: |
40 | ||
41 | ||
40 | ||
41 | ||
42 | 42 | Plugins |
43 | 43 | ======== |
44 | 44 |
0 | 0 | ============== |
1 | 1 | Changelog |
2 | 2 | ============== |
3 | .. changelog:: | |
4 | :version: 0.5.3 | |
5 | :released: Wed Jan 8 2014 | |
6 | ||
7 | .. change:: | |
8 | :tags: bug | |
9 | :pullreq: 10 | |
10 | ||
11 | Fixed bug where the key_mangler would get in the way of usage of the | |
12 | async_creation_runner feature within the :meth:`.CacheRegion.get_or_create` | |
13 | method, by sending in the mangled key instead of the original key. The | |
14 | "mangled" key is only supposed to be exposed within the backend storage, | |
15 | not the creation function, which sends the key back into :meth:`.CacheRegion.set`, | |
16 | which does the mangling itself. Pull request courtesy Ryan Kolak. | |
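
For illustration, a minimal sketch of an ``async_creation_runner`` in the style of the documented example (names here are illustrative, not part of this change); note that it now receives the original key and hands it back to :meth:`.CacheRegion.set`, which applies the key_mangler itself::

    import threading

    def async_creation_runner(cache, somekey, creator, mutex):
        # regenerate the value in a background thread
        def runner():
            try:
                value = creator()
                # `somekey` is the un-mangled key; CacheRegion.set()
                # mangles it before handing it to the backend
                cache.set(somekey, value)
            finally:
                mutex.release()
        threading.Thread(target=runner).start()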
17 | ||
18 | .. change:: | |
19 | :tags: bug, py3k | |
20 | ||
21 | Fixed bug where the :meth:`.Region.get_multi` method wasn't calling | |
22 | the backend correctly in Py3K (e.g. was passing a destructive ``map()`` | |
23 | object) which would cause this method to fail on the memcached backend. | |
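
On Python 3, ``map()`` returns a one-shot iterator, so iterating the mangled keys once exhausts them; the fix materializes them with ``list()`` before use. A quick illustration::

    keys = map(str.upper, ["a", "b"])        # Python 3: a lazy iterator
    list(keys)                               # ['A', 'B'] -- consumes it
    list(keys)                               # []  -- a second pass sees nothing
    keys = list(map(str.upper, ["a", "b"]))  # materialized; safe to reuse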
24 | ||
25 | .. change:: | |
26 | :tags: feature | |
27 | :tickets: 55 | |
28 | ||
29 | Added a ``get()`` method to complement the ``set()``, ``invalidate()`` | |
30 | and ``refresh()`` methods established on functions decorated by | |
31 | :meth:`.CacheRegion.cache_on_arguments` and | |
32 | :meth:`.CacheRegion.cache_multi_on_arguments`. Pullreq courtesy | |
33 | Eric Hanchrow. | |
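
A hypothetical usage sketch of the new accessor alongside the existing ones (the region and the decorated function are illustrative)::

    @region.cache_on_arguments()
    def generate_something(x, y):
        return expensive_lookup(x, y)      # expensive_lookup is hypothetical

    generate_something(5, 6)               # computes and caches
    generate_something.get(5, 6)           # cached value, or NO_VALUE if absent
    generate_something.set(17, 5, 6)       # store 17 for arguments (5, 6)
    generate_something.invalidate(5, 6)    # remove the cached value
    generate_something.refresh(5, 6)       # recompute, re-cache and return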
34 | ||
35 | .. change:: | |
36 | :tags: feature | |
37 | :tickets: 51 | |
38 | :pullreq: 11 | |
39 | ||
40 | Added a new variant on :class:`.MemoryBackend`, :class:`.MemoryPickleBackend`. | |
41 | This backend applies ``pickle.dumps()`` and ``pickle.loads()`` to cached | |
42 | values upon set and get, so that similar copy-on-cache behavior as that | |
43 | of other backends is employed, guarding cached values against subsequent | |
44 | in-memory state changes. Pullreq courtesy Jonathan Vanasco. | |
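
Configuration is the same as for the plain memory backend, only the backend name differs; a small sketch of the copy-on-cache behavior::

    from dogpile.cache import make_region

    region = make_region().configure('dogpile.cache.memory_pickle')

    data = {"x": 1}
    region.set("somekey", data)
    data["x"] = 2                 # mutate the original object
    region.get("somekey")         # still {"x": 1}; the cached copy is isolated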
45 | ||
46 | .. change:: | |
47 | :tags: bug | |
48 | :pullreq: 9 | |
49 | ||
50 | Fixed a format call in the redis backend which would otherwise fail | |
51 | on Python 2.6; courtesy Jeff Dairiki. | |
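
Python 2.6's ``str.format()`` does not support auto-numbered replacement fields, which is what the fix works around::

    "_lock{}".format("foo")    # ValueError on Python 2.6: zero length field name
    "_lock{0}".format("foo")   # works on 2.6 and later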
52 | ||
53 | .. changelog:: | |
54 | :version: 0.5.2 | |
55 | :released: Fri Nov 15 2013 | |
56 | ||
57 | .. change:: | |
58 | :tags: bug | |
59 | ||
60 | Fixes to routines on Windows, including that default unit tests pass, | |
61 | and an adjustment to the "soft expiration" feature to ensure the | |
62 | expiration works given windows time.time() behavior. | |
63 | ||
64 | .. change:: | |
65 | :tags: bug | |
66 | ||
67 | Added py2.6 compatibility for unsupported ``total_seconds()`` call | |
68 | in region.py | |
69 | ||
70 | .. change:: | |
71 | :tags: feature | |
72 | :tickets: 44 | |
73 | ||
74 | Added a new argument ``lock_factory`` to the :class:`.DBMBackend` | |
75 | implementation. This allows for drop-in replacement of the default | |
76 | :class:`.FileLock` backend, which builds on ``os.flock()`` and only | |
77 | supports Unix platforms. A new abstract base :class:`.AbstractFileLock` | |
78 | has been added to provide a common base for custom lock implementations. | |
79 | The documentation points to an example thread-based rw lock which is | |
80 | now tested on Windows. | |
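
In configuration terms this is simply another entry in ``arguments``; a custom lock class, such as the ``MutexLock`` example shown in the :class:`.DBMBackend` docstring later in this change, is passed directly::

    from dogpile.cache import make_region

    region = make_region().configure(
        "dogpile.cache.dbm",
        expiration_time=300,
        arguments={
            "filename": "file.dbm",
            "lock_factory": MutexLock   # any AbstractFileLock subclass
        }
    )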
81 | ||
82 | .. changelog:: | |
83 | :version: 0.5.1 | |
84 | :released: Thu Oct 10 2013 | |
85 | ||
86 | .. change:: | |
87 | :tags: feature | |
88 | :tickets: 38 | |
89 | ||
90 | The :meth:`.CacheRegion.invalidate` method now supports an option | |
91 | ``hard=True|False``. A "hard" invalidation, equivalent to the | |
92 | existing functionality of :meth:`.CacheRegion.invalidate`, means | |
93 | :meth:`.CacheRegion.get_or_create` will not return the "old" value at | |
94 | all, forcing all getters to regenerate or wait for a regeneration. | |
95 | "soft" invalidation means that getters can continue to return the | |
96 | old value until a new one is generated. | |
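
A brief sketch of the two modes (soft invalidation requires the region or the call to have a non-None expiration time)::

    region.invalidate()             # hard: old values are dropped outright;
                                    # getters wait for regeneration
    region.invalidate(hard=False)   # soft: get_or_create() keeps serving the
                                    # old value while one thread regenerates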
97 | ||
98 | .. change:: | |
99 | :tags: feature | |
100 | :tickets: 40 | |
101 | ||
102 | New dogpile-specific exception classes have been added, so that | |
103 | issues like "region already configured", "region unconfigured", | |
104 | raise dogpile-specific exceptions. Other exception classes have | |
105 | been made more specific. Also added new accessor | |
106 | :attr:`.CacheRegion.is_configured`. Pullreq courtesy Morgan Fainberg. | |
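
For example, re-configuring a region can now be caught specifically, using the classes added in ``dogpile/cache/exception.py`` below (a sketch)::

    from dogpile.cache import exception

    try:
        region.configure("dogpile.cache.memory")
    except exception.RegionAlreadyConfigured:
        pass   # region was configured elsewhere; reuse it as-is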
107 | ||
108 | .. change:: | |
109 | :tags: bug | |
110 | ||
111 | Erroneously missed when the same change was made for ``set()`` | |
112 | in 0.5.0, the Redis backend now uses ``pickle.HIGHEST_PROTOCOL`` | |
113 | for the ``set_multi()`` method as well when producing pickles. | |
114 | Courtesy Łukasz Fidosz. | |
115 | ||
116 | .. change:: | |
117 | :tags: bug, redis, py3k | |
118 | :tickets: 39 | |
119 | ||
120 | Fixed an errant ``u''`` causing incompatibility in Python3.2 | |
121 | in the Redis backend, courtesy Jimmey Mabey. | |
122 | ||
123 | .. change:: | |
124 | :tags: bug | |
125 | ||
126 | The :func:`.util.coerce_string_conf` method now correctly coerces | |
127 | negative integers and those with a leading + sign. This previously | |
128 | prevented configuring a :class:`.CacheRegion` with an ``expiration_time`` | |
129 | of ``'-1'``. Courtesy David Beitey. | |
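
The coercion applies to string-based configuration such as values read from .ini files; roughly::

    from dogpile.cache import util

    util.coerce_string_conf({'expiration_time': '-1'})   # {'expiration_time': -1}
    util.coerce_string_conf({'expiration_time': '+1'})   # {'expiration_time': 1}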
130 | ||
131 | .. change:: | |
132 | :tags: bug | |
133 | ||
134 | The ``refresh()`` method on :meth:`.CacheRegion.cache_multi_on_arguments` | |
135 | now supports the ``asdict`` flag. | |
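
With ``asdict=True`` the decorated function accepts the keys as arguments and returns a dictionary keyed on them, and ``refresh()`` now follows the same shape; a sketch (``compute`` is hypothetical)::

    @region.cache_multi_on_arguments(asdict=True)
    def generate_something(*keys):
        return dict((key, compute(key)) for key in keys)

    generate_something(2, 7, 10)      # {2: ..., 7: ..., 10: ...}
    generate_something.refresh(2, 7)  # recomputes, re-caches and returns a dict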
136 | ||
3 | 137 | .. changelog:: |
4 | 138 | :version: 0.5.0 |
5 | 139 | :released: Fri Jun 21 2013 |
50 | 50 | |
51 | 51 | # General information about the project. |
52 | 52 | project = u'dogpile.cache' |
53 | copyright = u'2011-2013 Mike Bayer' | |
53 | copyright = u'2011-2014 Mike Bayer' | |
54 | 54 | |
55 | 55 | # The version info for the project you're documenting, acts as replacement for |
56 | 56 | # |version| and |release|, also used in various other places throughout the |
172 | 172 | |
173 | 173 | from dogpile.cache import make_region |
174 | 174 | |
175 | region = make_region("dictionary") | |
175 | region = make_region("myregion") | |
176 | ||
177 | region.configure("dictionary") | |
176 | 178 | |
177 | 179 | data = region.set("somekey", "somevalue") |
178 | 180 |
0 | __version__ = '0.5.0' | |
0 | __version__ = '0.5.3' | |
1 | 1 | |
2 | 2 | from .region import CacheRegion, register_backend, make_region |
4 | 4 | register_backend("dogpile.cache.bmemcached", "dogpile.cache.backends.memcached", "BMemcachedBackend") |
5 | 5 | register_backend("dogpile.cache.memcached", "dogpile.cache.backends.memcached", "MemcachedBackend") |
6 | 6 | register_backend("dogpile.cache.memory", "dogpile.cache.backends.memory", "MemoryBackend") |
7 | register_backend("dogpile.cache.memory_pickle", "dogpile.cache.backends.memory", "MemoryPickleBackend") | |
7 | 8 | register_backend("dogpile.cache.redis", "dogpile.cache.backends.redis", "RedisBackend") |
11 | 11 | from dogpile.cache import compat |
12 | 12 | from dogpile.cache import util |
13 | 13 | import os |
14 | import fcntl | |
15 | ||
16 | __all__ = 'DBMBackend', 'FileLock' | |
14 | ||
15 | __all__ = 'DBMBackend', 'FileLock', 'AbstractFileLock' | |
17 | 16 | |
18 | 17 | class DBMBackend(CacheBackend): |
19 | 18 | """A file-backend using a dbm file to store keys. |
47 | 46 | concurrent writes, the other is to coordinate |
48 | 47 | value creation (i.e. the dogpile lock). By default, |
49 | 48 | these lockfiles use the ``flock()`` system call |
50 | for locking; this is only available on Unix | |
51 | platforms. | |
49 | for locking; this is **only available on Unix | |
50 | platforms**. An alternative lock implementation, such as one | |
51 | which is based on threads or uses a third-party system | |
52 | such as `portalocker <https://pypi.python.org/pypi/portalocker>`_, | |
53 | can be dropped in using the ``lock_factory`` argument | |
54 | in conjunction with the :class:`.AbstractFileLock` base class. | |
52 | 55 | |
53 | 56 | Currently, the dogpile lock is against the entire |
54 | 57 | DBM file, not per key. This means there can |
79 | 82 | suffix ".dogpile.lock" to the DBM filename. If |
80 | 83 | False, then dogpile.cache uses the default dogpile |
81 | 84 | lock, a plain thread-based mutex. |
85 | :param lock_factory: a function or class which provides | |
86 | for a read/write lock. Defaults to :class:`.FileLock`. | |
87 | Custom implementations need to implement context-manager | |
88 | based ``read()`` and ``write()`` functions - the | |
89 | :class:`.AbstractFileLock` class is provided as a base class | |
90 | which provides these methods based on individual read/write lock | |
91 | functions. E.g. to replace the lock with the dogpile.core | |
92 | :class:`.ReadWriteMutex`:: | |
93 | ||
94 | from dogpile.core.readwrite_lock import ReadWriteMutex | |
95 | from dogpile.cache.backends.file import AbstractFileLock | |
96 | ||
97 | class MutexLock(AbstractFileLock): | |
98 | def __init__(self, filename): | |
99 | self.mutex = ReadWriteMutex() | |
100 | ||
101 | def acquire_read_lock(self, wait): | |
102 | ret = self.mutex.acquire_read_lock(wait) | |
103 | return wait or ret | |
104 | ||
105 | def acquire_write_lock(self, wait): | |
106 | ret = self.mutex.acquire_write_lock(wait) | |
107 | return wait or ret | |
108 | ||
109 | def release_read_lock(self): | |
110 | return self.mutex.release_read_lock() | |
111 | ||
112 | def release_write_lock(self): | |
113 | return self.mutex.release_write_lock() | |
114 | ||
115 | from dogpile.cache import make_region | |
116 | ||
117 | region = make_region().configure( | |
118 | "dogpile.cache.dbm", | |
119 | expiration_time=300, | |
120 | arguments={ | |
121 | "filename": "file.dbm", | |
122 | "lock_factory": MutexLock | |
123 | } | |
124 | ) | |
125 | ||
126 | While the included :class:`.FileLock` uses ``os.flock()``, a | |
127 | windows-compatible implementation can be built using a library | |
128 | such as `portalocker <https://pypi.python.org/pypi/portalocker>`_. | |
129 | ||
130 | .. versionadded:: 0.5.2 | |
131 | ||
82 | 132 | |
83 | 133 | |
84 | 134 | """ |
88 | 138 | ) |
89 | 139 | dir_, filename = os.path.split(self.filename) |
90 | 140 | |
141 | self.lock_factory = arguments.get("lock_factory", FileLock) | |
91 | 142 | self._rw_lock = self._init_lock( |
92 | 143 | arguments.get('rw_lockfile'), |
93 | 144 | ".rw.lock", dir_, filename) |
107 | 158 | |
108 | 159 | def _init_lock(self, argument, suffix, basedir, basefile, wrapper=None): |
109 | 160 | if argument is None: |
110 | lock = FileLock(os.path.join(basedir, basefile + suffix)) | |
161 | lock = self.lock_factory(os.path.join(basedir, basefile + suffix)) | |
111 | 162 | elif argument is not False: |
112 | lock = FileLock( | |
163 | lock = self.lock_factory( | |
113 | 164 | os.path.abspath( |
114 | 165 | os.path.normpath(argument) |
115 | 166 | )) |
203 | 254 | except KeyError: |
204 | 255 | pass |
205 | 256 | |
206 | class FileLock(object): | |
207 | """Use lockfiles to coordinate read/write access to a file. | |
208 | ||
209 | Only works on Unix systems, using | |
210 | `fcntl.flock() <http://docs.python.org/library/fcntl.html>`_. | |
257 | class AbstractFileLock(object): | |
258 | """Coordinate read/write access to a file. | |
259 | ||
260 | Typically this is a file-based lock, but it doesn't necessarily have to be. | |
261 | ||
262 | The default implementation here is :class:`.FileLock`. | |
263 | ||
264 | Implementations should provide the following methods:: | |
265 | ||
266 | * __init__() | |
267 | * acquire_read_lock() | |
268 | * acquire_write_lock() | |
269 | * release_read_lock() | |
270 | * release_write_lock() | |
271 | ||
272 | The ``__init__()`` method accepts a single argument "filename", which | |
273 | may be used as the "lock file", for those implementations that use a lock | |
274 | file. | |
275 | ||
276 | Note that multithreaded environments must provide a thread-safe | |
277 | version of this lock. The recommended approach for file-descriptor-based | |
278 | locks is to use a Python ``threading.local()`` so that a unique file descriptor | |
279 | is held per thread. See the source code of :class:`.FileLock` for an | |
280 | implementation example. | |
281 | ||
211 | 282 | |
212 | 283 | """ |
213 | 284 | |
214 | 285 | def __init__(self, filename): |
215 | self._filedescriptor = compat.threading.local() | |
216 | self.filename = filename | |
286 | """Constructor, is given the filename of a potential lockfile. | |
287 | ||
288 | The usage of this filename is optional and no file is | |
289 | created by default. | |
290 | ||
291 | Raises ``NotImplementedError`` by default, must be | |
292 | implemented by subclasses. | |
293 | """ | |
294 | raise NotImplementedError() | |
217 | 295 | |
218 | 296 | def acquire(self, wait=True): |
297 | """Acquire the "write" lock. | |
298 | ||
299 | This is a direct call to :meth:`.AbstractFileLock.acquire_write_lock`. | |
300 | ||
301 | """ | |
219 | 302 | return self.acquire_write_lock(wait) |
220 | 303 | |
221 | 304 | def release(self): |
305 | """Release the "write" lock. | |
306 | ||
307 | This is a direct call to :meth:`.AbstractFileLock.release_write_lock`. | |
308 | ||
309 | """ | |
222 | 310 | self.release_write_lock() |
223 | ||
224 | @property | |
225 | def is_open(self): | |
226 | return hasattr(self._filedescriptor, 'fileno') | |
227 | 311 | |
228 | 312 | @contextmanager |
229 | 313 | def read(self): |
314 | """Provide a context manager for the "read" lock. | |
315 | ||
316 | This method makes use of :meth:`.AbstractFileLock.acquire_read_lock` | |
317 | and :meth:`.AbstractFileLock.release_read_lock` | |
318 | ||
319 | """ | |
320 | ||
230 | 321 | self.acquire_read_lock(True) |
231 | 322 | try: |
232 | 323 | yield |
235 | 326 | |
236 | 327 | @contextmanager |
237 | 328 | def write(self): |
329 | """Provide a context manager for the "write" lock. | |
330 | ||
331 | This method makes use of :meth:`.AbstractFileLock.acquire_write_lock` | |
332 | and :meth:`.AbstractFileLock.release_write_lock` | |
333 | ||
334 | """ | |
335 | ||
238 | 336 | self.acquire_write_lock(True) |
239 | 337 | try: |
240 | 338 | yield |
241 | 339 | finally: |
242 | 340 | self.release_write_lock() |
243 | 341 | |
342 | @property | |
343 | def is_open(self): | |
344 | """optional method.""" | |
345 | raise NotImplementedError() | |
346 | ||
244 | 347 | def acquire_read_lock(self, wait): |
245 | return self._acquire(wait, os.O_RDONLY, fcntl.LOCK_SH) | |
348 | """Acquire a 'reader' lock. | |
349 | ||
350 | Raises ``NotImplementedError`` by default, must be | |
351 | implemented by subclasses. | |
352 | """ | |
353 | raise NotImplementedError() | |
246 | 354 | |
247 | 355 | def acquire_write_lock(self, wait): |
248 | return self._acquire(wait, os.O_WRONLY, fcntl.LOCK_EX) | |
356 | """Acquire a 'write' lock. | |
357 | ||
358 | Raises ``NotImplementedError`` by default, must be | |
359 | implemented by subclasses. | |
360 | """ | |
361 | raise NotImplementedError() | |
362 | ||
363 | def release_read_lock(self): | |
364 | """Release a 'reader' lock. | |
365 | ||
366 | Raises ``NotImplementedError`` by default, must be | |
367 | implemented by subclasses. | |
368 | """ | |
369 | raise NotImplementedError() | |
370 | ||
371 | def release_write_lock(self): | |
372 | """Release a 'writer' lock. | |
373 | ||
374 | Raises ``NotImplementedError`` by default, must be | |
375 | implemented by subclasses. | |
376 | """ | |
377 | raise NotImplementedError() | |
378 | ||
379 | class FileLock(AbstractFileLock): | |
380 | """Use lockfiles to coordinate read/write access to a file. | |
381 | ||
382 | Only works on Unix systems, using | |
383 | `fcntl.flock() <http://docs.python.org/library/fcntl.html>`_. | |
384 | ||
385 | """ | |
386 | ||
387 | def __init__(self, filename): | |
388 | self._filedescriptor = compat.threading.local() | |
389 | self.filename = filename | |
390 | ||
391 | @util.memoized_property | |
392 | def _module(self): | |
393 | import fcntl | |
394 | return fcntl | |
395 | ||
396 | @property | |
397 | def is_open(self): | |
398 | return hasattr(self._filedescriptor, 'fileno') | |
399 | ||
400 | def acquire_read_lock(self, wait): | |
401 | return self._acquire(wait, os.O_RDONLY, self._module.LOCK_SH) | |
402 | ||
403 | def acquire_write_lock(self, wait): | |
404 | return self._acquire(wait, os.O_WRONLY, self._module.LOCK_EX) | |
249 | 405 | |
250 | 406 | def release_read_lock(self): |
251 | 407 | self._release() |
258 | 414 | fileno = os.open(self.filename, wrflag) |
259 | 415 | try: |
260 | 416 | if not wait: |
261 | lockflag |= fcntl.LOCK_NB | |
262 | fcntl.flock(fileno, lockflag) | |
417 | lockflag |= self._module.LOCK_NB | |
418 | self._module.flock(fileno, lockflag) | |
263 | 419 | except IOError: |
264 | 420 | os.close(fileno) |
265 | 421 | if not wait: |
279 | 435 | except AttributeError: |
280 | 436 | return |
281 | 437 | else: |
282 | fcntl.flock(fileno, fcntl.LOCK_UN) | |
438 | self._module.flock(fileno, self._module.LOCK_UN) | |
283 | 439 | os.close(fileno) |
284 | 440 | del self._filedescriptor.fileno |
217 | 217 | ``pylibmc.Client``. |
218 | 218 | :param behaviors: a dictionary which will be passed to |
219 | 219 | ``pylibmc.Client`` as the ``behaviors`` parameter. |
220 | :param min_compres_len: Integer, will be passed as the | |
220 | :param min_compress_len: Integer, will be passed as the | |
221 | 221 | ``min_compress_len`` parameter to the ``pylibmc.Client.set`` |
222 | 222 | method. |
223 | 223 |
0 | 0 | """ |
1 | Memory Backend | |
2 | -------------- | |
1 | Memory Backends | |
2 | --------------- | |
3 | 3 | |
4 | Provides a simple dictionary-based backend. | |
4 | Provides simple dictionary-based backends. | |
5 | ||
6 | The two backends are :class:`.MemoryBackend` and :class:`.MemoryPickleBackend`; | |
7 | the latter applies a serialization step to cached values while the former | |
8 | places the value as given into the dictionary. | |
5 | 9 | |
6 | 10 | """ |
7 | 11 | |
8 | 12 | from dogpile.cache.api import CacheBackend, NO_VALUE |
13 | from dogpile.cache.compat import pickle | |
9 | 14 | |
10 | 15 | class MemoryBackend(CacheBackend): |
11 | 16 | """A backend that uses a plain dictionary. |
40 | 45 | |
41 | 46 | |
42 | 47 | """ |
48 | pickle_values = False | |
49 | ||
43 | 50 | def __init__(self, arguments): |
44 | 51 | self._cache = arguments.pop("cache_dict", {}) |
45 | 52 | |
46 | 53 | def get(self, key): |
47 | return self._cache.get(key, NO_VALUE) | |
54 | value = self._cache.get(key, NO_VALUE) | |
55 | if value is not NO_VALUE and self.pickle_values: | |
56 | value = pickle.loads(value) | |
57 | return value | |
48 | 58 | |
49 | 59 | def get_multi(self, keys): |
50 | return [ | |
51 | self._cache.get(key, NO_VALUE) | |
52 | for key in keys | |
53 | ] | |
60 | ret = [self._cache.get(key, NO_VALUE) | |
61 | for key in keys] | |
62 | if self.pickle_values: | |
63 | ret = [ | |
64 | pickle.loads(value) | |
65 | if value is not NO_VALUE else value | |
66 | for value in ret | |
67 | ] | |
68 | return ret | |
54 | 69 | |
55 | 70 | def set(self, key, value): |
71 | if self.pickle_values: | |
72 | value = pickle.dumps(value, pickle.HIGHEST_PROTOCOL) | |
56 | 73 | self._cache[key] = value |
57 | 74 | |
58 | 75 | def set_multi(self, mapping): |
59 | for key,value in mapping.items(): | |
76 | pickle_values = self.pickle_values | |
77 | for key, value in mapping.items(): | |
78 | if pickle_values: | |
79 | value = pickle.dumps(value, pickle.HIGHEST_PROTOCOL) | |
60 | 80 | self._cache[key] = value |
61 | 81 | |
62 | 82 | def delete(self, key): |
65 | 85 | def delete_multi(self, keys): |
66 | 86 | for key in keys: |
67 | 87 | self._cache.pop(key, None) |
88 | ||
89 | ||
90 | class MemoryPickleBackend(MemoryBackend): | |
91 | """A backend that uses a plain dictionary, but serializes objects on | |
92 | :meth:`.MemoryBackend.set` and deserializes on :meth:`.MemoryBackend.get`. | |
93 | ||
94 | E.g.:: | |
95 | ||
96 | from dogpile.cache import make_region | |
97 | ||
98 | region = make_region().configure( | |
99 | 'dogpile.cache.memory_pickle' | |
100 | ) | |
101 | ||
102 | The usage of pickle to serialize cached values allows an object | |
103 | as placed in the cache to be a copy of the original given object, so | |
104 | that any subsequent changes to the given object aren't reflected | |
105 | in the cached value, thus making the backend behave the same way | |
106 | as other backends which make use of serialization. | |
107 | ||
108 | The serialization is performed via pickle, and incurs the same | |
109 | performance hit in doing so as that of other backends; in this way | |
110 | the :class:`.MemoryPickleBackend` performance is somewhere in between | |
111 | that of the pure :class:`.MemoryBackend` and the remote server oriented | |
112 | backends such as that of Memcached or Redis. | |
113 | ||
114 | Pickle behavior here is the same as that of the Redis backend, using | |
115 | either ``cPickle`` or ``pickle`` and specifying ``HIGHEST_PROTOCOL`` | |
116 | upon serialize. | |
117 | ||
118 | .. versionadded:: 0.5.3 | |
119 | ||
120 | """ | |
121 | pickle_values = True |
7 | 7 | |
8 | 8 | from __future__ import absolute_import |
9 | 9 | from dogpile.cache.api import CacheBackend, NO_VALUE |
10 | from dogpile.cache.compat import pickle | |
10 | from dogpile.cache.compat import pickle, u | |
11 | 11 | |
12 | 12 | redis = None |
13 | 13 | |
104 | 104 | |
105 | 105 | def get_mutex(self, key): |
106 | 106 | if self.distributed_lock: |
107 | return self.client.lock(u"_lock{}".format(key), self.lock_timeout, | |
108 | self.lock_sleep) | |
107 | return self.client.lock(u('_lock{0}').format(key), | |
108 | self.lock_timeout, self.lock_sleep) | |
109 | 109 | else: |
110 | 110 | return None |
111 | 111 | |
128 | 128 | self.client.set(key, pickle.dumps(value, pickle.HIGHEST_PROTOCOL)) |
129 | 129 | |
130 | 130 | def set_multi(self, mapping): |
131 | mapping = dict((k, pickle.dumps(v)) for k, v in mapping.items()) | |
131 | mapping = dict( | |
132 | (k, pickle.dumps(v, pickle.HIGHEST_PROTOCOL)) | |
133 | for k, v in mapping.items() | |
134 | ) | |
132 | 135 | |
133 | 136 | if not self.redis_expiration_time: |
134 | 137 | self.client.mset(mapping) |
3 | 3 | py2k = sys.version_info < (3, 0) |
4 | 4 | py3k = sys.version_info >= (3, 0) |
5 | 5 | py32 = sys.version_info >= (3, 2) |
6 | py27 = sys.version_info >= (2, 7) | |
6 | 7 | jython = sys.platform.startswith('java') |
7 | ||
8 | win32 = sys.platform.startswith('win') | |
8 | 9 | |
9 | 10 | try: |
10 | 11 | import threading |
54 | 55 | if py3k or jython: |
55 | 56 | import pickle |
56 | 57 | else: |
57 | import cPickle as pickle | |
58 | import cPickle as pickle | |
59 | ||
60 | ||
61 | def timedelta_total_seconds(td): | |
62 | if py27: | |
63 | return td.total_seconds() | |
64 | else: | |
65 | return (td.microseconds + (td.seconds + td.days * 24 * 3600) * 1e6) / 1e6 | |
66 | ||
67 |
0 | """Exception classes for dogpile.cache.""" | |
1 | ||
2 | ||
3 | class DogpileCacheException(Exception): | |
4 | """Base Exception for dogpile.cache exceptions to inherit from.""" | |
5 | ||
6 | ||
7 | class RegionAlreadyConfigured(DogpileCacheException): | |
8 | """CacheRegion instance is already configured.""" | |
9 | ||
10 | ||
11 | class RegionNotConfigured(DogpileCacheException): | |
12 | """CacheRegion instance has not been configured.""" | |
13 | ||
14 | ||
15 | class ValidationError(DogpileCacheException): | |
16 | """Error validating a value or option.""" |
0 | 0 | from __future__ import with_statement |
1 | 1 | from dogpile.core import Lock, NeedRegenerationException |
2 | 2 | from dogpile.core.nameregistry import NameRegistry |
3 | from . import exception | |
3 | 4 | from .util import function_key_generator, PluginLoader, \ |
4 | 5 | memoized_property, coerce_string_conf, function_multi_key_generator |
5 | 6 | from .api import NO_VALUE, CachedValue |
169 | 170 | self.key_mangler = key_mangler |
170 | 171 | else: |
171 | 172 | self.key_mangler = None |
172 | self._invalidated = None | |
173 | self._hard_invalidated = None | |
174 | self._soft_invalidated = None | |
173 | 175 | self.async_creation_runner = async_creation_runner |
174 | 176 | |
175 | 177 | def configure(self, backend, |
220 | 222 | """ |
221 | 223 | |
222 | 224 | if "backend" in self.__dict__: |
223 | raise Exception( | |
225 | raise exception.RegionAlreadyConfigured( | |
224 | 226 | "This region is already " |
225 | 227 | "configured with backend: %s" |
226 | 228 | % self.backend) |
236 | 238 | if not expiration_time or isinstance(expiration_time, Number): |
237 | 239 | self.expiration_time = expiration_time |
238 | 240 | elif isinstance(expiration_time, datetime.timedelta): |
239 | self.expiration_time = int(expiration_time.total_seconds()) | |
241 | self.expiration_time = int(compat.timedelta_total_seconds(expiration_time)) | |
240 | 242 | else: |
241 | raise Exception('expiration_time is not a number or timedelta.') | |
243 | raise exception.ValidationError( | |
244 | 'expiration_time is not a number or timedelta.') | |
242 | 245 | |
243 | 246 | if self.key_mangler is None: |
244 | 247 | self.key_mangler = self.backend.key_mangler |
261 | 264 | proxy = proxy() |
262 | 265 | |
263 | 266 | if not issubclass(type(proxy), ProxyBackend): |
264 | raise Exception("Type %s is not a valid ProxyBackend" | |
265 | % type(proxy)) | |
267 | raise TypeError("Type %s is not a valid ProxyBackend" | |
268 | % type(proxy)) | |
266 | 269 | |
267 | 270 | self.backend = proxy.wrap(self.backend) |
268 | 271 | |
288 | 291 | else: |
289 | 292 | return self._LockWrapper() |
290 | 293 | |
291 | def invalidate(self): | |
294 | def invalidate(self, hard=True): | |
292 | 295 | """Invalidate this :class:`.CacheRegion`. |
293 | 296 | |
294 | 297 | Invalidation works by setting a current timestamp |
301 | 304 | local to this instance of :class:`.CacheRegion`. |
302 | 305 | |
303 | 306 | Once set, the invalidation time is honored by |
304 | the :meth:`.CacheRegion.get_or_create` and | |
307 | the :meth:`.CacheRegion.get_or_create`, | |
308 | :meth:`.CacheRegion.get_or_create_multi` and | |
305 | 309 | :meth:`.CacheRegion.get` methods. |
306 | 310 | |
311 | The method | |
312 | supports both "hard" and "soft" invalidation options. With "hard" | |
313 | invalidation, :meth:`.CacheRegion.get_or_create` will force an immediate | |
314 | regeneration of the value which all getters will wait for. With | |
315 | "soft" invalidation, subsequent getters will return the "old" value until | |
316 | the new one is available. | |
317 | ||
318 | Usage of "soft" invalidation requires that the region or the method | |
319 | is given a non-None expiration time. | |
320 | ||
307 | 321 | .. versionadded:: 0.3.0 |
308 | 322 | |
323 | :param hard: if True, cache values will all require immediate | |
324 | regeneration; dogpile logic won't be used. If False, the | |
325 | creation time of existing values will be pushed back before | |
326 | the expiration time so that a return+regen will be invoked. | |
327 | ||
328 | .. versionadded:: 0.5.1 | |
329 | ||
309 | 330 | """ |
310 | self._invalidated = time.time() | |
331 | if hard: | |
332 | self._hard_invalidated = time.time() | |
333 | self._soft_invalidated = None | |
334 | else: | |
335 | self._hard_invalidated = None | |
336 | self._soft_invalidated = time.time() | |
311 | 337 | |
312 | 338 | def configure_from_config(self, config_dict, prefix): |
313 | 339 | """Configure from a configuration dictionary |
345 | 371 | |
346 | 372 | @memoized_property |
347 | 373 | def backend(self): |
348 | raise Exception("No backend is configured on this region.") | |
374 | raise exception.RegionNotConfigured( | |
375 | "No backend is configured on this region.") | |
376 | ||
377 | @property | |
378 | def is_configured(self): | |
379 | """Return True if the backend has been configured via the | |
380 | :meth:`.CacheRegion.configure` method already. | |
381 | ||
382 | .. versionadded:: 0.5.1 | |
383 | ||
384 | """ | |
385 | return 'backend' in self.__dict__ | |
349 | 386 | |
350 | 387 | def get(self, key, expiration_time=None, ignore_expiration=False): |
351 | 388 | """Return a value from the cache, based on the given key. |
418 | 455 | |
419 | 456 | current_time = time.time() |
420 | 457 | |
458 | invalidated = self._hard_invalidated or self._soft_invalidated | |
421 | 459 | def value_fn(value): |
422 | 460 | if value is NO_VALUE: |
423 | 461 | return value |
424 | 462 | elif expiration_time is not None and \ |
425 | 463 | current_time - value.metadata["ct"] > expiration_time: |
426 | 464 | return NO_VALUE |
427 | elif self._invalidated and \ | |
428 | value.metadata["ct"] < self._invalidated: | |
465 | elif invalidated and \ | |
466 | value.metadata["ct"] < invalidated: | |
429 | 467 | return NO_VALUE |
430 | 468 | else: |
431 | 469 | return value |
465 | 503 | |
466 | 504 | """ |
467 | 505 | if self.key_mangler: |
468 | keys = map(lambda key: self.key_mangler(key), keys) | |
506 | keys = list(map(lambda key: self.key_mangler(key), keys)) | |
469 | 507 | |
470 | 508 | backend_values = self.backend.get_multi(keys) |
471 | 509 | |
548 | 586 | :meth:`.CacheRegion.get_or_create_multi` - multiple key/value version |
549 | 587 | |
550 | 588 | """ |
589 | orig_key = key | |
551 | 590 | if self.key_mangler: |
552 | 591 | key = self.key_mangler(key) |
553 | 592 | |
555 | 594 | value = self.backend.get(key) |
556 | 595 | if value is NO_VALUE or \ |
557 | 596 | value.metadata['v'] != value_version or \ |
558 | (self._invalidated and | |
559 | value.metadata["ct"] < self._invalidated): | |
597 | (self._hard_invalidated and | |
598 | value.metadata["ct"] < self._hard_invalidated): | |
560 | 599 | raise NeedRegenerationException() |
561 | return value.payload, value.metadata["ct"] | |
600 | ct = value.metadata["ct"] | |
601 | if self._soft_invalidated: | |
602 | if ct < self._soft_invalidated: | |
603 | ct = time.time() - expiration_time - .0001 | |
604 | ||
605 | return value.payload, ct | |
562 | 606 | |
563 | 607 | def gen_value(): |
564 | 608 | created_value = creator() |
573 | 617 | if expiration_time is None: |
574 | 618 | expiration_time = self.expiration_time |
575 | 619 | |
620 | if expiration_time is None and self._soft_invalidated: | |
621 | raise exception.DogpileCacheException( | |
622 | "Non-None expiration time required " | |
623 | "for soft invalidation") | |
624 | ||
576 | 625 | if self.async_creation_runner: |
577 | 626 | def async_creator(mutex): |
578 | return self.async_creation_runner(self, key, creator, mutex) | |
627 | return self.async_creation_runner(self, orig_key, creator, mutex) | |
579 | 628 | else: |
580 | 629 | async_creator = None |
581 | 630 | |
629 | 678 | |
630 | 679 | def get_value(key): |
631 | 680 | value = values.get(key, NO_VALUE) |
681 | ||
632 | 682 | if value is NO_VALUE or \ |
633 | 683 | value.metadata['v'] != value_version or \ |
634 | (self._invalidated and | |
635 | value.metadata["ct"] < self._invalidated): | |
684 | (self._hard_invalidated and | |
685 | value.metadata["ct"] < self._hard_invalidated): | |
686 | # dogpile.core understands a 0 here as | |
687 | # "the value is not available", e.g. | |
688 | # _has_value() will return False. | |
636 | 689 | return value.payload, 0 |
637 | 690 | else: |
638 | return value.payload, value.metadata["ct"] | |
691 | ct = value.metadata["ct"] | |
692 | if self._soft_invalidated: | |
693 | if ct < self._soft_invalidated: | |
694 | ct = time.time() - expiration_time - .0001 | |
695 | ||
696 | return value.payload, ct | |
639 | 697 | |
640 | 698 | def gen_value(): |
641 | 699 | raise NotImplementedError() |
645 | 703 | |
646 | 704 | if expiration_time is None: |
647 | 705 | expiration_time = self.expiration_time |
706 | ||
707 | if expiration_time is None and self._soft_invalidated: | |
708 | raise exception.DogpileCacheException( | |
709 | "Non-None expiration time required " | |
710 | "for soft invalidation") | |
648 | 711 | |
649 | 712 | mutexes = {} |
650 | 713 | |
750 | 813 | """ |
751 | 814 | |
752 | 815 | if self.key_mangler: |
753 | keys = map(lambda key: self.key_mangler(key), keys) | |
816 | keys = list(map(lambda key: self.key_mangler(key), keys)) | |
754 | 817 | |
755 | 818 | self.backend.delete_multi(keys) |
756 | 819 | |
810 | 873 | newvalue = generate_something.refresh(5, 6) |
811 | 874 | |
812 | 875 | .. versionadded:: 0.5.0 Added ``refresh()`` method to decorated |
876 | function. | |
877 | ||
878 | Lastly, the ``get()`` method returns either the value cached | |
879 | for the given key, or the token ``NO_VALUE`` if no such key | |
880 | exists:: | |
881 | ||
882 | value = generate_something.get(5, 6) | |
883 | ||
884 | .. versionadded:: 0.5.3 Added ``get()`` method to decorated | |
813 | 885 | function. |
814 | 886 | |
815 | 887 | The default key generation will use the name |
942 | 1014 | key = key_generator(*arg, **kw) |
943 | 1015 | self.set(key, value) |
944 | 1016 | |
1017 | def get(*arg, **kw): | |
1018 | key = key_generator(*arg, **kw) | |
1019 | return self.get(key) | |
1020 | ||
945 | 1021 | def refresh(*arg, **kw): |
946 | 1022 | key = key_generator(*arg, **kw) |
947 | 1023 | value = fn(*arg, **kw) |
951 | 1027 | decorate.set = set_ |
952 | 1028 | decorate.invalidate = invalidate |
953 | 1029 | decorate.refresh = refresh |
1030 | decorate.get = get | |
954 | 1031 | |
955 | 1032 | return decorate |
956 | 1033 | return decorator |
1006 | 1083 | generate_something.set({"k1": "value1", |
1007 | 1084 | "k2": "value2", "k3": "value3"}) |
1008 | 1085 | |
1009 | an ``invalidate()`` method, which has the effect of deleting | |
1086 | ...an ``invalidate()`` method, which has the effect of deleting | |
1010 | 1087 | the given sequence of keys using the same mechanism as that of |
1011 | 1088 | :meth:`.CacheRegion.delete_multi`:: |
1012 | 1089 | |
1013 | 1090 | generate_something.invalidate("k1", "k2", "k3") |
1014 | 1091 | |
1015 | and finally a ``refresh()`` method, which will call the creation | |
1092 | ...a ``refresh()`` method, which will call the creation | |
1016 | 1093 | function, cache the new values, and return them:: |
1017 | 1094 | |
1018 | 1095 | values = generate_something.refresh("k1", "k2", "k3") |
1096 | ||
1097 | ...and a ``get()`` method, which will return values | |
1098 | based on the given arguments:: | |
1099 | ||
1100 | values = generate_something.get("k1", "k2", "k3") | |
1101 | ||
1102 | .. versionadded:: 0.5.3 Added ``get()`` method to decorated | |
1103 | function. | |
1019 | 1104 | |
1020 | 1105 | Parameters passed to :meth:`.CacheRegion.cache_multi_on_arguments` |
1021 | 1106 | have the same meaning as those passed to |
1110 | 1195 | in zip(gen_keys, keys)) |
1111 | 1196 | ) |
1112 | 1197 | |
1198 | def get(*arg): | |
1199 | keys = key_generator(*arg) | |
1200 | return self.get_multi(keys) | |
1201 | ||
1113 | 1202 | def refresh(*arg): |
1114 | 1203 | keys = key_generator(*arg) |
1115 | 1204 | values = fn(*arg) |
1116 | self.set_multi( | |
1117 | dict(zip(keys, values)) | |
1118 | ) | |
1119 | return values | |
1205 | if asdict: | |
1206 | self.set_multi( | |
1207 | dict(zip(keys, [values[a] for a in arg])) | |
1208 | ) | |
1209 | return values | |
1210 | else: | |
1211 | self.set_multi( | |
1212 | dict(zip(keys, values)) | |
1213 | ) | |
1214 | return values | |
1120 | 1215 | |
1121 | 1216 | decorate.set = set_ |
1122 | 1217 | decorate.invalidate = invalidate |
1123 | 1218 | decorate.refresh = refresh |
1219 | decorate.get = get | |
1124 | 1220 | |
1125 | 1221 | return decorate |
1126 | 1222 | return decorator |
1136 | 1232 | |
1137 | 1233 | """ |
1138 | 1234 | return CacheRegion(*arg, **kw) |
1139 |
0 | 0 | from hashlib import sha1 |
1 | 1 | import inspect |
2 | import sys | |
3 | 2 | import re |
4 | 3 | import collections |
5 | 4 | from . import compat |
13 | 12 | continue |
14 | 13 | |
15 | 14 | v = v.strip() |
16 | if re.match(r'^\d+$', v): | |
15 | if re.match(r'^[-+]?\d+$', v): | |
17 | 16 | result[k] = int(v) |
18 | 17 | elif v.lower() in ('false', 'true'): |
19 | 18 | result[k] = v.lower() == 'true' |
30 | 29 | |
31 | 30 | def load(self, name): |
32 | 31 | if name in self.impls: |
33 | return self.impls[name]() | |
34 | else: #pragma NO COVERAGE | |
35 | # TODO: if someone has ideas on how to | |
36 | # unit test entrypoint stuff, let me know. | |
32 | return self.impls[name]() | |
33 | else: # pragma NO COVERAGE | |
37 | 34 | import pkg_resources |
38 | 35 | for impl in pkg_resources.iter_entry_points( |
39 | 36 | self.group, |
1 | 1 | |
2 | 2 | [upload_docs] |
3 | 3 | upload-dir = docs/build/output/html |
4 | ||
5 | [wheel] | |
6 | universal = 1 | |
4 | 7 | |
5 | 8 | [upload] |
6 | 9 | sign = 1 |
11 | 14 | with-coverage = 1 |
12 | 15 | cover-erase = 1 |
13 | 16 | nologcapture = 1 |
17 | where = tests |
14 | 14 | description="A caching front-end based on the Dogpile lock.", |
15 | 15 | long_description=open(readme).read(), |
16 | 16 | classifiers=[ |
17 | 'Development Status :: 3 - Alpha', | |
17 | 'Development Status :: 4 - Beta', | |
18 | 18 | 'Intended Audience :: Developers', |
19 | 19 | 'License :: OSI Approved :: BSD License', |
20 | 20 | 'Programming Language :: Python', |
35 | 35 | install_requires=['dogpile.core>=0.4.1'], |
36 | 36 | test_suite='nose.collector', |
37 | 37 | tests_require=['nose', 'mock'], |
38 | ) | |
38 | ) |
1 | 1 | from nose import SkipTest |
2 | 2 | from functools import wraps |
3 | 3 | from dogpile.cache import compat |
4 | import time | |
5 | ||
4 | 6 | |
5 | 7 | def eq_(a, b, msg=None): |
6 | 8 | """Assert a == b, with repr messaging on failure.""" |
23 | 25 | |
24 | 26 | from dogpile.cache.compat import configparser, io |
25 | 27 | |
28 | def winsleep(): | |
29 | # sleep for an amount of time | |
30 | # sufficient for windows time.time() | |
31 | # to change | |
32 | if compat.win32: | |
33 | time.sleep(.001) | |
26 | 34 | |
27 | 35 | def requires_py3k(fn): |
28 | 36 | @wraps(fn) |
4 | 4 | import time |
5 | 5 | import os |
6 | 6 | from nose import SkipTest |
7 | from dogpile.core.readwrite_lock import ReadWriteMutex | |
8 | from dogpile.cache.backends.file import AbstractFileLock | |
7 | 9 | |
8 | 10 | try: |
9 | 11 | import fcntl |
12 | has_fcntl = True | |
10 | 13 | except ImportError: |
11 | raise SkipTest("fcntl not available") | |
14 | has_fcntl = False | |
12 | 15 | |
13 | class DBMBackendTest(_GenericBackendTest): | |
16 | class MutexLock(AbstractFileLock): | |
17 | def __init__(self, filename): | |
18 | self.mutex = ReadWriteMutex() | |
19 | ||
20 | def acquire_read_lock(self, wait): | |
21 | ret = self.mutex.acquire_read_lock(wait) | |
22 | return wait or ret | |
23 | ||
24 | def acquire_write_lock(self, wait): | |
25 | ret = self.mutex.acquire_write_lock(wait) | |
26 | return wait or ret | |
27 | ||
28 | def release_read_lock(self): | |
29 | return self.mutex.release_read_lock() | |
30 | ||
31 | def release_write_lock(self): | |
32 | return self.mutex.release_write_lock() | |
33 | ||
34 | if has_fcntl: | |
35 | class DBMBackendTest(_GenericBackendTest): | |
36 | backend = "dogpile.cache.dbm" | |
37 | ||
38 | config_args = { | |
39 | "arguments": { | |
40 | "filename": "test.dbm" | |
41 | } | |
42 | } | |
43 | ||
44 | class DBMBackendConditionTest(_GenericBackendTest): | |
14 | 45 | backend = "dogpile.cache.dbm" |
15 | 46 | |
16 | 47 | config_args = { |
17 | "arguments":{ | |
18 | "filename":"test.dbm" | |
48 | "arguments": { | |
49 | "filename": "test.dbm", | |
50 | "lock_factory": MutexLock | |
19 | 51 | } |
20 | 52 | } |
53 | ||
21 | 54 | |
22 | 55 | class DBMBackendNoLockTest(_GenericBackendTest): |
23 | 56 | backend = "dogpile.cache.dbm" |
24 | 57 | |
25 | 58 | config_args = { |
26 | "arguments":{ | |
27 | "filename":"test.dbm", | |
28 | "rw_lockfile":False, | |
29 | "dogpile_lockfile":False, | |
59 | "arguments": { | |
60 | "filename": "test.dbm", | |
61 | "rw_lockfile": False, | |
62 | "dogpile_lockfile": False, | |
30 | 63 | } |
31 | 64 | } |
32 | 65 | |
33 | 66 | |
34 | class DBMMutexTest(_GenericMutexTest): | |
67 | class _DBMMutexTest(_GenericMutexTest): | |
35 | 68 | backend = "dogpile.cache.dbm" |
36 | ||
37 | config_args = { | |
38 | "arguments":{ | |
39 | "filename":"test.dbm" | |
40 | } | |
41 | } | |
42 | 69 | |
43 | 70 | def test_release_assertion_thread(self): |
44 | 71 | backend = self._backend() |
64 | 91 | finally: |
65 | 92 | m1.release() |
66 | 93 | |
94 | if has_fcntl: | |
95 | class DBMMutexFileTest(_DBMMutexTest): | |
96 | config_args = { | |
97 | "arguments": { | |
98 | "filename": "test.dbm" | |
99 | } | |
100 | } | |
101 | ||
102 | ||
103 | class DBMMutexConditionTest(_DBMMutexTest): | |
104 | config_args = { | |
105 | "arguments": { | |
106 | "filename": "test.dbm", | |
107 | "lock_factory": MutexLock | |
108 | } | |
109 | } | |
110 | ||
67 | 111 | |
68 | 112 | def teardown(): |
69 | 113 | for fname in os.listdir(os.curdir): |
0 | 0 | #! coding: utf-8 |
1 | 1 | |
2 | 2 | from ._fixtures import _GenericBackendFixture |
3 | from . import eq_, requires_py3k | |
3 | from . import eq_, requires_py3k, winsleep | |
4 | 4 | from unittest import TestCase |
5 | 5 | import time |
6 | 6 | from dogpile.cache import util, compat |
74 | 74 | def test_decorator_expire_callable_zero(self): |
75 | 75 | go = self._fixture(expiration_time=lambda: 0) |
76 | 76 | eq_(go(1, 2), (1, 1, 2)) |
77 | winsleep() | |
77 | 78 | eq_(go(1, 2), (2, 1, 2)) |
79 | winsleep() | |
78 | 80 | eq_(go(1, 2), (3, 1, 2)) |
79 | 81 | |
80 | 82 | def test_explicit_expire(self): |
95 | 97 | eq_(go(1, 2), (3, 1, 2)) |
96 | 98 | go.set(0, 1, 3) |
97 | 99 | eq_(go(1, 3), 0) |
100 | ||
101 | def test_explicit_get(self): | |
102 | go = self._fixture(expiration_time=1) | |
103 | eq_(go(1, 2), (1, 1, 2)) | |
104 | eq_(go.get(1, 2), (1, 1, 2)) | |
105 | eq_(go.get(2, 1), NO_VALUE) | |
106 | eq_(go(2, 1), (2, 2, 1)) | |
107 | eq_(go.get(2, 1), (2, 2, 1)) | |
108 | ||
109 | def test_explicit_get_multi(self): | |
110 | go = self._multi_fixture(expiration_time=1) | |
111 | eq_(go(1, 2), ['1 1', '1 2']) | |
112 | eq_(go.get(1, 2), ['1 1', '1 2']) | |
113 | eq_(go.get(3, 1), [NO_VALUE, '1 1']) | |
114 | eq_(go(3, 1), ['2 3', '1 1']) | |
115 | eq_(go.get(3, 1), ['2 3', '1 1']) | |
98 | 116 | |
99 | 117 | def test_explicit_set_multi(self): |
100 | 118 | go = self._multi_fixture(expiration_time=1) |
300 | 318 | |
301 | 319 | generate.set({7: 18, 10: 15}) |
302 | 320 | eq_(generate(2, 7, 10), {2: '2 5', 7: 18, 10: 15}) |
321 | ||
322 | eq_( | |
323 | generate.refresh(2, 7), | |
324 | {2: '2 7', 7: '7 8'} | |
325 | ) | |
326 | eq_(generate(2, 7, 10), {2: '2 7', 10: 15, 7: '7 8'}) | |
327 | ||
303 | 328 | |
304 | 329 | def test_multi_asdict_keys_missing(self): |
305 | 330 | reg = self._region() |
375 | 400 | |
376 | 401 | generate.set({7: 18, 10: 15}) |
377 | 402 | eq_(generate(2, 7, 10), ['2 5', 18, 15]) |
378 | ||
379 |
0 | 0 | from ._fixtures import _GenericBackendTest, _GenericMutexTest |
1 | from . import eq_ | |
1 | from . import eq_, winsleep | |
2 | 2 | from unittest import TestCase |
3 | 3 | from threading import Thread |
4 | 4 | import time |
5 | 5 | from nose import SkipTest |
6 | from dogpile.cache import compat | |
7 | ||
6 | 8 | |
7 | 9 | class _TestMemcachedConn(object): |
8 | 10 | @classmethod |
18 | 20 | |
19 | 21 | class _NonDistributedMemcachedTest(_TestMemcachedConn, _GenericBackendTest): |
20 | 22 | region_args = { |
21 | "key_mangler":lambda x: x.replace(" ", "_") | |
23 | "key_mangler": lambda x: x.replace(" ", "_") | |
22 | 24 | } |
23 | 25 | config_args = { |
24 | "arguments":{ | |
25 | "url":"127.0.0.1:11211" | |
26 | "arguments": { | |
27 | "url": "127.0.0.1:11211" | |
26 | 28 | } |
27 | 29 | } |
28 | 30 | |
29 | 31 | class _DistributedMemcachedTest(_TestMemcachedConn, _GenericBackendTest): |
30 | 32 | region_args = { |
31 | "key_mangler":lambda x: x.replace(" ", "_") | |
33 | "key_mangler": lambda x: x.replace(" ", "_") | |
32 | 34 | } |
33 | 35 | config_args = { |
34 | "arguments":{ | |
35 | "url":"127.0.0.1:11211", | |
36 | "distributed_lock":True | |
36 | "arguments": { | |
37 | "url": "127.0.0.1:11211", | |
38 | "distributed_lock": True | |
37 | 39 | } |
38 | 40 | } |
39 | 41 | |
40 | 42 | class _DistributedMemcachedMutexTest(_TestMemcachedConn, _GenericMutexTest): |
41 | 43 | config_args = { |
42 | "arguments":{ | |
43 | "url":"127.0.0.1:11211", | |
44 | "distributed_lock":True | |
44 | "arguments": { | |
45 | "url": "127.0.0.1:11211", | |
46 | "distributed_lock": True | |
45 | 47 | } |
46 | 48 | } |
47 | 49 | |
123 | 125 | |
124 | 126 | class PylibmcArgsTest(TestCase): |
125 | 127 | def test_binary_flag(self): |
126 | backend = MockPylibmcBackend(arguments={'url':'foo','binary':True}) | |
128 | backend = MockPylibmcBackend(arguments={'url': 'foo','binary': True}) | |
127 | 129 | eq_(backend._create_client().kw["binary"], True) |
128 | 130 | |
129 | 131 | def test_url_list(self): |
130 | backend = MockPylibmcBackend(arguments={'url':["a", "b", "c"]}) | |
132 | backend = MockPylibmcBackend(arguments={'url': ["a", "b", "c"]}) | |
131 | 133 | eq_(backend._create_client().arg[0], ["a", "b", "c"]) |
132 | 134 | |
133 | 135 | def test_url_scalar(self): |
134 | backend = MockPylibmcBackend(arguments={'url':"foo"}) | |
136 | backend = MockPylibmcBackend(arguments={'url': "foo"}) | |
135 | 137 | eq_(backend._create_client().arg[0], ["foo"]) |
136 | 138 | |
137 | 139 | def test_behaviors(self): |
138 | backend = MockPylibmcBackend(arguments={'url':"foo", | |
139 | "behaviors":{"q":"p"}}) | |
140 | backend = MockPylibmcBackend(arguments={'url': "foo", | |
141 | "behaviors": {"q": "p"}}) | |
140 | 142 | eq_(backend._create_client().kw["behaviors"], {"q": "p"}) |
141 | 143 | |
142 | 144 | def test_set_time(self): |
143 | backend = MockPylibmcBackend(arguments={'url':"foo", | |
144 | "memcached_expire_time":20}) | |
145 | backend.set("foo", "bar") | |
146 | eq_(backend._clients.memcached.canary, [{"time":20}]) | |
145 | backend = MockPylibmcBackend(arguments={'url': "foo", | |
146 | "memcached_expire_time": 20}) | |
147 | backend.set("foo", "bar") | |
148 | eq_(backend._clients.memcached.canary, [{"time": 20}]) | |
147 | 149 | |
148 | 150 | def test_set_min_compress_len(self): |
149 | backend = MockPylibmcBackend(arguments={'url':"foo", | |
150 | "min_compress_len":20}) | |
151 | backend.set("foo", "bar") | |
152 | eq_(backend._clients.memcached.canary, [{"min_compress_len":20}]) | |
151 | backend = MockPylibmcBackend(arguments={'url': "foo", | |
152 | "min_compress_len": 20}) | |
153 | backend.set("foo", "bar") | |
154 | eq_(backend._clients.memcached.canary, [{"min_compress_len": 20}]) | |
153 | 155 | |
154 | 156 | def test_no_set_args(self): |
155 | backend = MockPylibmcBackend(arguments={'url':"foo"}) | |
157 | backend = MockPylibmcBackend(arguments={'url': "foo"}) | |
156 | 158 | backend.set("foo", "bar") |
157 | 159 | eq_(backend._clients.memcached.canary, [{}]) |
158 | 160 | |
159 | 161 | class MemcachedArgstest(TestCase): |
160 | 162 | def test_set_time(self): |
161 | backend = MockMemcacheBackend(arguments={'url':"foo", | |
162 | "memcached_expire_time":20}) | |
163 | backend.set("foo", "bar") | |
164 | eq_(backend._clients.memcached.canary, [{"time":20}]) | |
163 | backend = MockMemcacheBackend(arguments={'url': "foo", | |
164 | "memcached_expire_time": 20}) | |
165 | backend.set("foo", "bar") | |
166 | eq_(backend._clients.memcached.canary, [{"time": 20}]) | |
165 | 167 | |
166 | 168 | def test_set_min_compress_len(self): |
167 | backend = MockMemcacheBackend(arguments={'url':"foo", | |
168 | "min_compress_len":20}) | |
169 | backend.set("foo", "bar") | |
170 | eq_(backend._clients.memcached.canary, [{"min_compress_len":20}]) | |
169 | backend = MockMemcacheBackend(arguments={'url': "foo", | |
170 | "min_compress_len": 20}) | |
171 | backend.set("foo", "bar") | |
172 | eq_(backend._clients.memcached.canary, [{"min_compress_len": 20}]) | |
171 | 173 | |
172 | 174 | |
173 | 175 | class LocalThreadTest(TestCase): |
200 | 202 | for t in threads: |
201 | 203 | t.join() |
202 | 204 | eq_(canary, [i + 1 for i in range(count)]) |
203 | eq_(MockClient.number_of_clients, 0) | |
204 | ||
205 | ||
205 | ||
206 | if compat.py27: | |
207 | eq_(MockClient.number_of_clients, 0) | |
208 | else: | |
209 | eq_(MockClient.number_of_clients, 1) | |
210 | ||
211 |
2 | 2 | class MemoryBackendTest(_GenericBackendTest): |
3 | 3 | backend = "dogpile.cache.memory" |
4 | 4 | |
5 | ||
6 | class MemoryPickleBackendTest(_GenericBackendTest): | |
7 | backend = "dogpile.cache.memory_pickle" |
0 | 0 | import pprint |
1 | 1 | from unittest import TestCase |
2 | 2 | from dogpile.cache.api import CacheBackend, CachedValue, NO_VALUE |
3 | from dogpile.cache import exception | |
3 | 4 | from dogpile.cache import make_region, register_backend, CacheRegion, util |
4 | 5 | from dogpile.cache.proxy import ProxyBackend |
5 | from . import eq_, is_, assert_raises_message, io, configparser | |
6 | from . import eq_, is_, assert_raises_message, io, configparser, winsleep | |
6 | 7 | import time, datetime |
7 | 8 | import itertools |
8 | 9 | from collections import defaultdict |
67 | 68 | my_region = make_region() |
68 | 69 | |
69 | 70 | assert_raises_message( |
70 | Exception, | |
71 | exception.ValidationError, | |
71 | 72 | "expiration_time is not a number or timedelta.", |
72 | 73 | my_region.configure, 'mock', 'one hour' |
73 | 74 | ) |
98 | 99 | reg = CacheRegion() |
99 | 100 | reg.configure("mock") |
100 | 101 | assert_raises_message( |
101 | Exception, | |
102 | exception.RegionAlreadyConfigured, | |
102 | 103 | "This region is already configured", |
103 | 104 | reg.configure, "mock" |
104 | 105 | ) |
106 | eq_(reg.is_configured, True) | |
105 | 107 | |
106 | 108 | def test_no_config(self): |
107 | 109 | reg = CacheRegion() |
108 | 110 | assert_raises_message( |
109 | Exception, | |
111 | exception.RegionNotConfigured, | |
110 | 112 | "No backend is configured on this region.", |
111 | 113 | getattr, reg, "backend" |
112 | 114 | ) |
115 | eq_(reg.is_configured, False) | |
113 | 116 | |
114 | 117 | def test_set_get_value(self): |
115 | 118 | reg = self._region() |
205 | 208 | eq_(reg.get("some key"), "some value 2") |
206 | 209 | |
207 | 210 | |
208 | def test_invalidate_get(self): | |
209 | reg = self._region() | |
210 | reg.set("some key", "some value") | |
211 | def test_hard_invalidate_get(self): | |
212 | reg = self._region() | |
213 | reg.set("some key", "some value") | |
214 | time.sleep(.1) | |
211 | 215 | reg.invalidate() |
212 | 216 | is_(reg.get("some key"), NO_VALUE) |
213 | 217 | |
214 | def test_invalidate_get_or_create(self): | |
218 | def test_hard_invalidate_get_or_create(self): | |
215 | 219 | reg = self._region() |
216 | 220 | counter = itertools.count(1) |
217 | 221 | def creator(): |
219 | 223 | eq_(reg.get_or_create("some key", creator), |
220 | 224 | "some value 1") |
221 | 225 | |
226 | time.sleep(.1) | |
222 | 227 | reg.invalidate() |
223 | 228 | eq_(reg.get_or_create("some key", creator), |
224 | 229 | "some value 2") |
230 | ||
231 | def test_soft_invalidate_get(self): | |
232 | reg = self._region(config_args={"expiration_time": 1}) | |
233 | reg.set("some key", "some value") | |
234 | time.sleep(.1) | |
235 | reg.invalidate(hard=False) | |
236 | is_(reg.get("some key"), NO_VALUE) | |
237 | ||
238 | def test_soft_invalidate_get_or_create(self): | |
239 | reg = self._region(config_args={"expiration_time": 1}) | |
240 | counter = itertools.count(1) | |
241 | def creator(): | |
242 | return "some value %d" % next(counter) | |
243 | eq_(reg.get_or_create("some key", creator), | |
244 | "some value 1") | |
245 | ||
246 | time.sleep(.1) | |
247 | reg.invalidate(hard=False) | |
248 | eq_(reg.get_or_create("some key", creator), | |
249 | "some value 2") | |
250 | ||
251 | def test_soft_invalidate_get_or_create_multi(self): | |
252 | reg = self._region(config_args={"expiration_time": 5}) | |
253 | values = [1, 2, 3] | |
254 | def creator(*keys): | |
255 | v = values.pop(0) | |
256 | return [v for k in keys] | |
257 | ret = reg.get_or_create_multi( | |
258 | [1, 2], creator) | |
259 | eq_(ret, [1, 1]) | |
260 | time.sleep(.1) | |
261 | reg.invalidate(hard=False) | |
262 | ret = reg.get_or_create_multi( | |
263 | [1, 2], creator) | |
264 | eq_(ret, [2, 2]) | |
265 | ||
266 | def test_soft_invalidate_requires_expire_time_get(self): | |
267 | reg = self._region() | |
268 | reg.invalidate(hard=False) | |
269 | assert_raises_message( | |
270 | exception.DogpileCacheException, | |
271 | "Non-None expiration time required for soft invalidation", | |
272 | reg.get_or_create, "some key", lambda: "x" | |
273 | ) | |
274 | ||
275 | def test_soft_invalidate_requires_expire_time_get_multi(self): | |
276 | reg = self._region() | |
277 | reg.invalidate(hard=False) | |
278 | assert_raises_message( | |
279 | exception.DogpileCacheException, | |
280 | "Non-None expiration time required for soft invalidation", | |
281 | reg.get_or_create_multi, ["k1", "k2"], lambda k: "x" | |
282 | ) | |
225 | 283 | |
226 | 284 | def test_should_cache_fn(self): |
227 | 285 | reg = self._region() |
234 | 292 | should_cache_fn=should_cache_fn) |
235 | 293 | eq_(ret, 1) |
236 | 294 | eq_(reg.backend._cache['some key'][0], 1) |
295 | time.sleep(.1) | |
237 | 296 | reg.invalidate() |
238 | 297 | ret = reg.get_or_create( |
239 | 298 | "some key", creator, |
246 | 305 | should_cache_fn=should_cache_fn) |
247 | 306 | eq_(ret, 3) |
248 | 307 | eq_(reg.backend._cache['some key'][0], 3) |
308 | ||
249 | 309 | |
250 | 310 | def test_should_cache_fn_multi(self): |
251 | 311 | reg = self._region() |
259 | 319 | should_cache_fn=should_cache_fn) |
260 | 320 | eq_(ret, [1, 1]) |
261 | 321 | eq_(reg.backend._cache[1][0], 1) |
322 | time.sleep(.1) | |
262 | 323 | reg.invalidate() |
263 | 324 | ret = reg.get_or_create_multi( |
264 | 325 | [1, 2], creator, |
265 | 326 | should_cache_fn=should_cache_fn) |
266 | 327 | eq_(ret, [2, 2]) |
267 | 328 | eq_(reg.backend._cache[1][0], 1) |
329 | time.sleep(.1) | |
268 | 330 | reg.invalidate() |
269 | 331 | ret = reg.get_or_create_multi( |
270 | 332 | [1, 2], creator, |
0 | from unittest import TestCase | |
1 | ||
2 | from dogpile.cache import util | |
3 | ||
4 | ||
5 | class UtilsTest(TestCase): | |
6 | """ Test the relevant utils functionality. | |
7 | """ | |
8 | ||
9 | def test_coerce_string_conf(self): | |
10 | settings = {'expiration_time': '-1'} | |
11 | coerced = util.coerce_string_conf(settings) | |
12 | self.assertEqual(coerced['expiration_time'], -1) | |
13 | ||
14 | settings = {'expiration_time': '+1'} | |
15 | coerced = util.coerce_string_conf(settings) | |
16 | self.assertEqual(coerced['expiration_time'], 1) |