New upstream version 1.5.3
Downstreamer
3 years ago
0 | 0 | language: python |
1 | dist: xenial | |
2 | addons: | |
3 | apt: | |
4 | sources: | |
5 | - chris-lea-redis-server | |
6 | - sourceline: 'ppa:chris-lea/zeromq' | |
7 | packages: | |
8 | - redis-server | |
9 | - libzmq3-dev | |
1 | 10 | services: |
2 | 11 | - redis-server |
3 | 12 | python: |
4 | 13 | - '2.7' |
5 | 14 | - '3.5' |
6 | 15 | - '3.6' |
7 | - pypy | |
16 | - '3.7' | |
8 | 17 | before_install: |
9 | 18 | - pip install coveralls |
10 | 19 | install: |
11 | - sudo rm -rf /dev/shm && sudo ln -s /run/shm /dev/shm | |
12 | - sudo apt-add-repository -y ppa:chris-lea/zeromq | |
13 | - sudo apt-get update | |
14 | - sudo apt-get install -y libzmq3-dev | |
15 | 20 | - pip install -U pip |
16 | 21 | - pip install cython |
17 | 22 | - cython logbook/_speedups.pyx |
26 | 31 | |
27 | 32 | matrix: |
28 | 33 | exclude: |
29 | - python: pypy | |
30 | env: CYBUILD=True | |
31 | - python: pypy3 | |
32 | env: CYBUILD=True | |
33 | 34 | include: |
34 | 35 | - python: "3.6" |
35 | 36 | env: GEVENT=True CYBUILD=True |
56 | 57 | password: |
57 | 58 | secure: WFmuAbtBDIkeZArIFQRCwyO1TdvF2PaZpo75r3mFgnY+aWm75cdgjZKoNqVprF/f+v9EsX2kDdQ7ZfuhMLgP8MNziB+ty7579ZDGwh64jGoi+DIoeblAFu5xNAqjvhie540uCE8KySk9s+Pq5EpOA5w18V4zxTw+h6tnBQ0M9cQ= |
58 | 59 | on: |
60 | python: "3.7" | |
61 | condition: $CYBUILD = 'True' | |
59 | 62 | tags: true |
60 | 63 | repo: getlogbook/logbook |
61 | 64 | distributions: "sdist" |
0 | 0 | Logbook Changelog |
1 | 1 | ================= |
2 | ||
3 | Version 1.5.1 | |
4 | ------------- | |
5 | ||
6 | Released on August 20th, 2019 | |
7 | ||
8 | - Added support for asyncio and contextvars | |
2 | 9 | |
3 | 10 | Version 1.4.3 |
4 | 11 | ------------- |
21 | 21 | [ti]: https://secure.travis-ci.org/getlogbook/logbook.svg?branch=master |
22 | 22 | [tl]: https://travis-ci.org/getlogbook/logbook |
23 | 23 | [ai]: https://ci.appveyor.com/api/projects/status/quu99exa26e06npp?svg=true |
24 | [vi]: https://img.shields.io/badge/python-2.6%2C2.7%2C3.3%2C3.4%2C3.5-green.svg | |
24 | [vi]: https://img.shields.io/badge/python-2.7%2C3.5%2C3.6%2C3.7-green.svg | |
25 | 25 | [di]: https://img.shields.io/pypi/dm/logbook.svg |
26 | 26 | [al]: https://ci.appveyor.com/project/vmalloc/logbook |
27 | 27 | [pi]: https://img.shields.io/pypi/v/logbook.svg |
28 | [pl]: https://pypi.python.org/pypi/Logbook | |
28 | [pl]: https://pypi.org/pypi/Logbook | |
29 | 29 | [ci]: https://coveralls.io/repos/getlogbook/logbook/badge.svg?branch=master&service=github |
30 | 30 | [cl]: https://coveralls.io/github/getlogbook/logbook?branch=master |
38 | 38 | - PYTHON: "C:\\Python36-x64" |
39 | 39 | CYBUILD: "TRUE" |
40 | 40 | |
41 | - PYTHON: "C:\\Python37" | |
42 | - PYTHON: "C:\\Python37" | |
43 | CYBUILD: "TRUE" | |
44 | ||
45 | - PYTHON: "C:\\Python37-x64" | |
46 | - PYTHON: "C:\\Python37-x64" | |
47 | CYBUILD: "TRUE" | |
48 | ||
41 | 49 | init: |
42 | 50 | - echo %PYTHON% |
43 | 51 | - set PATH=%PYTHON%;%PYTHON%\Scripts;%PATH% |
1 | 1 | """ |
2 | 2 | Runs the benchmarks |
3 | 3 | """ |
4 | from __future__ import print_function | |
4 | 5 | import sys |
5 | 6 | import os |
6 | 7 | import re |
38 | 39 | |
39 | 40 | |
def bench_wrapper(use_gevent=False):
    """Run every benchmark in the bench directory, printing a banner first.

    :param use_gevent: forwarded to each individual benchmark run.
    """
    banner = 'Running benchmark with Logbook %s (gevent enabled=%s)' % \
        (version, use_gevent)
    print('=' * 80)
    print(banner)
    print('-' * 80)
    os.chdir(bench_directory)
    for name in list_benchmarks():
        run_bench(name, use_gevent)
    print('-' * 80)
49 | 49 | |
50 | 50 | |
51 | 51 | def main(): |
50 | 50 | * `Mailing list`_ |
51 | 51 | * IRC: ``#pocoo`` on freenode |
52 | 52 | |
53 | .. _Download from PyPI: http://pypi.python.org/pypi/Logbook | |
53 | .. _Download from PyPI: https://pypi.org/pypi/Logbook | |
54 | 54 | .. _Master repository on GitHub: https://github.com/getlogbook/logbook |
55 | 55 | .. _Mailing list: http://groups.google.com/group/pocoo-libs |
11 | 11 | from logbook.helpers import get_iterator_next_method |
12 | 12 | from logbook.concurrency import ( |
13 | 13 | thread_get_ident, greenlet_get_ident, thread_local, greenlet_local, |
14 | ThreadLock, GreenletRLock, is_gevent_enabled) | |
14 | ThreadLock, GreenletRLock, is_gevent_enabled, ContextVar, context_get_ident, | |
15 | is_context_enabled) | |
15 | 16 | |
16 | 17 | _missing = object() |
17 | 18 | _MAX_CONTEXT_OBJECT_CACHE = 256 |
66 | 67 | """Pops the stacked object from the greenlet stack.""" |
67 | 68 | raise NotImplementedError() |
68 | 69 | |
70 | def push_context(self): | |
71 | """Pushes the stacked object to the context stack.""" | |
72 | raise NotImplementedError() | |
73 | ||
74 | def pop_context(self): | |
75 | """Pops the stacked object from the context stack.""" | |
76 | raise NotImplementedError() | |
77 | ||
69 | 78 | def push_thread(self): |
70 | 79 | """Pushes the stacked object to the thread stack.""" |
71 | 80 | raise NotImplementedError() |
100 | 109 | execute code while the object is bound to the greenlet. |
101 | 110 | """ |
102 | 111 | return _cls(self, self.push_greenlet, self.pop_greenlet) |
112 | ||
113 | def contextbound(self, _cls=_StackBound): | |
114 | """Can be used in combination with the `with` statement to | |
115 | execute code while the object is bound to the concurrent | |
116 | context. | |
117 | """ | |
118 | return _cls(self, self.push_context, self.pop_context) | |
103 | 119 | |
104 | 120 | def threadbound(self, _cls=_StackBound): |
105 | 121 | """Can be used in combination with the `with` statement to |
125 | 141 | self._thread_context = thread_local() |
126 | 142 | self._greenlet_context_lock = GreenletRLock() |
127 | 143 | self._greenlet_context = greenlet_local() |
144 | self._context_stack = ContextVar('stack') | |
128 | 145 | self._cache = {} |
129 | 146 | self._stackop = get_iterator_next_method(count()) |
130 | 147 | |
133 | 150 | application and context cache. |
134 | 151 | """ |
135 | 152 | use_gevent = is_gevent_enabled() |
136 | tid = greenlet_get_ident() if use_gevent else thread_get_ident() | |
153 | use_context = is_context_enabled() | |
154 | ||
155 | if use_gevent: | |
156 | tid = greenlet_get_ident() | |
157 | elif use_context: | |
158 | tid = context_get_ident() | |
159 | else: | |
160 | tid = thread_get_ident() | |
161 | ||
137 | 162 | objects = self._cache.get(tid) |
138 | 163 | if objects is None: |
139 | 164 | if len(self._cache) > _MAX_CONTEXT_OBJECT_CACHE: |
140 | 165 | self._cache.clear() |
141 | 166 | objects = self._global[:] |
142 | 167 | objects.extend(getattr(self._thread_context, 'stack', ())) |
168 | ||
143 | 169 | if use_gevent: |
144 | 170 | objects.extend(getattr(self._greenlet_context, 'stack', ())) |
171 | ||
172 | if use_context: | |
173 | objects.extend(self._context_stack.get([])) | |
174 | ||
145 | 175 | objects.sort(reverse=True) |
146 | 176 | objects = [x[1] for x in objects] |
147 | 177 | self._cache[tid] = objects |
172 | 202 | finally: |
173 | 203 | self._greenlet_context_lock.release() |
174 | 204 | |
    def push_context(self, obj):
        """Push *obj* onto the stack bound to the current (asyncio) context.

        Invalidates the per-context cache entry so the next call to
        iter_context_objects() rebuilds it with *obj* included.
        """
        self._cache.pop(context_get_ident(), None)
        # Entries are (sequence-number, obj) pairs so later sorting keeps
        # insertion order.
        item = (self._stackop(), obj)
        stack = self._context_stack.get(None)
        if stack is None:
            stack = [item]
            self._context_stack.set(stack)
        else:
            stack.append(item)
    def pop_context(self):
        """Pop and return the topmost object from the current context stack.

        Raises AssertionError when nothing was pushed in this context.
        """
        self._cache.pop(context_get_ident(), None)
        stack = self._context_stack.get(None)
        assert stack, 'no objects on stack'
        # Each entry is a (sequence-number, obj) pair; return just the object.
        return stack.pop()[1]
220 | ||
175 | 221 | def push_thread(self, obj): |
176 | 222 | self._thread_context_lock.acquire() |
177 | 223 | try: |
0 | 0 | # -*- coding: utf-8 -*- |
1 | # cython: language_level=2 | |
1 | 2 | """ |
2 | 3 | logbook._speedups |
3 | 4 | ~~~~~~~~~~~~~~~~~ |
8 | 9 | :license: BSD, see LICENSE for more details. |
9 | 10 | """ |
10 | 11 | |
11 | import platform | |
12 | ||
12 | 13 | from logbook.concurrency import (is_gevent_enabled, thread_get_ident, greenlet_get_ident, thread_local, |
13 | GreenletRLock, greenlet_local) | |
14 | GreenletRLock, greenlet_local, ContextVar, context_get_ident, is_context_enabled) | |
14 | 15 | |
15 | 16 | from cpython.dict cimport PyDict_Clear, PyDict_SetItem |
16 | from cpython.list cimport PyList_New, PyList_Append, PyList_Sort, \ | |
17 | PyList_SET_ITEM, PyList_GET_SIZE | |
17 | from cpython.list cimport PyList_Append, PyList_Sort, PyList_GET_SIZE | |
18 | ||
18 | 19 | from cpython.pythread cimport PyThread_type_lock, PyThread_allocate_lock, \ |
19 | 20 | PyThread_release_lock, PyThread_acquire_lock, WAIT_LOCK |
20 | 21 | |
21 | cdef object _missing = object() | |
22 | _missing = object() | |
22 | 23 | |
23 | 24 | cdef enum: |
24 | 25 | _MAX_CONTEXT_OBJECT_CACHE = 256 |
39 | 40 | def __get__(self, obj, type): |
40 | 41 | if obj is None: |
41 | 42 | return self |
42 | rv = getattr3(obj, self._name, _missing) | |
43 | rv = getattr(obj, self._name, _missing) | |
43 | 44 | if rv is not _missing and rv != self.fallback: |
44 | 45 | return rv |
45 | 46 | if obj.group is None: |
95 | 96 | |
96 | 97 | |
97 | 98 | cdef class StackedObject: |
98 | """Baseclass for all objects that provide stack manipulation | |
99 | """Base class for all objects that provide stack manipulation | |
99 | 100 | operations. |
100 | 101 | """ |
102 | cpdef push_context(self): | |
103 | """Pushes the stacked object to the asyncio (via contextvar) stack.""" | |
104 | raise NotImplementedError() | |
105 | ||
106 | cpdef pop_context(self): | |
107 | """Pops the stacked object from the asyncio (via contextvar) stack.""" | |
108 | raise NotImplementedError() | |
101 | 109 | |
102 | 110 | cpdef push_greenlet(self): |
103 | 111 | """Pushes the stacked object to the greenlet stack.""" |
153 | 161 | execute code while the object is bound to the application. |
154 | 162 | """ |
155 | 163 | return _StackBound(self, self.push_application, self.pop_application) |
164 | ||
165 | cpdef contextbound(self): | |
166 | """Can be used in combination with the `with` statement to | |
167 | execute code while the object is bound to the asyncio context. | |
168 | """ | |
169 | return _StackBound(self, self.push_context, self.pop_context) | |
156 | 170 | |
157 | 171 | |
158 | 172 | cdef class ContextStackManager: |
161 | 175 | cdef object _thread_context |
162 | 176 | cdef object _greenlet_context_lock |
163 | 177 | cdef object _greenlet_context |
178 | cdef object _context_stack | |
164 | 179 | cdef dict _cache |
165 | 180 | cdef int _stackcnt |
166 | 181 | |
170 | 185 | self._thread_context = thread_local() |
171 | 186 | self._greenlet_context_lock = GreenletRLock() |
172 | 187 | self._greenlet_context = greenlet_local() |
188 | self._context_stack = ContextVar('stack') | |
173 | 189 | self._cache = {} |
174 | 190 | self._stackcnt = 0 |
175 | 191 | |
179 | 195 | |
180 | 196 | cpdef iter_context_objects(self): |
181 | 197 | use_gevent = is_gevent_enabled() |
182 | tid = greenlet_get_ident() if use_gevent else thread_get_ident() | |
198 | use_context = is_context_enabled() | |
199 | ||
200 | if use_gevent: | |
201 | tid = greenlet_get_ident() | |
202 | elif use_context: | |
203 | tid = context_get_ident() | |
204 | else: | |
205 | tid = thread_get_ident() | |
206 | ||
183 | 207 | objects = self._cache.get(tid) |
184 | 208 | if objects is None: |
185 | 209 | if PyList_GET_SIZE(self._cache) > _MAX_CONTEXT_OBJECT_CACHE: |
186 | 210 | PyDict_Clear(self._cache) |
187 | 211 | objects = self._global[:] |
188 | objects.extend(getattr3(self._thread_context, 'stack', ())) | |
212 | objects.extend(getattr(self._thread_context, 'stack', ())) | |
213 | ||
189 | 214 | if use_gevent: |
190 | objects.extend(getattr3(self._greenlet_context, 'stack', ())) | |
215 | objects.extend(getattr(self._greenlet_context, 'stack', ())) | |
216 | ||
217 | if use_context: | |
218 | objects.extend(self._context_stack.get([])) | |
219 | ||
191 | 220 | PyList_Sort(objects) |
192 | 221 | objects = [(<_StackItem>x).val for x in objects] |
193 | 222 | PyDict_SetItem(self._cache, tid, objects) |
198 | 227 | try: |
199 | 228 | self._cache.pop(greenlet_get_ident(), None) |
200 | 229 | item = _StackItem(self._stackop(), obj) |
201 | stack = getattr3(self._greenlet_context, 'stack', None) | |
230 | stack = getattr(self._greenlet_context, 'stack', None) | |
202 | 231 | if stack is None: |
203 | 232 | self._greenlet_context.stack = [item] |
204 | 233 | else: |
210 | 239 | self._greenlet_context_lock.acquire() |
211 | 240 | try: |
212 | 241 | self._cache.pop(greenlet_get_ident(), None) |
213 | stack = getattr3(self._greenlet_context, 'stack', None) | |
242 | stack = getattr(self._greenlet_context, 'stack', None) | |
214 | 243 | assert stack, 'no objects on stack' |
215 | 244 | return (<_StackItem>stack.pop()).val |
216 | 245 | finally: |
217 | 246 | self._greenlet_context_lock.release() |
218 | 247 | |
248 | cpdef push_context(self, obj): | |
249 | self._cache.pop(context_get_ident(), None) | |
250 | item = _StackItem(self._stackop(), obj) | |
251 | stack = self._context_stack.get(None) | |
252 | ||
253 | if stack is None: | |
254 | stack = [item] | |
255 | self._context_stack.set(stack) | |
256 | else: | |
257 | PyList_Append(stack, item) | |
258 | ||
259 | cpdef pop_context(self): | |
260 | self._cache.pop(context_get_ident(), None) | |
261 | stack = self._context_stack.get(None) | |
262 | assert stack, 'no objects on stack' | |
263 | return (<_StackItem>stack.pop()).val | |
264 | ||
219 | 265 | cpdef push_thread(self, obj): |
220 | 266 | PyThread_acquire_lock(self._thread_context_lock, WAIT_LOCK) |
221 | 267 | try: |
222 | 268 | self._cache.pop(thread_get_ident(), None) |
223 | 269 | item = _StackItem(self._stackop(), obj) |
224 | stack = getattr3(self._thread_context, 'stack', None) | |
270 | stack = getattr(self._thread_context, 'stack', None) | |
225 | 271 | if stack is None: |
226 | 272 | self._thread_context.stack = [item] |
227 | 273 | else: |
233 | 279 | PyThread_acquire_lock(self._thread_context_lock, WAIT_LOCK) |
234 | 280 | try: |
235 | 281 | self._cache.pop(thread_get_ident(), None) |
236 | stack = getattr3(self._thread_context, 'stack', None) | |
282 | stack = getattr(self._thread_context, 'stack', None) | |
237 | 283 | assert stack, 'no objects on stack' |
238 | 284 | return (<_StackItem>stack.pop()).val |
239 | 285 | finally: |
22 | 22 | parse_iso8601, string_types, to_safe_json, u, |
23 | 23 | xrange) |
24 | 24 | |
25 | _has_speedups = False | |
25 | 26 | try: |
27 | if os.environ.get('DISABLE_LOGBOOK_CEXT_AT_RUNTIME'): | |
28 | raise ImportError("Speedups disabled via DISABLE_LOGBOOK_CEXT_AT_RUNTIME") | |
29 | ||
26 | 30 | from logbook._speedups import ( |
27 | 31 | _missing, group_reflected_property, ContextStackManager, StackedObject) |
32 | ||
33 | _has_speedups = True | |
28 | 34 | except ImportError: |
29 | 35 | from logbook._fallback import ( |
30 | 36 | _missing, group_reflected_property, ContextStackManager, StackedObject) |
72 | 78 | logbook.set_datetime_format("local") |
73 | 79 | |
74 | 80 | Other uses rely on your supplied :py:obj:`datetime_format`. |
75 | Using `pytz <https://pypi.python.org/pypi/pytz>`_ for example:: | |
81 | Using `pytz <https://pypi.org/pypi/pytz>`_ for example:: | |
76 | 82 | |
77 | 83 | from datetime import datetime |
78 | 84 | import logbook |
205 | 211 | popped = self.stack_manager.pop_greenlet() |
206 | 212 | assert popped is self, 'popped unexpected object' |
207 | 213 | |
214 | def push_context(self): | |
215 | """Pushes the context object to the context stack.""" | |
216 | self.stack_manager.push_context(self) | |
217 | ||
218 | def pop_context(self): | |
219 | """Pops the context object from the stack.""" | |
220 | popped = self.stack_manager.pop_context() | |
221 | assert popped is self, 'popped unexpected object' | |
222 | ||
208 | 223 | def push_thread(self): |
209 | 224 | """Pushes the context object to the thread stack.""" |
210 | 225 | self.stack_manager.push_thread(self) |
255 | 270 | def pop_greenlet(self): |
256 | 271 | for obj in reversed(self.objects): |
257 | 272 | obj.pop_greenlet() |
273 | ||
274 | def push_context(self): | |
275 | for obj in self.objects: | |
276 | obj.push_context() | |
277 | ||
278 | def pop_context(self): | |
279 | for obj in reversed(self.objects): | |
280 | obj.pop_context() | |
258 | 281 | |
259 | 282 | |
260 | 283 | class Processor(ContextObject): |
15 | 15 | from datetime import date, datetime |
16 | 16 | |
17 | 17 | import logbook |
18 | from logbook.helpers import u, string_types, iteritems | |
18 | from logbook.helpers import u, string_types, iteritems, collections_abc | |
19 | 19 | |
20 | 20 | _epoch_ord = date(1970, 1, 1).toordinal() |
21 | 21 | |
132 | 132 | kwargs = None |
133 | 133 | |
134 | 134 | # Logging allows passing a mapping object, in which case args will be a mapping. |
135 | if isinstance(args, collections.Mapping): | |
135 | if isinstance(args, collections_abc.Mapping): | |
136 | 136 | kwargs = args |
137 | 137 | args = None |
138 | 138 | record = LoggingCompatRecord(old_record.name, |
163 | 163 | return GreenletRLock() |
164 | 164 | else: |
165 | 165 | return ThreadRLock() |
166 | ||
167 | ||
has_contextvars = True
try:
    import contextvars
except ImportError:
    has_contextvars = False

if has_contextvars:
    from contextvars import ContextVar
    from itertools import count

    context_ident_counter = count()
    context_ident = ContextVar('context_ident')

    def context_get_ident():
        """Return a stable identifier for the current execution context.

        Lazily assigns a unique ``'context-N'`` string the first time it
        is called in a given context and returns the same value on later
        calls from that context.
        """
        try:
            return context_ident.get()
        except LookupError:
            ident = 'context-%s' % next(context_ident_counter)
            context_ident.set(ident)
            return ident

    def is_context_enabled():
        """Return True once context_get_ident() has run in this context."""
        try:
            context_ident.get()
            return True
        except LookupError:
            return False

else:
    class ContextVar(object):
        """Minimal stand-in for ``contextvars.ContextVar`` on Python < 3.7.

        Values are stored per *thread* (the closest available scope);
        ``get`` returns *default* when nothing was set in this thread.
        """

        def __init__(self, name):
            self.name = name
            self.local = thread_local()

        def set(self, value):
            # Store on an attribute of the thread-local namespace instead
            # of rebinding ``self.local`` itself, so each thread keeps its
            # own value (the original clobbered the thread_local object).
            self.local.value = value

        def get(self, default=None):
            # The original returned *default* on both branches, so a
            # stored value could never be read back; read the per-thread
            # value when present.
            return getattr(self.local, 'value', default)

    def context_get_ident():
        # Without contextvars everything shares a single pseudo-context.
        return 1

    def is_context_enabled():
        return False
31 | 31 | _missing, lookup_level, Flags, ContextObject, ContextStackManager, |
32 | 32 | _datetime_factory) |
33 | 33 | from logbook.helpers import ( |
34 | rename, b, _is_text_stream, is_unicode, PY2, zip, xrange, string_types, | |
34 | rename, b, _is_text_stream, is_unicode, PY2, zip, xrange, string_types, collections_abc, | |
35 | 35 | integer_types, reraise, u, with_metaclass) |
36 | 36 | from logbook.concurrency import new_fine_grained_lock |
37 | 37 | |
1354 | 1354 | # - tuple to be unpacked to variables keyfile and certfile. |
1355 | 1355 | # - secure=() equivalent to secure=True for backwards compatibility. |
1356 | 1356 | # - secure=False equivalent to secure=None to disable. |
1357 | if isinstance(self.secure, collections.Mapping): | |
1357 | if isinstance(self.secure, collections_abc.Mapping): | |
1358 | 1358 | keyfile = self.secure.get('keyfile', None) |
1359 | 1359 | certfile = self.secure.get('certfile', None) |
1360 | elif isinstance(self.secure, collections.Iterable): | |
1360 | elif isinstance(self.secure, collections_abc.Iterable): | |
1361 | 1361 | # Allow empty tuple for backwards compatibility |
1362 | 1362 | if len(self.secure) == 0: |
1363 | 1363 | keyfile = certfile = None |
1380 | 1380 | con.ehlo() |
1381 | 1381 | |
1382 | 1382 | # Allow credentials to be a tuple or dict. |
1383 | if isinstance(self.credentials, collections.Mapping): | |
1383 | if isinstance(self.credentials, collections_abc.Mapping): | |
1384 | 1384 | credentials_args = () |
1385 | 1385 | credentials_kwargs = self.credentials |
1386 | 1386 | else: |
1899 | 1899 | Handler.pop_thread(self) |
1900 | 1900 | self.rollover() |
1901 | 1901 | |
1902 | def pop_context(self): | |
1903 | Handler.pop_context(self) | |
1904 | self.rollover() | |
1905 | ||
1902 | 1906 | def pop_greenlet(self): |
1903 | 1907 | Handler.pop_greenlet(self) |
1904 | 1908 | self.rollover() |
19 | 19 | |
20 | 20 | if PY2: |
21 | 21 | import __builtin__ as _builtins |
22 | import collections as collections_abc | |
22 | 23 | else: |
23 | 24 | import builtins as _builtins |
25 | import collections.abc as collections_abc | |
24 | 26 | |
25 | 27 | try: |
26 | 28 | import json |
326 | 326 | .. versionchanged:: 1.0.0 |
327 | 327 | Added Windows support if `colorama`_ is installed. |
328 | 328 | |
329 | .. _`colorama`: https://pypi.python.org/pypi/colorama | |
329 | .. _`colorama`: https://pypi.org/pypi/colorama | |
330 | 330 | """ |
331 | 331 | _use_color = None |
332 | 332 | |
382 | 382 | .. versionchanged:: 1.0 |
383 | 383 | Added Windows support if `colorama`_ is installed. |
384 | 384 | |
385 | .. _`colorama`: https://pypi.python.org/pypi/colorama | |
385 | .. _`colorama`: https://pypi.org/pypi/colorama | |
386 | 386 | """ |
387 | 387 | def __init__(self, *args, **kwargs): |
388 | 388 | StderrHandler.__init__(self, *args, **kwargs) |
470 | 470 | |
471 | 471 | def pop_thread(self): |
472 | 472 | Handler.pop_thread(self) |
473 | self.flush() | |
474 | ||
475 | def pop_context(self): | |
476 | Handler.pop_context(self) | |
473 | 477 | self.flush() |
474 | 478 | |
475 | 479 | def pop_greenlet(self): |
603 | 603 | queue and sends it to a handler. Both queue and handler are |
604 | 604 | taken from the passed :class:`ThreadedWrapperHandler`. |
605 | 605 | """ |
606 | _sentinel = object() | |
606 | class Command(object): | |
607 | stop = object() | |
608 | emit = object() | |
609 | emit_batch = object() | |
607 | 610 | |
608 | 611 | def __init__(self, wrapper_handler): |
609 | 612 | self.wrapper_handler = wrapper_handler |
620 | 623 | def stop(self): |
621 | 624 | """Stops the task thread.""" |
622 | 625 | if self.running: |
623 | self.wrapper_handler.queue.put_nowait(self._sentinel) | |
626 | self.wrapper_handler.queue.put_nowait((self.Command.stop, )) | |
624 | 627 | self._thread.join() |
625 | 628 | self._thread = None |
626 | 629 | |
627 | 630 | def _target(self): |
628 | 631 | while 1: |
629 | record = self.wrapper_handler.queue.get() | |
630 | if record is self._sentinel: | |
632 | item = self.wrapper_handler.queue.get() | |
633 | command, data = item[0], item[1:] | |
634 | if command is self.Command.stop: | |
631 | 635 | self.running = False |
632 | 636 | break |
633 | self.wrapper_handler.handler.handle(record) | |
637 | elif command is self.Command.emit: | |
638 | (record, ) = data | |
639 | self.wrapper_handler.handler.emit(record) | |
640 | elif command is self.Command.emit_batch: | |
641 | record, reason = data | |
642 | self.wrapper_handler.handler.emit_batch(record, reason) | |
634 | 643 | |
635 | 644 | |
636 | 645 | class ThreadedWrapperHandler(WrapperHandler): |
662 | 671 | self.handler.close() |
663 | 672 | |
664 | 673 | def emit(self, record): |
665 | try: | |
666 | self.queue.put_nowait(record) | |
674 | item = (TWHThreadController.Command.emit, record) | |
675 | try: | |
676 | self.queue.put_nowait(item) | |
677 | except Full: | |
678 | # silently drop | |
679 | pass | |
680 | ||
681 | def emit_batch(self, records, reason): | |
682 | item = (TWHThreadController.Command.emit_batch, records, reason) | |
683 | try: | |
684 | self.queue.put_nowait(item) | |
667 | 685 | except Full: |
668 | 686 | # silently drop |
669 | 687 | pass |
0 | 0 | #! /usr/bin/python |
1 | import pip | |
1 | from pip._internal import main as pip_main | |
2 | 2 | import sys |
3 | 3 | |
4 | 4 | if __name__ == '__main__': |
13 | 13 | ] |
14 | 14 | |
15 | 15 | print("Setting up dependencies...") |
16 | result = pip.main(["install"] + deps) | |
16 | result = pip_main(["install"] + deps) | |
17 | 17 | sys.exit(result) |
0 | import sys | |
1 | ||
0 | 2 | import logbook |
1 | 3 | import pytest |
2 | 4 | |
105 | 107 | @request.addfinalizer |
106 | 108 | def fin(): |
107 | 109 | _disable_gevent() |
110 | ||
111 | ||
def pytest_ignore_collect(path, config):
    """Skip collecting test_asyncio.py on interpreters older than 3.5.

    The asyncio tests use ``async def`` syntax, which is a SyntaxError
    before Python 3.5, so the file must be excluded at collection time.
    """
    # Tuple comparison expresses "older than 3.5" directly; the original
    # `major < 3 or minor < 5` check would misfire on any future major
    # version with a minor below 5.
    return 'test_asyncio.py' in path.basename and sys.version_info < (3, 5)
0 | import pytest | |
1 | import logbook | |
2 | import asyncio | |
3 | from logbook.concurrency import has_contextvars | |
4 | ||
5 | ITERATIONS = 100 | |
6 | ||
7 | ||
@pytest.mark.skipif(not has_contextvars, reason="Contexvars not available")
def test_asyncio_context_management(logger):
    """Each task only sees records logged while its own handler is bound."""
    handler_one = logbook.TestHandler()
    handler_two = logbook.TestHandler()

    async def worker(handler, message):
        for _ in range(ITERATIONS):
            with handler.contextbound():
                logger.info(message)

                await asyncio.sleep(0)  # allow for context switch

    both = asyncio.gather(worker(handler_one, 'task1'), worker(handler_two, 'task2'))
    asyncio.get_event_loop().run_until_complete(both)

    for handler, message in ((handler_one, 'task1'), (handler_two, 'task2')):
        assert len(handler.records) == ITERATIONS
        assert all(message == record.msg for record in handler.records)
88 | 88 | assert test_handler.has_warning('Hello World') |
89 | 89 | |
90 | 90 | |
class BatchTestHandler(logbook.TestHandler):
    """TestHandler that additionally remembers how records arrived.

    ``batches`` gains one list per emit call: a singleton list for each
    plain ``emit`` and the full record list for each ``emit_batch``.
    """

    def __init__(self, *args, **kwargs):
        super(BatchTestHandler, self).__init__(*args, **kwargs)
        self.batches = []

    def emit(self, record):
        # A single record counts as a batch of one.
        super(BatchTestHandler, self).emit(record)
        self.batches.append([record])

    def emit_batch(self, records, reason):
        # Forward every record individually but remember the grouping.
        for entry in records:
            super(BatchTestHandler, self).emit(entry)
        self.batches.append(records)
105 | ||
91 | 106 | def test_threaded_wrapper_handler(logger): |
92 | 107 | from logbook.queues import ThreadedWrapperHandler |
93 | test_handler = logbook.TestHandler() | |
108 | test_handler = BatchTestHandler() | |
94 | 109 | with ThreadedWrapperHandler(test_handler) as handler: |
95 | 110 | logger.warn('Just testing') |
96 | 111 | logger.error('More testing') |
99 | 114 | handler.close() |
100 | 115 | |
101 | 116 | assert (not handler.controller.running) |
117 | assert len(test_handler.records) == 2 | |
118 | assert len(test_handler.batches) == 2 | |
119 | assert all((len(records) == 1 for records in test_handler.batches)) | |
120 | assert test_handler.has_warning('Just testing') | |
121 | assert test_handler.has_error('More testing') | |
122 | ||
123 | ||
def test_threaded_wrapper_handler_emit():
    """Records emitted through the wrapper arrive one per batch."""
    from logbook.queues import ThreadedWrapperHandler
    test_handler = BatchTestHandler()
    with ThreadedWrapperHandler(test_handler) as handler:
        lr = logbook.LogRecord('Test Logger', logbook.WARNING, 'Just testing')
        # Emit through the wrapper (not the wrapped handler directly) so
        # the record actually travels through the background queue.
        handler.emit(lr)
        lr = logbook.LogRecord('Test Logger', logbook.ERROR, 'More testing')
        handler.emit(lr)

    # give it some time to sync up
    handler.close()

    assert (not handler.controller.running)
    assert len(test_handler.records) == 2
    assert len(test_handler.batches) == 2
    assert all((len(records) == 1 for records in test_handler.batches))
    assert test_handler.has_warning('Just testing')
    assert test_handler.has_error('More testing')
142 | ||
143 | ||
def test_threaded_wrapper_handler_emit_batched():
    """A batch emitted through the wrapper arrives as one grouped batch."""
    from logbook.queues import ThreadedWrapperHandler
    test_handler = BatchTestHandler()
    with ThreadedWrapperHandler(test_handler) as handler:
        # Emit through the wrapper so the batch travels through the
        # background queue as a single emit_batch command.
        handler.emit_batch([
            logbook.LogRecord('Test Logger', logbook.WARNING, 'Just testing'),
            logbook.LogRecord('Test Logger', logbook.ERROR, 'More testing'),
        ], 'group')

    # give it some time to sync up
    handler.close()

    assert (not handler.controller.running)
    assert len(test_handler.records) == 2
    assert len(test_handler.batches) == 1
    (records, ) = test_handler.batches
    assert len(records) == 2
    assert test_handler.has_warning('Just testing')
    assert test_handler.has_error('More testing')
104 | 163 | |
163 | 222 | import redis |
164 | 223 | from logbook.queues import RedisHandler |
165 | 224 | |
166 | KEY = 'redis' | |
225 | KEY = 'redis-{}'.format(os.getpid()) | |
167 | 226 | FIELDS = ['message', 'host'] |
168 | 227 | r = redis.Redis(decode_responses=True) |
169 | redis_handler = RedisHandler(level=logbook.INFO, bubble=True) | |
228 | redis_handler = RedisHandler(key=KEY, level=logbook.INFO, bubble=True) | |
170 | 229 | # We don't want output for the tests, so we can wrap everything in a |
171 | 230 | # NullHandler |
172 | 231 | null_handler = logbook.NullHandler() |
184 | 243 | assert message.find(LETTERS) |
185 | 244 | |
186 | 245 | # Change the key of the handler and check on redis |
187 | KEY = 'test_another_key' | |
246 | KEY = 'test_another_key-{}'.format(os.getpid()) | |
188 | 247 | redis_handler.key = KEY |
189 | 248 | |
190 | 249 | with null_handler.applicationbound(): |
233 | 292 | from logbook.queues import RedisHandler |
234 | 293 | null_handler = logbook.NullHandler() |
235 | 294 | |
236 | redis_handler = RedisHandler(key='lpushed', push_method='lpush', | |
295 | KEY = 'lpushed-'.format(os.getpid()) | |
296 | redis_handler = RedisHandler(key=KEY, push_method='lpush', | |
237 | 297 | level=logbook.INFO, bubble=True) |
238 | 298 | |
239 | 299 | with null_handler.applicationbound(): |
244 | 304 | time.sleep(1.5) |
245 | 305 | |
246 | 306 | r = redis.Redis(decode_responses=True) |
247 | logs = r.lrange('lpushed', 0, -1) | |
307 | logs = r.lrange(KEY, 0, -1) | |
248 | 308 | assert logs |
249 | 309 | assert "new item" in logs[0] |
250 | r.delete('lpushed') | |
310 | r.delete(KEY) | |
251 | 311 | |
252 | 312 | |
253 | 313 | @require_module('redis') |
260 | 320 | from logbook.queues import RedisHandler |
261 | 321 | null_handler = logbook.NullHandler() |
262 | 322 | |
263 | redis_handler = RedisHandler(key='rpushed', push_method='rpush', | |
323 | KEY = 'rpushed-' + str(os.getpid()) | |
324 | redis_handler = RedisHandler(key=KEY, push_method='rpush', | |
264 | 325 | level=logbook.INFO, bubble=True) |
265 | 326 | |
266 | 327 | with null_handler.applicationbound(): |
271 | 332 | time.sleep(1.5) |
272 | 333 | |
273 | 334 | r = redis.Redis(decode_responses=True) |
274 | logs = r.lrange('rpushed', 0, -1) | |
335 | logs = r.lrange(KEY, 0, -1) | |
275 | 336 | assert logs |
276 | 337 | assert "old item" in logs[0] |
277 | r.delete('rpushed') | |
338 | r.delete(KEY) | |
278 | 339 | |
279 | 340 | |
280 | 341 | @pytest.fixture |
3 | 3 | from contextlib import closing |
4 | 4 | |
5 | 5 | import logbook |
6 | from logbook.helpers import u | |
7 | ||
8 | 6 | import pytest |
9 | 7 | |
10 | unix_socket = "/tmp/__unixsock_logbook.test" | |
8 | UNIX_SOCKET = "/tmp/__unixsock_logbook.test" | |
11 | 9 | |
12 | to_test = [ | |
10 | DELIMITERS = { | |
11 | socket.AF_INET: '\n' | |
12 | } | |
13 | ||
14 | TO_TEST = [ | |
13 | 15 | (socket.AF_INET, socket.SOCK_DGRAM, ('127.0.0.1', 0)), |
14 | 16 | (socket.AF_INET, socket.SOCK_STREAM, ('127.0.0.1', 0)), |
15 | 17 | ] |
16 | if hasattr(socket, 'AF_UNIX'): | |
17 | to_test.append((socket.AF_UNIX, socket.SOCK_DGRAM, unix_socket)) | |
18 | ||
19 | UNIX_SOCKET_AVAILABLE = hasattr(socket, 'AF_UNIX') | |
20 | ||
21 | if UNIX_SOCKET_AVAILABLE: | |
22 | DELIMITERS[socket.AF_UNIX] = '\x00' | |
23 | TO_TEST.append((socket.AF_UNIX, socket.SOCK_DGRAM, UNIX_SOCKET)) | |
24 | ||
18 | 25 | |
19 | 26 | @pytest.mark.usefixtures("unix_sock_path") |
20 | @pytest.mark.parametrize("sock_family,socktype,address", to_test) | |
21 | def test_syslog_handler(logger, activation_strategy, | |
22 | sock_family, socktype, address): | |
23 | delimiter = {socket.AF_UNIX: '\x00', | |
24 | socket.AF_INET: '\n'}[sock_family] | |
27 | @pytest.mark.parametrize("sock_family,socktype,address", TO_TEST) | |
28 | @pytest.mark.parametrize("app_name", [None, 'Testing']) | |
29 | def test_syslog_handler(logger, activation_strategy, sock_family, socktype, address, app_name): | |
30 | delimiter = DELIMITERS[sock_family] | |
25 | 31 | with closing(socket.socket(sock_family, socktype)) as inc: |
26 | 32 | inc.bind(address) |
33 | ||
27 | 34 | if socktype == socket.SOCK_STREAM: |
28 | 35 | inc.listen(0) |
36 | ||
29 | 37 | inc.settimeout(1) |
30 | for app_name in [None, 'Testing']: | |
31 | if sock_family == socket.AF_UNIX: | |
32 | expected = (r'^<12>%stestlogger: Syslog is weird%s$' % | |
33 | (app_name + ':' if app_name else '', | |
34 | delimiter)) | |
35 | else: | |
36 | expected = (r'^<12>1 \d{4}-\d\d-\d\dT\d\d:\d\d:\d\d(\.\d+)?Z %s %s %d ' | |
37 | '- - %sSyslog is weird%s$' % | |
38 | (socket.gethostname(), | |
39 | app_name if app_name else 'testlogger', | |
40 | os.getpid(), 'testlogger: ' if app_name else '', | |
41 | delimiter)) | |
42 | 38 | |
43 | handler = logbook.SyslogHandler(app_name, inc.getsockname(), | |
44 | socktype=socktype) | |
45 | with activation_strategy(handler): | |
46 | logger.warn('Syslog is weird') | |
47 | try: | |
48 | if socktype == socket.SOCK_STREAM: | |
49 | with closing(inc.accept()[0]) as inc2: | |
50 | rv = inc2.recv(1024) | |
51 | else: | |
52 | rv = inc.recvfrom(1024)[0] | |
53 | except socket.error: | |
54 | assert False, 'got timeout on socket' | |
55 | rv = rv.decode('utf-8') | |
56 | assert re.match(expected, rv), \ | |
57 | 'expected {}, got {}'.format(expected, rv) | |
39 | if UNIX_SOCKET_AVAILABLE and sock_family == socket.AF_UNIX: | |
40 | expected = (r'^<12>%stestlogger: Syslog is weird%s$' % (app_name + ':' if app_name else '', delimiter)) | |
41 | else: | |
42 | expected = (r'^<12>1 \d{4}-\d\d-\d\dT\d\d:\d\d:\d\d(\.\d+)?Z %s %s %d - - %sSyslog is weird%s$' % ( | |
43 | socket.gethostname(), | |
44 | app_name if app_name else 'testlogger', | |
45 | os.getpid(), 'testlogger: ' if app_name else '', | |
46 | delimiter)) | |
47 | ||
48 | handler = logbook.SyslogHandler(app_name, inc.getsockname(), socktype=socktype) | |
49 | ||
50 | with activation_strategy(handler): | |
51 | logger.warn('Syslog is weird') | |
52 | ||
53 | if socktype == socket.SOCK_STREAM: | |
54 | with closing(inc.accept()[0]) as inc2: | |
55 | rv = inc2.recv(1024) | |
56 | else: | |
57 | rv = inc.recvfrom(1024)[0] | |
58 | ||
59 | rv = rv.decode('utf-8') | |
60 | assert re.match(expected, rv), \ | |
61 | 'expected {}, got {}'.format(expected, rv) | |
58 | 62 | |
59 | 63 | |
60 | 64 | @pytest.fixture |
61 | def unix_sock_path(request): | |
62 | returned = unix_socket | |
63 | ||
64 | @request.addfinalizer | |
65 | def cleanup(): | |
66 | if os.path.exists(returned): | |
67 | os.unlink(returned) | |
68 | return returned | |
65 | def unix_sock_path(): | |
66 | try: | |
67 | yield UNIX_SOCKET | |
68 | finally: | |
69 | if os.path.exists(UNIX_SOCKET): | |
70 | os.unlink(UNIX_SOCKET) |
0 | 0 | [tox] |
1 | envlist=py27,py34,py35,py36,pypy,docs | |
2 | skipsdist=True | |
1 | envlist = py{27,35,36,37}{,-speedups},pypy,py37-docs | |
2 | skipsdist = True | |
3 | 3 | |
4 | 4 | [testenv] |
5 | whitelist_externals= | |
6 | rm | |
7 | deps= | |
8 | py{27}: mock | |
9 | pytest | |
10 | Cython | |
11 | changedir={toxinidir} | |
12 | commands= | |
13 | {envbindir}/cython logbook/_speedups.pyx | |
14 | {envpython} {toxinidir}/setup.py develop | |
15 | {envpython} {toxinidir}/scripts/test_setup.py | |
16 | py.test {toxinidir}/tests | |
17 | rm -f {toxinidir}/logbook/_speedups.\{so,c\} | |
5 | whitelist_externals = | |
6 | rm | |
7 | deps = | |
8 | py{27}: mock | |
9 | pytest | |
10 | speedups: Cython | |
11 | setenv = | |
12 | !speedups: DISABLE_LOGBOOK_CEXT=1 | |
13 | !speedups: DISABLE_LOGBOOK_CEXT_AT_RUNTIME=1 | |
14 | changedir = {toxinidir} | |
15 | commands = | |
16 | {envpython} -m pip install -e {toxinidir}[all] | |
18 | 17 | |
19 | [testenv:pypy] | |
20 | deps= | |
21 | mock | |
22 | pytest | |
23 | commands= | |
24 | {envpython} {toxinidir}/setup.py develop | |
25 | {envpython} {toxinidir}/scripts/test_setup.py | |
26 | py.test {toxinidir}/tests | |
18 | # Make sure that speedups are available/not available, as needed. | |
19 | speedups: {envpython} -c "from logbook.base import _has_speedups; exit(0 if _has_speedups else 1)" | |
20 | !speedups: {envpython} -c "from logbook.base import _has_speedups; exit(1 if _has_speedups else 0)" | |
27 | 21 | |
28 | [testenv:docs] | |
29 | deps= | |
30 | Sphinx==1.1.3 | |
31 | changedir=docs | |
32 | commands= | |
33 | sphinx-build -W -b html . _build/html | |
34 | sphinx-build -W -b linkcheck . _build/linkcheck | |
22 | {envpython} {toxinidir}/scripts/test_setup.py | |
23 | py.test {toxinidir}/tests | |
24 | ||
25 | [testenv:py37-docs] | |
26 | deps = | |
27 | Sphinx>=1.3 | |
28 | changedir = docs | |
29 | commands = | |
30 | sphinx-build -W -b html . _build/html | |
31 | sphinx-build -W -b linkcheck . _build/linkcheck |