Merge tag 'upstream/0.7.0'
Upstream version 0.7.0
Agustin Henze
9 years ago
0 | 0 | language: python |
1 | ||
2 | services: | |
3 | - redis-server | |
4 | ||
1 | 5 | python: |
2 | 6 | - "2.6" |
3 | 7 | - "2.7" |
8 | - "3.2" | |
4 | 9 | - "3.3" |
10 | - "3.4" | |
5 | 11 | - "pypy" |
6 | 12 | |
7 | 13 | install: |
8 | 14 | # this fixes SemLock issues on travis |
9 | 15 | - "sudo rm -rf /dev/shm && sudo ln -s /run/shm /dev/shm" |
10 | - "sudo apt-get install libzmq3-dev redis-server" | |
11 | - "python scripts/pypi_mirror_setup.py http://a.pypi.python.org/simple" | |
16 | - "sudo apt-add-repository -y ppa:chris-lea/zeromq" | |
17 | - "sudo apt-get update" | |
18 | - "sudo apt-get install -y libzmq3-dev" | |
12 | 19 | - "pip install cython redis" |
20 | - "easy_install pyzmq" | |
13 | 21 | - "make test_setup" |
14 | 22 | - "python setup.py develop" |
15 | 23 |
1 | 1 | ================= |
2 | 2 | |
3 | 3 | Here you can see the full list of changes between each Logbook release. |
4 | ||
5 | Version 0.7.0 | |
6 | ------------- | |
7 | ||
8 | Released on May 12th 2014. Codename "not_just_yet" | |
9 | ||
10 | - Restored Python 3.2 support (thanks @rnortman) | |
11 | - NullHandlers now respect filters - allows to only drop/mute certain records (#73) | |
12 | - redirect_logging now sets the legacy root logger's level to DEBUG by default. This can be changed by specifying `set_root_logger_level=False` (#96) | |
13 | - Bugfixes | |
4 | 14 | |
5 | 15 | Version 0.6.0 |
6 | 16 | ------------- |
29 | 29 | .. autoclass:: ExceptionHandler |
30 | 30 | :members: |
31 | 31 | |
32 | .. autoclass:: DedupHandler | |
33 | :members: | |
34 | ||
32 | 35 | Colorized Handlers |
33 | 36 | ------------------ |
34 | 37 |
47 | 47 | # built documents. |
48 | 48 | # |
49 | 49 | # The short X.Y version. |
50 | version = '0.6.1-dev' | |
50 | version = '0.7.0' | |
51 | 51 | # The full version, including alpha/beta/rc tags. |
52 | release = '0.6.1-dev' | |
52 | release = '0.7.0' | |
53 | 53 | |
54 | 54 | # The language for content autogenerated by Sphinx. Refer to documentation |
55 | 55 | # for a list of supported languages. |
22 | 22 | LimitingHandlerMixin, WrapperHandler, FingersCrossedHandler, \ |
23 | 23 | GroupHandler |
24 | 24 | |
25 | __version__ = '0.6.1-dev' | |
25 | __version__ = '0.7.0' | |
26 | 26 | |
27 | 27 | # create an anonymous default logger and provide all important |
28 | 28 | # methods of that logger as global functions |
146 | 146 | try: |
147 | 147 | return dict.__getitem__(self, key) |
148 | 148 | except KeyError: |
149 | return u'' | |
149 | return u('') | |
150 | 150 | else: |
151 | 151 | def __missing__(self, key): |
152 | return u'' | |
152 | return u('') | |
153 | 153 | |
154 | 154 | def copy(self): |
155 | 155 | return self.__class__(self) |
473 | 473 | self._channel = None |
474 | 474 | if isinstance(self.time, string_types): |
475 | 475 | self.time = parse_iso8601(self.time) |
476 | self.extra = ExtraDict(self.extra) | |
476 | 477 | return self |
477 | 478 | |
478 | 479 | @cached_property |
841 | 842 | if not handler.should_handle(record): |
842 | 843 | continue |
843 | 844 | |
845 | # a filter can still veto the handling of the record. This | |
846 | # however is already operating on an initialized and processed | |
847 | # record. The impact is that filters are slower than the | |
848 | # handler's should_handle function in case there is no default | |
849 | # handler that would handle the record (delayed init). | |
850 | if handler.filter is not None \ | |
851 | and not handler.filter(record, handler): | |
852 | continue | |
853 | ||
844 | 854 | # if this is a blackhole handler, don't even try to |
845 | 855 | # do further processing, stop right away. Technically |
846 | 856 | # speaking this is not 100% correct because if the handler |
862 | 872 | self.process_record(record) |
863 | 873 | record_initialized = True |
864 | 874 | |
865 | # a filter can still veto the handling of the record. This | |
866 | # however is already operating on an initialized and processed | |
867 | # record. The impact is that filters are slower than the | |
868 | # handler's should_handle function in case there is no default | |
869 | # handler that would handle the record (delayed init). | |
870 | if handler.filter is not None \ | |
871 | and not handler.filter(record, handler): | |
872 | continue | |
873 | ||
874 | 875 | # handle the record. If the record was handled and |
875 | 876 | # the record is not bubbling we can abort now. |
876 | 877 | if handler.handle(record) and not handler.bubble: |
19 | 19 | _epoch_ord = date(1970, 1, 1).toordinal() |
20 | 20 | |
21 | 21 | |
def redirect_logging(set_root_logger_level=True):
    """Permanently redirects logging to the stdlib.  This also
    removes all otherwise registered handlers on root logger of
    the logging system but leaves the other loggers untouched.

    :param set_root_logger_level: controls whether the default level of
        the legacy root logger is changed so that all legacy log messages
        get redirected to Logbook
    """
    del logging.root.handlers[:]
    logging.root.addHandler(RedirectLoggingHandler())
    if set_root_logger_level:
        # The legacy root logger defaults to WARNING; lower it so every
        # legacy record reaches the redirect handler (#96).
        logging.root.setLevel(logging.DEBUG)
30 | 35 | |
31 | 36 | class redirected_logging(object): |
37 | 42 | with redirected_logging(): |
38 | 43 | ... |
39 | 44 | """ |
    def __init__(self, set_root_logger_level=True):
        # Snapshot the legacy root logger's handlers and level so that
        # ``end`` can restore them exactly as they were.
        self.old_handlers = logging.root.handlers[:]
        self.old_level = logging.root.level
        self.set_root_logger_level = set_root_logger_level

    def start(self):
        """Activate the redirection (see :func:`redirect_logging`)."""
        redirect_logging(self.set_root_logger_level)

    def end(self, etype=None, evalue=None, tb=None):
        """Undo the redirection, restoring the saved handlers and level."""
        logging.root.handlers[:] = self.old_handlers
        logging.root.setLevel(self.old_level)

    # context-manager protocol
    __enter__ = start
    __exit__ = end
27 | 27 | NOTSET, level_name_property, _missing, lookup_level, \ |
28 | 28 | Flags, ContextObject, ContextStackManager |
29 | 29 | from logbook.helpers import rename, b, _is_text_stream, is_unicode, PY2, \ |
30 | zip, xrange, string_types, integer_types, iteritems, reraise | |
30 | zip, xrange, string_types, integer_types, reraise, u | |
31 | 31 | |
32 | 32 | |
33 | 33 | DEFAULT_FORMAT_STRING = ( |
34 | u'[{record.time:%Y-%m-%d %H:%M}] ' | |
35 | u'{record.level_name}: {record.channel}: {record.message}' | |
34 | u('[{record.time:%Y-%m-%d %H:%M}] ') + | |
35 | u('{record.level_name}: {record.channel}: {record.message}') | |
36 | 36 | ) |
37 | SYSLOG_FORMAT_STRING = u'{record.channel}: {record.message}' | |
38 | NTLOG_FORMAT_STRING = u'''\ | |
37 | SYSLOG_FORMAT_STRING = u('{record.channel}: {record.message}') | |
38 | NTLOG_FORMAT_STRING = u('''\ | |
39 | 39 | Message Level: {record.level_name} |
40 | 40 | Location: {record.filename}:{record.lineno} |
41 | 41 | Module: {record.module} |
45 | 45 | Event provided Message: |
46 | 46 | |
47 | 47 | {record.message} |
48 | ''' | |
48 | ''') | |
49 | 49 | TEST_FORMAT_STRING = \ |
50 | u'[{record.level_name}] {record.channel}: {record.message}' | |
51 | MAIL_FORMAT_STRING = u'''\ | |
50 | u('[{record.level_name}] {record.channel}: {record.message}') | |
51 | MAIL_FORMAT_STRING = u('''\ | |
52 | 52 | Subject: {handler.subject} |
53 | 53 | |
54 | 54 | Message type: {record.level_name} |
60 | 60 | Message: |
61 | 61 | |
62 | 62 | {record.message} |
63 | ''' | |
64 | MAIL_RELATED_FORMAT_STRING = u'''\ | |
63 | ''') | |
64 | MAIL_RELATED_FORMAT_STRING = u('''\ | |
65 | 65 | Message type: {record.level_name} |
66 | 66 | Location: {record.filename}:{record.lineno} |
67 | 67 | Module: {record.module} |
68 | 68 | Function: {record.func_name} |
69 | 69 | {record.message} |
70 | ''' | |
70 | ''') | |
71 | 71 | |
72 | 72 | SYSLOG_PORT = 514 |
73 | 73 | |
124 | 124 | # all here goes to that handler |
125 | 125 | handler.pop_application() |
126 | 126 | |
127 | By default messages send to that handler will not go to a handler on | |
127 | By default messages sent to that handler will not go to a handler on | |
128 | 128 | an outer level on the stack, if handled. This can be changed by |
129 | 129 | setting bubbling to `True`. This setup for example would not have |
130 | 130 | any effect:: |
131 | 131 | |
132 | handler = NullHandler(bubble=False) | |
132 | handler = NullHandler(bubble=True) | |
133 | 133 | handler.push_application() |
134 | 134 | |
135 | 135 | Whereas this setup disables all logging for the application:: |
373 | 373 | line = self.format_record(record, handler) |
374 | 374 | exc = self.format_exception(record) |
375 | 375 | if exc: |
376 | line += u'\n' + exc | |
376 | line += u('\n') + exc | |
377 | 377 | return line |
378 | 378 | |
379 | 379 | |
419 | 419 | """Returns a hashlib object with the hash of the record.""" |
420 | 420 | hash = sha1() |
421 | 421 | hash.update(('%d\x00' % record.level).encode('ascii')) |
422 | hash.update((record.channel or u'').encode('utf-8') + b('\x00')) | |
422 | hash.update((record.channel or u('')).encode('utf-8') + b('\x00')) | |
423 | 423 | hash.update(record.filename.encode('utf-8') + b('\x00')) |
424 | 424 | hash.update(b(str(record.lineno))) |
425 | 425 | return hash |
1031 | 1031 | """ |
1032 | 1032 | default_format_string = MAIL_FORMAT_STRING |
1033 | 1033 | default_related_format_string = MAIL_RELATED_FORMAT_STRING |
1034 | default_subject = u'Server Error in Application' | |
1034 | default_subject = u('Server Error in Application') | |
1035 | 1035 | |
1036 | 1036 | #: the maximum number of record hashes in the cache for the limiting |
1037 | 1037 | #: feature. Afterwards, record_cache_prune percent of the oldest |
1155 | 1155 | """ |
1156 | 1156 | from smtplib import SMTP, SMTP_PORT, SMTP_SSL_PORT |
1157 | 1157 | if self.server_addr is None: |
1158 | host = 'localhost' | |
1158 | host = '127.0.0.1' | |
1159 | 1159 | port = self.secure and SMTP_SSL_PORT or SMTP_PORT |
1160 | 1160 | else: |
1161 | 1161 | host, port = self.server_addr |
1352 | 1352 | return (facility << 3) | priority |
1353 | 1353 | |
1354 | 1354 | def emit(self, record): |
1355 | prefix = u'' | |
1355 | prefix = u('') | |
1356 | 1356 | if self.application_name is not None: |
1357 | prefix = self.application_name + u':' | |
1358 | self.send_to_socket((u'<%d>%s%s\x00' % ( | |
1357 | prefix = self.application_name + u(':') | |
1358 | self.send_to_socket((u('<%d>%s%s\x00') % ( | |
1359 | 1359 | self.encode_priority(record), |
1360 | 1360 | prefix, |
1361 | 1361 | self.format(record) |
9 | 9 | """ |
10 | 10 | import re |
11 | 11 | import os |
12 | from collections import defaultdict | |
12 | 13 | from cgi import parse_qsl |
13 | 14 | |
14 | from logbook.base import RecordDispatcher, NOTSET, ERROR, NOTICE | |
15 | from logbook.base import RecordDispatcher, dispatch_record, NOTSET, ERROR, NOTICE | |
15 | 16 | from logbook.handlers import Handler, StringFormatter, \ |
16 | 17 | StringFormatterHandlerMixin, StderrHandler |
17 | 18 | from logbook._termcolors import colorize |
18 | from logbook.helpers import PY2, string_types, iteritems | |
19 | from logbook.helpers import PY2, string_types, iteritems, u | |
19 | 20 | |
20 | 21 | from logbook.ticketing import TicketingHandler as DatabaseHandler |
21 | 22 | from logbook.ticketing import BackendBase |
27 | 28 | |
28 | 29 | _ws_re = re.compile(r'(\s+)(?u)') |
29 | 30 | TWITTER_FORMAT_STRING = \ |
30 | u'[{record.channel}] {record.level_name}: {record.message}' | |
31 | u('[{record.channel}] {record.level_name}: {record.message}') | |
31 | 32 | TWITTER_ACCESS_TOKEN_URL = 'https://twitter.com/oauth/access_token' |
32 | 33 | NEW_TWEET_URL = 'https://api.twitter.com/1/statuses/update.json' |
33 | 34 | |
38 | 39 | def setup_backend(self): |
39 | 40 | from couchdb import Server |
40 | 41 | |
41 | uri = self.options.pop('uri', u'') | |
42 | uri = self.options.pop('uri', u('')) | |
42 | 43 | couch = Server(uri) |
43 | 44 | db_name = self.options.pop('db') |
44 | 45 | self.database = couch[db_name] |
62 | 63 | max_length = 140 |
63 | 64 | |
64 | 65 | def format_exception(self, record): |
65 | return u'%s: %s' % (record.exception_shortname, | |
66 | record.exception_message) | |
66 | return u('%s: %s') % (record.exception_shortname, | |
67 | record.exception_message) | |
67 | 68 | |
68 | 69 | def __call__(self, record, handler): |
69 | 70 | formatted = StringFormatter.__call__(self, record, handler) |
73 | 74 | length += len(piece) |
74 | 75 | if length > self.max_length: |
75 | 76 | if length - len(piece) < self.max_length: |
76 | rv.append(u'…') | |
77 | rv.append(u('…')) | |
77 | 78 | break |
78 | 79 | rv.append(piece) |
79 | return u''.join(rv) | |
80 | return u('').join(rv) | |
80 | 81 | |
81 | 82 | |
82 | 83 | class TaggingLogger(RecordDispatcher): |
356 | 357 | if self.should_handle(record): |
357 | 358 | raise self.exc_type(self.format(record)) |
358 | 359 | return False |
360 | ||
class DedupHandler(Handler):
    """A handler that deduplicates log messages.

    It emits each unique log record once, along with the number of times
    it was emitted.
    Example::

        with logbook.more.DedupHandler():
            logbook.error('foo')
            logbook.error('bar')
            logbook.error('foo')

    The expected output::

        message repeated 2 times: foo
        message repeated 1 times: bar
    """
    def __init__(self, format_string='message repeated {count} times: {message}', *args, **kwargs):
        # bubble=False: records are swallowed here and re-dispatched on flush
        Handler.__init__(self, bubble=False, *args, **kwargs)
        self._format_string = format_string
        self.clear()

    def clear(self):
        """Reset the per-message counters and the ordered record list."""
        self._message_to_count = defaultdict(int)
        self._unique_ordered_records = []

    def pop_application(self):
        Handler.pop_application(self)
        self.flush()

    def pop_thread(self):
        Handler.pop_thread(self)
        self.flush()

    def handle(self, record):
        # Remember the first record per message (preserves emission order)
        # and count every occurrence.
        if record.message not in self._message_to_count:
            self._unique_ordered_records.append(record)
        self._message_to_count[record.message] += 1
        return True

    def flush(self):
        """Re-dispatch each unique record with its repetition count."""
        for record in self._unique_ordered_records:
            record.message = self._format_string.format(
                message=record.message,
                count=self._message_to_count[record.message])
            # record.dispatcher is the logger that created the record; it is
            # sometimes suppressed (by logbook.info for example), in which
            # case we fall back to the module-level dispatch_record.
            dispatch = (record.dispatcher.call_handlers
                        if record.dispatcher is not None else dispatch_record)
            dispatch(record)
        self.clear()
44 | 44 | |
45 | 45 | def make_title(self, record): |
46 | 46 | """Called to get the title from the record.""" |
47 | return u'%s: %s' % (record.channel, record.level_name.title()) | |
47 | return u('%s: %s') % (record.channel, record.level_name.title()) | |
48 | 48 | |
49 | 49 | def make_text(self, record): |
50 | 50 | """Called to get the text of the record.""" |
218 | 218 | con = http_client.HTTPSConnection('boxcar.io') |
219 | 219 | con.request('POST', '/notifications/', headers={ |
220 | 220 | 'Authorization': 'Basic ' + |
221 | base64.b64encode((u'%s:%s' % | |
221 | base64.b64encode((u('%s:%s') % | |
222 | 222 | (self.email, self.password)).encode('utf-8')).strip(), |
223 | 223 | }, body=body) |
224 | 224 | con.close() |
13 | 13 | import platform |
14 | 14 | from logbook.base import NOTSET, LogRecord, dispatch_record |
15 | 15 | from logbook.handlers import Handler, WrapperHandler |
16 | from logbook.helpers import PY2 | |
16 | from logbook.helpers import PY2, u | |
17 | 17 | |
18 | 18 | if PY2: |
19 | 19 | from Queue import Empty, Queue as ThreadQueue |
30 | 30 | |
31 | 31 | Example setup:: |
32 | 32 | |
33 | handler = RedisHandler('http://localhost', port='9200', key='redis') | |
33 | handler = RedisHandler('http://127.0.0.1', port='9200', key='redis') | |
34 | 34 | |
35 | 35 | If your Redis instance is password protected, you can securely connect passing |
36 | 36 | your password when creating a RedisHandler object. |
41 | 41 | |
42 | 42 | More info about the default buffer size: wp.me/p3tYJu-3b |
43 | 43 | """ |
44 | def __init__(self, host='localhost', port=6379, key='redis', extra_fields={}, | |
44 | def __init__(self, host='127.0.0.1', port=6379, key='redis', extra_fields={}, | |
45 | 45 | flush_threshold=128, flush_time=1, level=NOTSET, filter=None, |
46 | 46 | password=False, bubble=True, context=None): |
47 | 47 | Handler.__init__(self, level, filter, bubble) |
119 | 119 | self._flush_buffer() |
120 | 120 | |
121 | 121 | |
122 | class RabbitMQHandler(Handler): | |
123 | """A handler that acts as a RabbitMQ publisher, which publishes each record | |
124 | as json dump. Requires the kombu module. | |
122 | class MessageQueueHandler(Handler): | |
123 | """A handler that acts as a message queue publisher, which publishes each | |
124 | record as json dump. Requires the kombu module. | |
125 | 125 | |
126 | 126 | The queue will be filled with JSON exported log records. To receive such |
127 | log records from a queue you can use the :class:`RabbitMQSubscriber`. | |
128 | ||
127 | log records from a queue you can use the :class:`MessageQueueSubscriber`. | |
129 | 128 | |
130 | 129 | Example setup:: |
131 | 130 | |
132 | handler = RabbitMQHandler('amqp://guest:guest@localhost//', queue='my_log') | |
133 | """ | |
131 | handler = MessageQueueHandler('mongodb://localhost:27017/logging') | |
132 | """ | |
133 | ||
134 | 134 | def __init__(self, uri=None, queue='logging', level=NOTSET, |
135 | filter=None, bubble=False, context=None): | |
135 | filter=None, bubble=False, context=None): | |
136 | 136 | Handler.__init__(self, level, filter, bubble) |
137 | 137 | try: |
138 | 138 | import kombu |
156 | 156 | self.queue.close() |
157 | 157 | |
158 | 158 | |
159 | RabbitMQHandler = MessageQueueHandler | |
160 | ||
161 | ||
159 | 162 | class ZeroMQHandler(Handler): |
160 | 163 | """A handler that acts as a ZeroMQ publisher, which publishes each record |
161 | 164 | as json dump. Requires the pyzmq library. |
163 | 166 | The queue will be filled with JSON exported log records. To receive such |
164 | 167 | log records from a queue you can use the :class:`ZeroMQSubscriber`. |
165 | 168 | |
169 | If `multi` is set to `True`, the handler will use a `PUSH` socket to | |
170 | publish the records. This allows multiple handlers to use the same `uri`. | |
171 | The records can be received by using the :class:`ZeroMQSubscriber` with | |
172 | `multi` set to `True`. | |
173 | ||
166 | 174 | |
167 | 175 | Example setup:: |
168 | 176 | |
170 | 178 | """ |
171 | 179 | |
172 | 180 | def __init__(self, uri=None, level=NOTSET, filter=None, bubble=False, |
173 | context=None): | |
181 | context=None, multi=False): | |
174 | 182 | Handler.__init__(self, level, filter, bubble) |
175 | 183 | try: |
176 | 184 | import zmq |
179 | 187 | 'the ZeroMQHandler.') |
180 | 188 | #: the zero mq context |
181 | 189 | self.context = context or zmq.Context() |
182 | #: the zero mq socket. | |
183 | self.socket = self.context.socket(zmq.PUB) | |
184 | if uri is not None: | |
185 | self.socket.bind(uri) | |
190 | ||
191 | if multi: | |
192 | #: the zero mq socket. | |
193 | self.socket = self.context.socket(zmq.PUSH) | |
194 | if uri is not None: | |
195 | self.socket.connect(uri) | |
196 | else: | |
197 | #: the zero mq socket. | |
198 | self.socket = self.context.socket(zmq.PUB) | |
199 | if uri is not None: | |
200 | self.socket.bind(uri) | |
201 | ||
186 | 202 | |
187 | 203 | def export_record(self, record): |
188 | 204 | """Exports the record into a dictionary ready for JSON dumping.""" |
274 | 290 | return controller |
275 | 291 | |
276 | 292 | |
277 | class RabbitMQSubscriber(SubscriberBase): | |
278 | """A helper that acts as RabbitMQ subscriber and will dispatch received | |
279 | log records to the active handler setup. There are multiple ways to | |
280 | use this class. | |
293 | class MessageQueueSubscriber(SubscriberBase): | |
294 | """A helper that acts as a message queue subscriber and will dispatch | |
295 | received log records to the active handler setup. There are multiple ways | |
296 | to use this class. | |
281 | 297 | |
282 | 298 | It can be used to receive log records from a queue:: |
283 | 299 | |
284 | subscriber = RabbitMQSubscriber('amqp://guest:guest@localhost//') | |
300 | subscriber = MessageQueueSubscriber('mongodb://localhost:27017/logging') | |
285 | 301 | record = subscriber.recv() |
286 | 302 | |
287 | 303 | But it can also be used to receive and dispatch these in one go:: |
288 | 304 | |
289 | 305 | with target_handler: |
290 | subscriber = RabbitMQSubscriber('amqp://guest:guest@localhost//') | |
306 | subscriber = MessageQueueSubscriber('mongodb://localhost:27017/logging') | |
291 | 307 | subscriber.dispatch_forever() |
292 | 308 | |
293 | 309 | This will take all the log records from that queue and dispatch them |
294 | 310 | over to `target_handler`. If you want you can also do that in the |
295 | 311 | background:: |
296 | 312 | |
297 | subscriber = RabbitMQSubscriber('amqp://guest:guest@localhost//') | |
313 | subscriber = MessageQueueSubscriber('mongodb://localhost:27017/logging') | |
298 | 314 | controller = subscriber.dispatch_in_background(target_handler) |
299 | 315 | |
300 | 316 | The controller returned can be used to shut down the background |
302 | 318 | |
303 | 319 | controller.stop() |
304 | 320 | """ |
305 | ||
306 | 321 | def __init__(self, uri=None, queue='logging'): |
307 | 322 | try: |
308 | 323 | import kombu |
309 | 324 | except ImportError: |
310 | raise RuntimeError('The kombu library is required for ' | |
311 | 'the RabbitMQSubscriber.') | |
325 | raise RuntimeError('The kombu library is required.') | |
312 | 326 | if uri: |
313 | 327 | connection = kombu.Connection(uri) |
314 | 328 | |
343 | 357 | return LogRecord.from_dict(log_record) |
344 | 358 | |
345 | 359 | |
360 | RabbitMQSubscriber = MessageQueueSubscriber | |
361 | ||
362 | ||
346 | 363 | class ZeroMQSubscriber(SubscriberBase): |
347 | 364 | """A helper that acts as ZeroMQ subscriber and will dispatch received |
348 | 365 | log records to the active handler setup. There are multiple ways to |
370 | 387 | thread:: |
371 | 388 | |
372 | 389 | controller.stop() |
373 | """ | |
374 | ||
375 | def __init__(self, uri=None, context=None): | |
390 | ||
391 | If `multi` is set to `True`, the subscriber will use a `PULL` socket | |
392 | and listen to records published by a `PUSH` socket (usually via a | |
393 | :class:`ZeroMQHandler` with `multi` set to `True`). This allows a | |
394 | single subscriber to dispatch multiple handlers. | |
395 | """ | |
396 | ||
397 | def __init__(self, uri=None, context=None, multi=False): | |
376 | 398 | try: |
377 | 399 | import zmq |
378 | 400 | except ImportError: |
382 | 404 | |
383 | 405 | #: the zero mq context |
384 | 406 | self.context = context or zmq.Context() |
385 | #: the zero mq socket. | |
386 | self.socket = self.context.socket(zmq.SUB) | |
387 | if uri is not None: | |
388 | self.socket.connect(uri) | |
389 | self.socket.setsockopt_unicode(zmq.SUBSCRIBE, u'') | |
407 | ||
408 | if multi: | |
409 | #: the zero mq socket. | |
410 | self.socket = self.context.socket(zmq.PULL) | |
411 | if uri is not None: | |
412 | self.socket.bind(uri) | |
413 | else: | |
414 | #: the zero mq socket. | |
415 | self.socket = self.context.socket(zmq.SUB) | |
416 | if uri is not None: | |
417 | self.socket.connect(uri) | |
418 | self.socket.setsockopt_unicode(zmq.SUBSCRIBE, u('')) | |
390 | 419 | |
391 | 420 | def __del__(self): |
392 | 421 | try: |
528 | 557 | def __init__(self, channel): |
529 | 558 | self.channel = channel |
530 | 559 | |
531 | def recv(self, timeout=-1): | |
560 | def recv(self, timeout=None): | |
532 | 561 | try: |
533 | 562 | rv = self.channel.receive(timeout=timeout) |
534 | 563 | except self.channel.RemoteError: |
638 | 667 | |
639 | 668 | subscribers = SubscriberGroup([ |
640 | 669 | MultiProcessingSubscriber(queue), |
641 | ZeroMQSubscriber('tcp://localhost:5000') | |
670 | ZeroMQSubscriber('tcp://127.0.0.1:5000') | |
642 | 671 | ]) |
643 | 672 | with target_handler: |
644 | 673 | subscribers.dispatch_forever() |
12 | 12 | import json |
13 | 13 | from logbook.base import NOTSET, level_name_property, LogRecord |
14 | 14 | from logbook.handlers import Handler, HashingHandlerMixin |
15 | from logbook.helpers import cached_property, b, PY2 | |
15 | from logbook.helpers import cached_property, b, PY2, u | |
16 | 16 | |
17 | 17 | class Ticket(object): |
18 | 18 | """Represents a ticket from the database.""" |
191 | 191 | row = cnx.execute(self.tickets.insert().values( |
192 | 192 | record_hash=hash, |
193 | 193 | level=record.level, |
194 | channel=record.channel or u'', | |
195 | location=u'%s:%d' % (record.filename, record.lineno), | |
196 | module=record.module or u'<unknown>', | |
194 | channel=record.channel or u(''), | |
195 | location=u('%s:%d') % (record.filename, record.lineno), | |
196 | module=record.module or u('<unknown>'), | |
197 | 197 | occurrence_count=0, |
198 | 198 | solved=False, |
199 | 199 | app_id=app_id |
286 | 286 | from pymongo.errors import AutoReconnect |
287 | 287 | |
288 | 288 | _connection = None |
289 | uri = self.options.pop('uri', u'') | |
289 | uri = self.options.pop('uri', u('')) | |
290 | 290 | _connection_attempts = 0 |
291 | 291 | |
292 | 292 | parsed_uri = parse_uri(uri, Connection.PORT) |
335 | 335 | doc = { |
336 | 336 | 'record_hash': hash, |
337 | 337 | 'level': record.level, |
338 | 'channel': record.channel or u'', | |
339 | 'location': u'%s:%d' % (record.filename, record.lineno), | |
340 | 'module': record.module or u'<unknown>', | |
338 | 'channel': record.channel or u(''), | |
339 | 'location': u('%s:%d') % (record.filename, record.lineno), | |
340 | 'module': record.module or u('<unknown>'), | |
341 | 341 | 'occurrence_count': 0, |
342 | 342 | 'solved': False, |
343 | 343 | 'app_id': app_id, |
447 | 447 | filter=None, bubble=False, hash_salt=None, backend=None, |
448 | 448 | **db_options): |
449 | 449 | if hash_salt is None: |
450 | hash_salt = u'apphash-' + app_id | |
450 | hash_salt = u('apphash-') + app_id | |
451 | 451 | TicketingBaseHandler.__init__(self, hash_salt, level, filter, bubble) |
452 | 452 | if backend is None: |
453 | 453 | backend = self.default_backend |
#! /usr/bin/python
"""Point easy_install and pip at a given PyPI mirror.

Usage: pypi_mirror_setup.py <mirror-index-url>
"""
import os
import sys


if __name__ == '__main__':
    mirror = sys.argv[1]
    # easy_install reads its index URL from ~/.pydistutils.cfg.
    # Use a context manager so the file is closed even on write errors.
    with open(os.path.expanduser("~/.pydistutils.cfg"), "w") as f:
        f.write("""
[easy_install]
index_url = %s
""" % mirror)
    # pip reads its index URL from ~/.pip/pip.conf
    pip_dir = os.path.expanduser("~/.pip")
    if not os.path.isdir(pip_dir):
        os.makedirs(pip_dir)
    with open(os.path.join(pip_dir, "pip.conf"), "w") as f:
        f.write("""
[global]
index-url = %s

[install]
use-mirrors = true
""" % mirror)
0 | 0 | #! /usr/bin/python |
1 | import platform | |
2 | 1 | import subprocess |
2 | import os | |
3 | 3 | import sys |
4 | 4 | |
5 | 5 | def _execute(*args, **kwargs): |
8 | 8 | sys.exit(result) |
9 | 9 | |
if __name__ == '__main__':
    # Use the version *tuple* rather than platform.python_version(): string
    # comparison would mis-order versions (e.g. "2.10" < "2.7").
    python_version = sys.version_info

    deps = [
        "execnet>=1.0.9",
        "nose",
        "pyzmq",
        "sqlalchemy",
    ]

    if python_version < (2, 7):
        # unittest2 backports the 2.7 unittest API
        deps.append("unittest2")
    if (3, 2) <= python_version < (3, 3):
        # Newer Jinja2/markupsafe dropped Python 3.2; pin the last
        # compatible releases there.
        deps.append("markupsafe==0.15")
        deps.append("Jinja2==2.6")
    else:
        deps.append("Jinja2")
    print("Setting up dependencies...")
    # Invoke the pip that sits next to the running interpreter, as an
    # argument list (shell=False) to avoid shell quoting issues.
    _execute([os.path.join(os.path.dirname(sys.executable), "pip"), "install"] + deps, shell=False)
105 | 105 | features['speedups'] = speedups |
106 | 106 | setup( |
107 | 107 | name='Logbook', |
108 | version='0.6.1-dev', | |
108 | version='0.7.0', | |
109 | 109 | license='BSD', |
110 | 110 | url='http://logbook.pocoo.org/', |
111 | 111 | author='Armin Ronacher, Georg Brandl', |
27 | 27 | from thread import get_ident |
28 | 28 | except ImportError: |
29 | 29 | from _thread import get_ident |
30 | import base64 | |
30 | 31 | |
31 | 32 | __file_without_pyc__ = __file__ |
32 | 33 | if __file_without_pyc__.endswith(".pyc"): |
252 | 253 | def test_file_handler_unicode(self): |
253 | 254 | with capturing_stderr_context() as captured: |
254 | 255 | with self.thread_activation_strategy(logbook.FileHandler(self.filename)) as h: |
255 | self.log.info(u'\u0431') | |
256 | self.log.info(u('\u0431')) | |
256 | 257 | self.assertFalse(captured.getvalue()) |
257 | 258 | |
258 | 259 | def test_file_handler_delay(self): |
351 | 352 | self.assertEqual(f.readline().rstrip(), '[02:00] Third One') |
352 | 353 | |
353 | 354 | def test_mail_handler(self): |
354 | subject = u'\xf8nicode' | |
355 | subject = u('\xf8nicode') | |
355 | 356 | handler = make_fake_mail_handler(subject=subject) |
356 | 357 | with capturing_stderr_context() as fallback: |
357 | 358 | with self.thread_activation_strategy(handler): |
359 | 360 | try: |
360 | 361 | 1 / 0 |
361 | 362 | except Exception: |
362 | self.log.exception(u'Viva la Espa\xf1a') | |
363 | self.log.exception(u('Viva la Espa\xf1a')) | |
363 | 364 | |
364 | 365 | if not handler.mails: |
365 | 366 | # if sending the mail failed, the reason should be on stderr |
370 | 371 | mail = mail.replace("\r", "") |
371 | 372 | self.assertEqual(sender, handler.from_addr) |
372 | 373 | self.assert_('=?utf-8?q?=C3=B8nicode?=' in mail) |
373 | self.assertRegexpMatches(mail, 'Message type:\s+ERROR') | |
374 | self.assertRegexpMatches(mail, 'Location:.*%s' % __file_without_pyc__) | |
375 | self.assertRegexpMatches(mail, 'Module:\s+%s' % __name__) | |
376 | self.assertRegexpMatches(mail, 'Function:\s+test_mail_handler') | |
377 | body = u'Message:\n\nViva la Espa\xf1a' | |
374 | header, data = mail.split("\n\n", 1) | |
375 | if "Content-Transfer-Encoding: base64" in header: | |
376 | data = base64.b64decode(data).decode("utf-8") | |
377 | self.assertRegexpMatches(data, 'Message type:\s+ERROR') | |
378 | self.assertRegexpMatches(data, 'Location:.*%s' % __file_without_pyc__) | |
379 | self.assertRegexpMatches(data, 'Module:\s+%s' % __name__) | |
380 | self.assertRegexpMatches(data, 'Function:\s+test_mail_handler') | |
381 | body = u('Viva la Espa\xf1a') | |
378 | 382 | if sys.version_info < (3, 0): |
379 | 383 | body = body.encode('utf-8') |
380 | self.assertIn(body, mail) | |
381 | self.assertIn('\n\nTraceback (most', mail) | |
382 | self.assertIn('1 / 0', mail) | |
384 | self.assertIn(body, data) | |
385 | self.assertIn('\nTraceback (most', data) | |
386 | self.assertIn('1 / 0', data) | |
383 | 387 | self.assertIn('This is not mailed', fallback.getvalue()) |
384 | 388 | |
385 | 389 | def test_mail_handler_record_limits(self): |
477 | 481 | except socket.error: |
478 | 482 | self.fail('got timeout on socket') |
479 | 483 | self.assertEqual(rv, ( |
480 | u'<12>%stestlogger: Syslog is weird\x00' % | |
481 | (app_name and app_name + u':' or u'')).encode('utf-8')) | |
484 | u('<12>%stestlogger: Syslog is weird\x00') % | |
485 | (app_name and app_name + u(':') or u(''))).encode('utf-8')) | |
482 | 486 | |
483 | 487 | def test_handler_processors(self): |
484 | 488 | handler = make_fake_mail_handler(format_string='''\ |
678 | 682 | self.assertFalse(handler.has_warning('bar', channel='Logger2')) |
679 | 683 | self.assertFalse(outer_handler.has_warning('foo', channel='Logger1')) |
680 | 684 | self.assert_(outer_handler.has_warning('bar', channel='Logger2')) |
685 | ||
686 | def test_null_handler_filtering(self): | |
687 | logger1 = logbook.Logger("1") | |
688 | logger2 = logbook.Logger("2") | |
689 | outer = logbook.TestHandler() | |
690 | inner = logbook.NullHandler() | |
691 | ||
692 | inner.filter = lambda record, handler: record.dispatcher is logger1 | |
693 | ||
694 | with self.thread_activation_strategy(outer): | |
695 | with self.thread_activation_strategy(inner): | |
696 | logger1.warn("1") | |
697 | logger2.warn("2") | |
698 | ||
699 | self.assertTrue(outer.has_warning("2", channel="2")) | |
700 | self.assertFalse(outer.has_warning("1", channel="1")) | |
681 | 701 | |
682 | 702 | def test_different_context_pushing(self): |
683 | 703 | h1 = logbook.TestHandler(level=logbook.DEBUG) |
889 | 909 | |
890 | 910 | class LoggingCompatTestCase(LogbookTestCase): |
891 | 911 | |
892 | def test_basic_compat(self): | |
893 | from logging import getLogger | |
912 | def test_basic_compat_with_level_setting(self): | |
913 | self._test_basic_compat(True) | |
914 | def test_basic_compat_without_level_setting(self): | |
915 | self._test_basic_compat(False) | |
916 | ||
917 | def _test_basic_compat(self, set_root_logger_level): | |
918 | import logging | |
894 | 919 | from logbook.compat import redirected_logging |
895 | 920 | |
921 | # mimic the default logging setting | |
922 | self.addCleanup(logging.root.setLevel, logging.root.level) | |
923 | logging.root.setLevel(logging.WARNING) | |
924 | ||
896 | 925 | name = 'test_logbook-%d' % randrange(1 << 32) |
897 | logger = getLogger(name) | |
898 | with capturing_stderr_context() as captured: | |
899 | redirector = redirected_logging() | |
900 | redirector.start() | |
901 | try: | |
902 | logger.debug('This is from the old system') | |
903 | logger.info('This is from the old system') | |
904 | logger.warn('This is from the old system') | |
905 | logger.error('This is from the old system') | |
906 | logger.critical('This is from the old system') | |
907 | finally: | |
908 | redirector.end() | |
926 | logger = logging.getLogger(name) | |
927 | ||
928 | with logbook.TestHandler(bubble=True) as handler: | |
929 | with capturing_stderr_context() as captured: | |
930 | with redirected_logging(set_root_logger_level): | |
931 | logger.debug('This is from the old system') | |
932 | logger.info('This is from the old system') | |
933 | logger.warn('This is from the old system') | |
934 | logger.error('This is from the old system') | |
935 | logger.critical('This is from the old system') | |
909 | 936 | self.assertIn(('WARNING: %s: This is from the old system' % name), |
910 | 937 | captured.getvalue()) |
938 | if set_root_logger_level: | |
939 | self.assertEquals(handler.records[0].level, logbook.DEBUG) | |
940 | else: | |
941 | self.assertEquals(handler.records[0].level, logbook.WARNING) | |
911 | 942 | |
912 | 943 | def test_redirect_logbook(self): |
913 | 944 | import logging |
1080 | 1111 | self.assertIn('WARNING: testlogger: here i am', caught.exception.args[0]) |
1081 | 1112 | self.assertIn('this is irrelevant', test_handler.records[0].message) |
1082 | 1113 | |
1114 | def test_dedup_handler(self): | |
1115 | from logbook.more import DedupHandler | |
1116 | with logbook.TestHandler() as test_handler: | |
1117 | with DedupHandler(): | |
1118 | self.log.info('foo') | |
1119 | self.log.info('bar') | |
1120 | self.log.info('foo') | |
1121 | self.assertEqual(2, len(test_handler.records)) | |
1122 | self.assertIn('message repeated 2 times: foo', test_handler.records[0].message) | |
1123 | self.assertIn('message repeated 1 times: bar', test_handler.records[1].message) | |
1124 | ||
1083 | 1125 | class QueuesTestCase(LogbookTestCase): |
1084 | def _get_zeromq(self): | |
1126 | def _get_zeromq(self, multi=False): | |
1085 | 1127 | from logbook.queues import ZeroMQHandler, ZeroMQSubscriber |
1086 | 1128 | |
1087 | 1129 | # Get an unused port |
1088 | 1130 | tempsock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) |
1089 | tempsock.bind(('localhost', 0)) | |
1131 | tempsock.bind(('127.0.0.1', 0)) | |
1090 | 1132 | host, unused_port = tempsock.getsockname() |
1091 | 1133 | tempsock.close() |
1092 | 1134 | |
1093 | 1135 | # Retrieve the ZeroMQ handler and subscriber |
1094 | 1136 | uri = 'tcp://%s:%d' % (host, unused_port) |
1095 | handler = ZeroMQHandler(uri) | |
1096 | subscriber = ZeroMQSubscriber(uri) | |
1137 | if multi: | |
1138 | handler = [ZeroMQHandler(uri, multi=True) for _ in range(3)] | |
1139 | else: | |
1140 | handler = ZeroMQHandler(uri) | |
1141 | subscriber = ZeroMQSubscriber(uri, multi=multi) | |
1097 | 1142 | # Enough time to start |
1098 | 1143 | time.sleep(0.1) |
1099 | 1144 | return handler, subscriber |
1101 | 1146 | @require_module('zmq') |
1102 | 1147 | def test_zeromq_handler(self): |
1103 | 1148 | tests = [ |
1104 | u'Logging something', | |
1105 | u'Something with umlauts äöü', | |
1106 | u'Something else for good measure', | |
1149 | u('Logging something'), | |
1150 | u('Something with umlauts äöü'), | |
1151 | u('Something else for good measure'), | |
1107 | 1152 | ] |
1108 | 1153 | handler, subscriber = self._get_zeromq() |
1109 | 1154 | for test in tests: |
1114 | 1159 | self.assertEqual(record.channel, self.log.name) |
1115 | 1160 | |
1116 | 1161 | @require_module('zmq') |
1162 | def test_multi_zeromq_handler(self): | |
1163 | tests = [ | |
1164 | u('Logging something'), | |
1165 | u('Something with umlauts äöü'), | |
1166 | u('Something else for good measure'), | |
1167 | ] | |
1168 | handlers, subscriber = self._get_zeromq(multi=True) | |
1169 | for handler in handlers: | |
1170 | for test in tests: | |
1171 | with handler: | |
1172 | self.log.warn(test) | |
1173 | record = subscriber.recv() | |
1174 | self.assertEqual(record.message, test) | |
1175 | self.assertEqual(record.channel, self.log.name) | |
1176 | ||
1177 | @require_module('zmq') | |
1117 | 1178 | def test_zeromq_background_thread(self): |
1118 | 1179 | handler, subscriber = self._get_zeromq() |
1119 | 1180 | test_handler = logbook.TestHandler() |
1126 | 1187 | # stop the controller. This will also stop the loop and join the |
1127 | 1188 | # background process. Before that we give it a fraction of a second |
1128 | 1189 | # to get all results |
1129 | time.sleep(0.1) | |
1190 | time.sleep(0.2) | |
1130 | 1191 | controller.stop() |
1131 | 1192 | |
1132 | 1193 | self.assertTrue(test_handler.has_warning('This is a warning')) |
1346 | 1407 | rv = to_safe_json([ |
1347 | 1408 | None, |
1348 | 1409 | 'foo', |
1349 | u'jäger', | |
1410 | u('jäger'), | |
1350 | 1411 | 1, |
1351 | 1412 | datetime(2000, 1, 1), |
1352 | {'jäger1': 1, u'jäger2': 2, Bogus(): 3, 'invalid': object()}, | |
1413 | {'jäger1': 1, u('jäger2'): 2, Bogus(): 3, 'invalid': object()}, | |
1353 | 1414 | object() # invalid |
1354 | 1415 | ]) |
1355 | 1416 | self.assertEqual( |
1356 | rv, [None, u'foo', u'jäger', 1, '2000-01-01T00:00:00Z', | |
1357 | {u('jäger1'): 1, u'jäger2': 2, u'bogus': 3, | |
1358 | u'invalid': None}, None]) | |
1417 | rv, [None, u('foo'), u('jäger'), 1, '2000-01-01T00:00:00Z', | |
1418 | {u('jäger1'): 1, u('jäger2'): 2, u('bogus'): 3, | |
1419 | u('invalid'): None}, None]) | |
1359 | 1420 | |
1360 | 1421 | def test_datehelpers(self): |
1361 | 1422 | from logbook.helpers import format_iso8601, parse_iso8601 |