Package list logbook / c2895a0
Imported Upstream version 0.7.0 Agustin Henze 7 years ago
17 changed file(s) with 316 addition(s) and 168 deletion(s). Raw diff Collapse all Expand all
00 language: python
1
2 services:
3 - redis-server
4
15 python:
26 - "2.6"
37 - "2.7"
8 - "3.2"
49 - "3.3"
10 - "3.4"
511 - "pypy"
612
713 install:
814 # this fixes SemLock issues on travis
915 - "sudo rm -rf /dev/shm && sudo ln -s /run/shm /dev/shm"
10 - "sudo apt-get install libzmq3-dev redis-server"
11 - "python scripts/pypi_mirror_setup.py http://a.pypi.python.org/simple"
16 - "sudo apt-add-repository -y ppa:chris-lea/zeromq"
17 - "sudo apt-get update"
18 - "sudo apt-get install -y libzmq3-dev"
1219 - "pip install cython redis"
20 - "easy_install pyzmq"
1321 - "make test_setup"
1422 - "python setup.py develop"
1523
11 =================
22
33 Here you can see the full list of changes between each Logbook release.
4
5 Version 0.7.0
6 -------------
7
8 Released on May 12th 2014. Codename "not_just_yet"
9
10 - Restored Python 3.2 support (thanks @rnortman)
11 - NullHandlers now respect filters - allows to only drop/mute certain records (#73)
12 - redirect_logging now sets the legacy root logger's level to DEBUG by default. This can be changed by specifying `set_root_logger_level=False` (#96)
13 - Bugfixes
414
515 Version 0.6.0
616 -------------
2929 .. autoclass:: ExceptionHandler
3030 :members:
3131
32 .. autoclass:: DedupHandler
33 :members:
34
3235 Colorized Handlers
3336 ------------------
3437
4747 # built documents.
4848 #
4949 # The short X.Y version.
50 version = '0.6.1-dev'
50 version = '0.7.0'
5151 # The full version, including alpha/beta/rc tags.
52 release = '0.6.1-dev'
52 release = '0.7.0'
5353
5454 # The language for content autogenerated by Sphinx. Refer to documentation
5555 # for a list of supported languages.
2222 LimitingHandlerMixin, WrapperHandler, FingersCrossedHandler, \
2323 GroupHandler
2424
25 __version__ = '0.6.1-dev'
25 __version__ = '0.7.0'
2626
2727 # create an anonymous default logger and provide all important
2828 # methods of that logger as global functions
146146 try:
147147 return dict.__getitem__(self, key)
148148 except KeyError:
149 return u''
149 return u('')
150150 else:
151151 def __missing__(self, key):
152 return u''
152 return u('')
153153
154154 def copy(self):
155155 return self.__class__(self)
473473 self._channel = None
474474 if isinstance(self.time, string_types):
475475 self.time = parse_iso8601(self.time)
476 self.extra = ExtraDict(self.extra)
476477 return self
477478
478479 @cached_property
841842 if not handler.should_handle(record):
842843 continue
843844
845 # a filter can still veto the handling of the record. This
846 # however is already operating on an initialized and processed
847 # record. The impact is that filters are slower than the
848 # handler's should_handle function in case there is no default
849 # handler that would handle the record (delayed init).
850 if handler.filter is not None \
851 and not handler.filter(record, handler):
852 continue
853
844854 # if this is a blackhole handler, don't even try to
845855 # do further processing, stop right away. Technically
846856 # speaking this is not 100% correct because if the handler
862872 self.process_record(record)
863873 record_initialized = True
864874
865 # a filter can still veto the handling of the record. This
866 # however is already operating on an initialized and processed
867 # record. The impact is that filters are slower than the
868 # handler's should_handle function in case there is no default
869 # handler that would handle the record (delayed init).
870 if handler.filter is not None \
871 and not handler.filter(record, handler):
872 continue
873
874875 # handle the record. If the record was handled and
875876 # the record is not bubbling we can abort now.
876877 if handler.handle(record) and not handler.bubble:
1919 _epoch_ord = date(1970, 1, 1).toordinal()
2020
2121
22 def redirect_logging():
22 def redirect_logging(set_root_logger_level=True):
2323 """Permanently redirects logging to the stdlib. This also
2424 removes all otherwise registered handlers on root logger of
2525 the logging system but leaves the other loggers untouched.
26
27 :param set_root_logger_level: controls whether the default level of the legacy root logger is changed
28 so that all legacy log messages get redirected to Logbook
2629 """
2730 del logging.root.handlers[:]
2831 logging.root.addHandler(RedirectLoggingHandler())
32 if set_root_logger_level:
33 logging.root.setLevel(logging.DEBUG)
2934
3035
3136 class redirected_logging(object):
3742 with redirected_logging():
3843 ...
3944 """
40 def __init__(self):
45 def __init__(self, set_root_logger_level=True):
4146 self.old_handlers = logging.root.handlers[:]
47 self.old_level = logging.root.level
48 self.set_root_logger_level = set_root_logger_level
4249
4350 def start(self):
44 redirect_logging()
51 redirect_logging(self.set_root_logger_level)
4552
4653 def end(self, etype=None, evalue=None, tb=None):
4754 logging.root.handlers[:] = self.old_handlers
55 logging.root.setLevel(self.old_level)
4856
4957 __enter__ = start
5058 __exit__ = end
2727 NOTSET, level_name_property, _missing, lookup_level, \
2828 Flags, ContextObject, ContextStackManager
2929 from logbook.helpers import rename, b, _is_text_stream, is_unicode, PY2, \
30 zip, xrange, string_types, integer_types, iteritems, reraise
30 zip, xrange, string_types, integer_types, reraise, u
3131
3232
3333 DEFAULT_FORMAT_STRING = (
34 u'[{record.time:%Y-%m-%d %H:%M}] '
35 u'{record.level_name}: {record.channel}: {record.message}'
34 u('[{record.time:%Y-%m-%d %H:%M}] ') +
35 u('{record.level_name}: {record.channel}: {record.message}')
3636 )
37 SYSLOG_FORMAT_STRING = u'{record.channel}: {record.message}'
38 NTLOG_FORMAT_STRING = u'''\
37 SYSLOG_FORMAT_STRING = u('{record.channel}: {record.message}')
38 NTLOG_FORMAT_STRING = u('''\
3939 Message Level: {record.level_name}
4040 Location: {record.filename}:{record.lineno}
4141 Module: {record.module}
4545 Event provided Message:
4646
4747 {record.message}
48 '''
48 ''')
4949 TEST_FORMAT_STRING = \
50 u'[{record.level_name}] {record.channel}: {record.message}'
51 MAIL_FORMAT_STRING = u'''\
50 u('[{record.level_name}] {record.channel}: {record.message}')
51 MAIL_FORMAT_STRING = u('''\
5252 Subject: {handler.subject}
5353
5454 Message type: {record.level_name}
6060 Message:
6161
6262 {record.message}
63 '''
64 MAIL_RELATED_FORMAT_STRING = u'''\
63 ''')
64 MAIL_RELATED_FORMAT_STRING = u('''\
6565 Message type: {record.level_name}
6666 Location: {record.filename}:{record.lineno}
6767 Module: {record.module}
6868 Function: {record.func_name}
6969 {record.message}
70 '''
70 ''')
7171
7272 SYSLOG_PORT = 514
7373
124124 # all here goes to that handler
125125 handler.pop_application()
126126
127 By default messages send to that handler will not go to a handler on
127 By default messages sent to that handler will not go to a handler on
128128 an outer level on the stack, if handled. This can be changed by
129129 setting bubbling to `True`. This setup for example would not have
130130 any effect::
131131
132 handler = NullHandler(bubble=False)
132 handler = NullHandler(bubble=True)
133133 handler.push_application()
134134
135135 Whereas this setup disables all logging for the application::
373373 line = self.format_record(record, handler)
374374 exc = self.format_exception(record)
375375 if exc:
376 line += u'\n' + exc
376 line += u('\n') + exc
377377 return line
378378
379379
419419 """Returns a hashlib object with the hash of the record."""
420420 hash = sha1()
421421 hash.update(('%d\x00' % record.level).encode('ascii'))
422 hash.update((record.channel or u'').encode('utf-8') + b('\x00'))
422 hash.update((record.channel or u('')).encode('utf-8') + b('\x00'))
423423 hash.update(record.filename.encode('utf-8') + b('\x00'))
424424 hash.update(b(str(record.lineno)))
425425 return hash
10311031 """
10321032 default_format_string = MAIL_FORMAT_STRING
10331033 default_related_format_string = MAIL_RELATED_FORMAT_STRING
1034 default_subject = u'Server Error in Application'
1034 default_subject = u('Server Error in Application')
10351035
10361036 #: the maximum number of record hashes in the cache for the limiting
10371037 #: feature. Afterwards, record_cache_prune percent of the oldest
11551155 """
11561156 from smtplib import SMTP, SMTP_PORT, SMTP_SSL_PORT
11571157 if self.server_addr is None:
1158 host = 'localhost'
1158 host = '127.0.0.1'
11591159 port = self.secure and SMTP_SSL_PORT or SMTP_PORT
11601160 else:
11611161 host, port = self.server_addr
13521352 return (facility << 3) | priority
13531353
13541354 def emit(self, record):
1355 prefix = u''
1355 prefix = u('')
13561356 if self.application_name is not None:
1357 prefix = self.application_name + u':'
1358 self.send_to_socket((u'<%d>%s%s\x00' % (
1357 prefix = self.application_name + u(':')
1358 self.send_to_socket((u('<%d>%s%s\x00') % (
13591359 self.encode_priority(record),
13601360 prefix,
13611361 self.format(record)
99 """
1010 import re
1111 import os
12 from collections import defaultdict
1213 from cgi import parse_qsl
1314
14 from logbook.base import RecordDispatcher, NOTSET, ERROR, NOTICE
15 from logbook.base import RecordDispatcher, dispatch_record, NOTSET, ERROR, NOTICE
1516 from logbook.handlers import Handler, StringFormatter, \
1617 StringFormatterHandlerMixin, StderrHandler
1718 from logbook._termcolors import colorize
18 from logbook.helpers import PY2, string_types, iteritems
19 from logbook.helpers import PY2, string_types, iteritems, u
1920
2021 from logbook.ticketing import TicketingHandler as DatabaseHandler
2122 from logbook.ticketing import BackendBase
2728
2829 _ws_re = re.compile(r'(\s+)(?u)')
2930 TWITTER_FORMAT_STRING = \
30 u'[{record.channel}] {record.level_name}: {record.message}'
31 u('[{record.channel}] {record.level_name}: {record.message}')
3132 TWITTER_ACCESS_TOKEN_URL = 'https://twitter.com/oauth/access_token'
3233 NEW_TWEET_URL = 'https://api.twitter.com/1/statuses/update.json'
3334
3839 def setup_backend(self):
3940 from couchdb import Server
4041
41 uri = self.options.pop('uri', u'')
42 uri = self.options.pop('uri', u(''))
4243 couch = Server(uri)
4344 db_name = self.options.pop('db')
4445 self.database = couch[db_name]
6263 max_length = 140
6364
6465 def format_exception(self, record):
65 return u'%s: %s' % (record.exception_shortname,
66 record.exception_message)
66 return u('%s: %s') % (record.exception_shortname,
67 record.exception_message)
6768
6869 def __call__(self, record, handler):
6970 formatted = StringFormatter.__call__(self, record, handler)
7374 length += len(piece)
7475 if length > self.max_length:
7576 if length - len(piece) < self.max_length:
76 rv.append(u'…')
77 rv.append(u('…'))
7778 break
7879 rv.append(piece)
79 return u''.join(rv)
80 return u('').join(rv)
8081
8182
8283 class TaggingLogger(RecordDispatcher):
356357 if self.should_handle(record):
357358 raise self.exc_type(self.format(record))
358359 return False
360
361 class DedupHandler(Handler):
362 """A handler that deduplicates log messages.
363
364 It emits each unique log record once, along with the number of times it was emitted.
365 Example::
366
367 with logbook.more.DedupHandler():
368 logbook.error('foo')
369 logbook.error('bar')
370 logbook.error('foo')
371
372 The expected output::
373
374 message repeated 2 times: foo
375 message repeated 1 times: bar
376 """
377 def __init__(self, format_string='message repeated {count} times: {message}', *args, **kwargs):
378 Handler.__init__(self, bubble=False, *args, **kwargs)
379 self._format_string = format_string
380 self.clear()
381
382 def clear(self):
383 self._message_to_count = defaultdict(int)
384 self._unique_ordered_records = []
385
386 def pop_application(self):
387 Handler.pop_application(self)
388 self.flush()
389
390 def pop_thread(self):
391 Handler.pop_thread(self)
392 self.flush()
393
394 def handle(self, record):
395 if not record.message in self._message_to_count:
396 self._unique_ordered_records.append(record)
397 self._message_to_count[record.message] += 1
398 return True
399
400 def flush(self):
401 for record in self._unique_ordered_records:
402 record.message = self._format_string.format(message=record.message, count=self._message_to_count[record.message])
403 # record.dispatcher is the logger who created the message, it's sometimes suppressed (by logbook.info for example)
404 dispatch = record.dispatcher.call_handlers if record.dispatcher is not None else dispatch_record
405 dispatch(record)
406 self.clear()
407
4444
4545 def make_title(self, record):
4646 """Called to get the title from the record."""
47 return u'%s: %s' % (record.channel, record.level_name.title())
47 return u('%s: %s') % (record.channel, record.level_name.title())
4848
4949 def make_text(self, record):
5050 """Called to get the text of the record."""
218218 con = http_client.HTTPSConnection('boxcar.io')
219219 con.request('POST', '/notifications/', headers={
220220 'Authorization': 'Basic ' +
221 base64.b64encode((u'%s:%s' %
221 base64.b64encode((u('%s:%s') %
222222 (self.email, self.password)).encode('utf-8')).strip(),
223223 }, body=body)
224224 con.close()
1313 import platform
1414 from logbook.base import NOTSET, LogRecord, dispatch_record
1515 from logbook.handlers import Handler, WrapperHandler
16 from logbook.helpers import PY2
16 from logbook.helpers import PY2, u
1717
1818 if PY2:
1919 from Queue import Empty, Queue as ThreadQueue
3030
3131 Example setup::
3232
33 handler = RedisHandler('http://localhost', port='9200', key='redis')
33 handler = RedisHandler('http://127.0.0.1', port='9200', key='redis')
3434
3535 If your Redis instance is password protected, you can securely connect passing
3636 your password when creating a RedisHandler object.
4141
4242 More info about the default buffer size: wp.me/p3tYJu-3b
4343 """
44 def __init__(self, host='localhost', port=6379, key='redis', extra_fields={},
44 def __init__(self, host='127.0.0.1', port=6379, key='redis', extra_fields={},
4545 flush_threshold=128, flush_time=1, level=NOTSET, filter=None,
4646 password=False, bubble=True, context=None):
4747 Handler.__init__(self, level, filter, bubble)
119119 self._flush_buffer()
120120
121121
122 class RabbitMQHandler(Handler):
123 """A handler that acts as a RabbitMQ publisher, which publishes each record
124 as json dump. Requires the kombu module.
122 class MessageQueueHandler(Handler):
123 """A handler that acts as a message queue publisher, which publishes each
124 record as json dump. Requires the kombu module.
125125
126126 The queue will be filled with JSON exported log records. To receive such
127 log records from a queue you can use the :class:`RabbitMQSubscriber`.
128
127 log records from a queue you can use the :class:`MessageQueueSubscriber`.
129128
130129 Example setup::
131130
132 handler = RabbitMQHandler('amqp://guest:guest@localhost//', queue='my_log')
133 """
131 handler = MessageQueueHandler('mongodb://localhost:27017/logging')
132 """
133
134134 def __init__(self, uri=None, queue='logging', level=NOTSET,
135 filter=None, bubble=False, context=None):
135 filter=None, bubble=False, context=None):
136136 Handler.__init__(self, level, filter, bubble)
137137 try:
138138 import kombu
156156 self.queue.close()
157157
158158
159 RabbitMQHandler = MessageQueueHandler
160
161
159162 class ZeroMQHandler(Handler):
160163 """A handler that acts as a ZeroMQ publisher, which publishes each record
161164 as json dump. Requires the pyzmq library.
163166 The queue will be filled with JSON exported log records. To receive such
164167 log records from a queue you can use the :class:`ZeroMQSubscriber`.
165168
169 If `multi` is set to `True`, the handler will use a `PUSH` socket to
170 publish the records. This allows multiple handlers to use the same `uri`.
171 The records can be received by using the :class:`ZeroMQSubscriber` with
172 `multi` set to `True`.
173
166174
167175 Example setup::
168176
170178 """
171179
172180 def __init__(self, uri=None, level=NOTSET, filter=None, bubble=False,
173 context=None):
181 context=None, multi=False):
174182 Handler.__init__(self, level, filter, bubble)
175183 try:
176184 import zmq
179187 'the ZeroMQHandler.')
180188 #: the zero mq context
181189 self.context = context or zmq.Context()
182 #: the zero mq socket.
183 self.socket = self.context.socket(zmq.PUB)
184 if uri is not None:
185 self.socket.bind(uri)
190
191 if multi:
192 #: the zero mq socket.
193 self.socket = self.context.socket(zmq.PUSH)
194 if uri is not None:
195 self.socket.connect(uri)
196 else:
197 #: the zero mq socket.
198 self.socket = self.context.socket(zmq.PUB)
199 if uri is not None:
200 self.socket.bind(uri)
201
186202
187203 def export_record(self, record):
188204 """Exports the record into a dictionary ready for JSON dumping."""
274290 return controller
275291
276292
277 class RabbitMQSubscriber(SubscriberBase):
278 """A helper that acts as RabbitMQ subscriber and will dispatch received
279 log records to the active handler setup. There are multiple ways to
280 use this class.
293 class MessageQueueSubscriber(SubscriberBase):
294 """A helper that acts as a message queue subscriber and will dispatch
295 received log records to the active handler setup. There are multiple ways
296 to use this class.
281297
282298 It can be used to receive log records from a queue::
283299
284 subscriber = RabbitMQSubscriber('amqp://guest:guest@localhost//')
300 subscriber = MessageQueueSubscriber('mongodb://localhost:27017/logging')
285301 record = subscriber.recv()
286302
287303 But it can also be used to receive and dispatch these in one go::
288304
289305 with target_handler:
290 subscriber = RabbitMQSubscriber('amqp://guest:guest@localhost//')
306 subscriber = MessageQueueSubscriber('mongodb://localhost:27017/logging')
291307 subscriber.dispatch_forever()
292308
293309 This will take all the log records from that queue and dispatch them
294310 over to `target_handler`. If you want you can also do that in the
295311 background::
296312
297 subscriber = RabbitMQSubscriber('amqp://guest:guest@localhost//')
313 subscriber = MessageQueueSubscriber('mongodb://localhost:27017/logging')
298314 controller = subscriber.dispatch_in_background(target_handler)
299315
300316 The controller returned can be used to shut down the background
302318
303319 controller.stop()
304320 """
305
306321 def __init__(self, uri=None, queue='logging'):
307322 try:
308323 import kombu
309324 except ImportError:
310 raise RuntimeError('The kombu library is required for '
311 'the RabbitMQSubscriber.')
325 raise RuntimeError('The kombu library is required.')
312326 if uri:
313327 connection = kombu.Connection(uri)
314328
343357 return LogRecord.from_dict(log_record)
344358
345359
360 RabbitMQSubscriber = MessageQueueSubscriber
361
362
346363 class ZeroMQSubscriber(SubscriberBase):
347364 """A helper that acts as ZeroMQ subscriber and will dispatch received
348365 log records to the active handler setup. There are multiple ways to
370387 thread::
371388
372389 controller.stop()
373 """
374
375 def __init__(self, uri=None, context=None):
390
391 If `multi` is set to `True`, the subscriber will use a `PULL` socket
392 and listen to records published by a `PUSH` socket (usually via a
393 :class:`ZeroMQHandler` with `multi` set to `True`). This allows a
394 single subscriber to dispatch multiple handlers.
395 """
396
397 def __init__(self, uri=None, context=None, multi=False):
376398 try:
377399 import zmq
378400 except ImportError:
382404
383405 #: the zero mq context
384406 self.context = context or zmq.Context()
385 #: the zero mq socket.
386 self.socket = self.context.socket(zmq.SUB)
387 if uri is not None:
388 self.socket.connect(uri)
389 self.socket.setsockopt_unicode(zmq.SUBSCRIBE, u'')
407
408 if multi:
409 #: the zero mq socket.
410 self.socket = self.context.socket(zmq.PULL)
411 if uri is not None:
412 self.socket.bind(uri)
413 else:
414 #: the zero mq socket.
415 self.socket = self.context.socket(zmq.SUB)
416 if uri is not None:
417 self.socket.connect(uri)
418 self.socket.setsockopt_unicode(zmq.SUBSCRIBE, u(''))
390419
391420 def __del__(self):
392421 try:
528557 def __init__(self, channel):
529558 self.channel = channel
530559
531 def recv(self, timeout=-1):
560 def recv(self, timeout=None):
532561 try:
533562 rv = self.channel.receive(timeout=timeout)
534563 except self.channel.RemoteError:
638667
639668 subscribers = SubscriberGroup([
640669 MultiProcessingSubscriber(queue),
641 ZeroMQSubscriber('tcp://localhost:5000')
670 ZeroMQSubscriber('tcp://127.0.0.1:5000')
642671 ])
643672 with target_handler:
644673 subscribers.dispatch_forever()
1212 import json
1313 from logbook.base import NOTSET, level_name_property, LogRecord
1414 from logbook.handlers import Handler, HashingHandlerMixin
15 from logbook.helpers import cached_property, b, PY2
15 from logbook.helpers import cached_property, b, PY2, u
1616
1717 class Ticket(object):
1818 """Represents a ticket from the database."""
191191 row = cnx.execute(self.tickets.insert().values(
192192 record_hash=hash,
193193 level=record.level,
194 channel=record.channel or u'',
195 location=u'%s:%d' % (record.filename, record.lineno),
196 module=record.module or u'<unknown>',
194 channel=record.channel or u(''),
195 location=u('%s:%d') % (record.filename, record.lineno),
196 module=record.module or u('<unknown>'),
197197 occurrence_count=0,
198198 solved=False,
199199 app_id=app_id
286286 from pymongo.errors import AutoReconnect
287287
288288 _connection = None
289 uri = self.options.pop('uri', u'')
289 uri = self.options.pop('uri', u(''))
290290 _connection_attempts = 0
291291
292292 parsed_uri = parse_uri(uri, Connection.PORT)
335335 doc = {
336336 'record_hash': hash,
337337 'level': record.level,
338 'channel': record.channel or u'',
339 'location': u'%s:%d' % (record.filename, record.lineno),
340 'module': record.module or u'<unknown>',
338 'channel': record.channel or u(''),
339 'location': u('%s:%d') % (record.filename, record.lineno),
340 'module': record.module or u('<unknown>'),
341341 'occurrence_count': 0,
342342 'solved': False,
343343 'app_id': app_id,
447447 filter=None, bubble=False, hash_salt=None, backend=None,
448448 **db_options):
449449 if hash_salt is None:
450 hash_salt = u'apphash-' + app_id
450 hash_salt = u('apphash-') + app_id
451451 TicketingBaseHandler.__init__(self, hash_salt, level, filter, bubble)
452452 if backend is None:
453453 backend = self.default_backend
+0
-25
scripts/pypi_mirror_setup.py less more
0 #! /usr/bin/python
1 import os
2 import sys
3
4
5 if __name__ == '__main__':
6 mirror = sys.argv[1]
7 f = open(os.path.expanduser("~/.pydistutils.cfg"), "w")
8 f.write("""
9 [easy_install]
10 index_url = %s
11 """ % mirror)
12 f.close()
13 pip_dir = os.path.expanduser("~/.pip")
14 if not os.path.isdir(pip_dir):
15 os.makedirs(pip_dir)
16 f = open(os.path.join(pip_dir, "pip.conf"), "w")
17 f.write("""
18 [global]
19 index-url = %s
20
21 [install]
22 use-mirrors = true
23 """ % mirror)
24 f.close()
00 #! /usr/bin/python
1 import platform
21 import subprocess
2 import os
33 import sys
44
55 def _execute(*args, **kwargs):
88 sys.exit(result)
99
1010 if __name__ == '__main__':
11 python_version = platform.python_version()
11 python_version = sys.version_info
1212
1313 deps = [
14 "execnet",
15 "Jinja2",
14 "execnet>=1.0.9",
1615 "nose",
1716 "pyzmq",
1817 "sqlalchemy",
1918 ]
2019
21 if python_version < "2.7":
20 if python_version < (2, 7):
2221 deps.append("unittest2")
22 if (3, 2) <= python_version < (3, 3):
23 deps.append("markupsafe==0.15")
24 deps.append("Jinja2==2.6")
25 else:
26 deps.append("Jinja2")
2327 print("Setting up dependencies...")
24 _execute("pip install %s" % " ".join(deps), shell=True)
28 _execute([os.path.join(os.path.dirname(sys.executable), "pip"), "install"] + deps, shell=False)
105105 features['speedups'] = speedups
106106 setup(
107107 name='Logbook',
108 version='0.6.1-dev',
108 version='0.7.0',
109109 license='BSD',
110110 url='http://logbook.pocoo.org/',
111111 author='Armin Ronacher, Georg Brandl',
2727 from thread import get_ident
2828 except ImportError:
2929 from _thread import get_ident
30 import base64
3031
3132 __file_without_pyc__ = __file__
3233 if __file_without_pyc__.endswith(".pyc"):
252253 def test_file_handler_unicode(self):
253254 with capturing_stderr_context() as captured:
254255 with self.thread_activation_strategy(logbook.FileHandler(self.filename)) as h:
255 self.log.info(u'\u0431')
256 self.log.info(u('\u0431'))
256257 self.assertFalse(captured.getvalue())
257258
258259 def test_file_handler_delay(self):
351352 self.assertEqual(f.readline().rstrip(), '[02:00] Third One')
352353
353354 def test_mail_handler(self):
354 subject = u'\xf8nicode'
355 subject = u('\xf8nicode')
355356 handler = make_fake_mail_handler(subject=subject)
356357 with capturing_stderr_context() as fallback:
357358 with self.thread_activation_strategy(handler):
359360 try:
360361 1 / 0
361362 except Exception:
362 self.log.exception(u'Viva la Espa\xf1a')
363 self.log.exception(u('Viva la Espa\xf1a'))
363364
364365 if not handler.mails:
365366 # if sending the mail failed, the reason should be on stderr
370371 mail = mail.replace("\r", "")
371372 self.assertEqual(sender, handler.from_addr)
372373 self.assert_('=?utf-8?q?=C3=B8nicode?=' in mail)
373 self.assertRegexpMatches(mail, 'Message type:\s+ERROR')
374 self.assertRegexpMatches(mail, 'Location:.*%s' % __file_without_pyc__)
375 self.assertRegexpMatches(mail, 'Module:\s+%s' % __name__)
376 self.assertRegexpMatches(mail, 'Function:\s+test_mail_handler')
377 body = u'Message:\n\nViva la Espa\xf1a'
374 header, data = mail.split("\n\n", 1)
375 if "Content-Transfer-Encoding: base64" in header:
376 data = base64.b64decode(data).decode("utf-8")
377 self.assertRegexpMatches(data, 'Message type:\s+ERROR')
378 self.assertRegexpMatches(data, 'Location:.*%s' % __file_without_pyc__)
379 self.assertRegexpMatches(data, 'Module:\s+%s' % __name__)
380 self.assertRegexpMatches(data, 'Function:\s+test_mail_handler')
381 body = u('Viva la Espa\xf1a')
378382 if sys.version_info < (3, 0):
379383 body = body.encode('utf-8')
380 self.assertIn(body, mail)
381 self.assertIn('\n\nTraceback (most', mail)
382 self.assertIn('1 / 0', mail)
384 self.assertIn(body, data)
385 self.assertIn('\nTraceback (most', data)
386 self.assertIn('1 / 0', data)
383387 self.assertIn('This is not mailed', fallback.getvalue())
384388
385389 def test_mail_handler_record_limits(self):
477481 except socket.error:
478482 self.fail('got timeout on socket')
479483 self.assertEqual(rv, (
480 u'<12>%stestlogger: Syslog is weird\x00' %
481 (app_name and app_name + u':' or u'')).encode('utf-8'))
484 u('<12>%stestlogger: Syslog is weird\x00') %
485 (app_name and app_name + u(':') or u(''))).encode('utf-8'))
482486
483487 def test_handler_processors(self):
484488 handler = make_fake_mail_handler(format_string='''\
678682 self.assertFalse(handler.has_warning('bar', channel='Logger2'))
679683 self.assertFalse(outer_handler.has_warning('foo', channel='Logger1'))
680684 self.assert_(outer_handler.has_warning('bar', channel='Logger2'))
685
686 def test_null_handler_filtering(self):
687 logger1 = logbook.Logger("1")
688 logger2 = logbook.Logger("2")
689 outer = logbook.TestHandler()
690 inner = logbook.NullHandler()
691
692 inner.filter = lambda record, handler: record.dispatcher is logger1
693
694 with self.thread_activation_strategy(outer):
695 with self.thread_activation_strategy(inner):
696 logger1.warn("1")
697 logger2.warn("2")
698
699 self.assertTrue(outer.has_warning("2", channel="2"))
700 self.assertFalse(outer.has_warning("1", channel="1"))
681701
682702 def test_different_context_pushing(self):
683703 h1 = logbook.TestHandler(level=logbook.DEBUG)
889909
890910 class LoggingCompatTestCase(LogbookTestCase):
891911
892 def test_basic_compat(self):
893 from logging import getLogger
912 def test_basic_compat_with_level_setting(self):
913 self._test_basic_compat(True)
914 def test_basic_compat_without_level_setting(self):
915 self._test_basic_compat(False)
916
917 def _test_basic_compat(self, set_root_logger_level):
918 import logging
894919 from logbook.compat import redirected_logging
895920
921 # mimic the default logging setting
922 self.addCleanup(logging.root.setLevel, logging.root.level)
923 logging.root.setLevel(logging.WARNING)
924
896925 name = 'test_logbook-%d' % randrange(1 << 32)
897 logger = getLogger(name)
898 with capturing_stderr_context() as captured:
899 redirector = redirected_logging()
900 redirector.start()
901 try:
902 logger.debug('This is from the old system')
903 logger.info('This is from the old system')
904 logger.warn('This is from the old system')
905 logger.error('This is from the old system')
906 logger.critical('This is from the old system')
907 finally:
908 redirector.end()
926 logger = logging.getLogger(name)
927
928 with logbook.TestHandler(bubble=True) as handler:
929 with capturing_stderr_context() as captured:
930 with redirected_logging(set_root_logger_level):
931 logger.debug('This is from the old system')
932 logger.info('This is from the old system')
933 logger.warn('This is from the old system')
934 logger.error('This is from the old system')
935 logger.critical('This is from the old system')
909936 self.assertIn(('WARNING: %s: This is from the old system' % name),
910937 captured.getvalue())
938 if set_root_logger_level:
939 self.assertEquals(handler.records[0].level, logbook.DEBUG)
940 else:
941 self.assertEquals(handler.records[0].level, logbook.WARNING)
911942
912943 def test_redirect_logbook(self):
913944 import logging
10801111 self.assertIn('WARNING: testlogger: here i am', caught.exception.args[0])
10811112 self.assertIn('this is irrelevant', test_handler.records[0].message)
10821113
1114 def test_dedup_handler(self):
1115 from logbook.more import DedupHandler
1116 with logbook.TestHandler() as test_handler:
1117 with DedupHandler():
1118 self.log.info('foo')
1119 self.log.info('bar')
1120 self.log.info('foo')
1121 self.assertEqual(2, len(test_handler.records))
1122 self.assertIn('message repeated 2 times: foo', test_handler.records[0].message)
1123 self.assertIn('message repeated 1 times: bar', test_handler.records[1].message)
1124
10831125 class QueuesTestCase(LogbookTestCase):
1084 def _get_zeromq(self):
1126 def _get_zeromq(self, multi=False):
10851127 from logbook.queues import ZeroMQHandler, ZeroMQSubscriber
10861128
10871129 # Get an unused port
10881130 tempsock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
1089 tempsock.bind(('localhost', 0))
1131 tempsock.bind(('127.0.0.1', 0))
10901132 host, unused_port = tempsock.getsockname()
10911133 tempsock.close()
10921134
10931135 # Retrieve the ZeroMQ handler and subscriber
10941136 uri = 'tcp://%s:%d' % (host, unused_port)
1095 handler = ZeroMQHandler(uri)
1096 subscriber = ZeroMQSubscriber(uri)
1137 if multi:
1138 handler = [ZeroMQHandler(uri, multi=True) for _ in range(3)]
1139 else:
1140 handler = ZeroMQHandler(uri)
1141 subscriber = ZeroMQSubscriber(uri, multi=multi)
10971142 # Enough time to start
10981143 time.sleep(0.1)
10991144 return handler, subscriber
11011146 @require_module('zmq')
11021147 def test_zeromq_handler(self):
11031148 tests = [
1104 u'Logging something',
1105 u'Something with umlauts äöü',
1106 u'Something else for good measure',
1149 u('Logging something'),
1150 u('Something with umlauts äöü'),
1151 u('Something else for good measure'),
11071152 ]
11081153 handler, subscriber = self._get_zeromq()
11091154 for test in tests:
11141159 self.assertEqual(record.channel, self.log.name)
11151160
11161161 @require_module('zmq')
1162 def test_multi_zeromq_handler(self):
1163 tests = [
1164 u('Logging something'),
1165 u('Something with umlauts äöü'),
1166 u('Something else for good measure'),
1167 ]
1168 handlers, subscriber = self._get_zeromq(multi=True)
1169 for handler in handlers:
1170 for test in tests:
1171 with handler:
1172 self.log.warn(test)
1173 record = subscriber.recv()
1174 self.assertEqual(record.message, test)
1175 self.assertEqual(record.channel, self.log.name)
1176
1177 @require_module('zmq')
11171178 def test_zeromq_background_thread(self):
11181179 handler, subscriber = self._get_zeromq()
11191180 test_handler = logbook.TestHandler()
11261187 # stop the controller. This will also stop the loop and join the
11271188 # background process. Before that we give it a fraction of a second
11281189 # to get all results
1129 time.sleep(0.1)
1190 time.sleep(0.2)
11301191 controller.stop()
11311192
11321193 self.assertTrue(test_handler.has_warning('This is a warning'))
13461407 rv = to_safe_json([
13471408 None,
13481409 'foo',
1349 u'jäger',
1410 u('jäger'),
13501411 1,
13511412 datetime(2000, 1, 1),
1352 {'jäger1': 1, u'jäger2': 2, Bogus(): 3, 'invalid': object()},
1413 {'jäger1': 1, u('jäger2'): 2, Bogus(): 3, 'invalid': object()},
13531414 object() # invalid
13541415 ])
13551416 self.assertEqual(
1356 rv, [None, u'foo', u'jäger', 1, '2000-01-01T00:00:00Z',
1357 {u('jäger1'): 1, u'jäger2': 2, u'bogus': 3,
1358 u'invalid': None}, None])
1417 rv, [None, u('foo'), u('jäger'), 1, '2000-01-01T00:00:00Z',
1418 {u('jäger1'): 1, u('jäger2'): 2, u('bogus'): 3,
1419 u('invalid'): None}, None])
13591420
13601421 def test_datehelpers(self):
13611422 from logbook.helpers import format_iso8601, parse_iso8601
00 [tox]
1 envlist=py26,py27,py33,pypy,docs
1 envlist=py26,py27,py32,py33,py34,pypy,docs
22
33 [testenv]
44 commands=
5 python {toxinidir}/scripts/test_setup.py
5 {envpython} {toxinidir}/scripts/test_setup.py
66 nosetests -w tests
77 deps=
88 nose