Package list logbook / 48f77d5
Update upstream source from tag 'upstream/1.4.1' Update to upstream version '1.4.1' with Debian dir a560e35502267f300770598284ad018e460407b9 Iñaki Malerba 2 years ago
8 changed file(s) with 129 addition(s) and 32 deletion(s). Raw diff Collapse all Expand all
1818 env:
1919 - DISABLE_LOGBOOK_CEXT=True
2020 - CYBUILD=True
21
2122 script:
2223 - pip install -e .[all]
24 - if [[ $GEVENT == 'True' ]] ; then pip install gevent; fi
2325 - py.test --cov=logbook -r s tests
26
2427 matrix:
2528 exclude:
2629 - python: pypy
2730 env: CYBUILD=True
2831 - python: pypy3
2932 env: CYBUILD=True
33 include:
34 - python: "3.6"
35 env: GEVENT=True CYBUILD=True
36 - python: "2.7"
37 env: GEVENT=True CYBUILD=True
38
3039 after_success:
3140 - coveralls
41
3242 notifications:
3343 email:
3444 recipients:
11 =================
22
33 Here you can see the full list of changes between each Logbook release.
4
5 Version 1.4.1
6
7 Released on October 14th, 2018
8
9 - Fixed deprecated regular expression pattern (thanks Tsuyoshi Hombashi)
10 - Fixed TimedRotatingFileHandler rotation (thanks Tucker Beck)
11
12 Version 1.4.0
13 -------------
14
15 Released on May 15th, 2018
16
17 - Added support for checking if trace logs have been emitted in TestHandler (thanks @thedrow)
18
419
520 Version 1.3.0
621 -------------
0 __version__ = "1.3.0"
0 __version__ = "1.4.1"
2727
2828
2929 if has_gevent:
30 from gevent._threading import (Lock as ThreadLock,
31 RLock as ThreadRLock,
32 get_ident as thread_get_ident,
33 local as thread_local)
30 from gevent.monkey import get_original as _get_original
31 ThreadLock = _get_original('threading', 'Lock')
32 ThreadRLock = _get_original('threading', 'RLock')
33 try:
34 thread_get_ident = _get_original('threading', 'get_ident')
35 except AttributeError:
36 # In 2.7, this is called _get_ident
37 thread_get_ident = _get_original('threading', '_get_ident')
38 thread_local = _get_original('threading', 'local')
39
3440 from gevent.thread import get_ident as greenlet_get_ident
3541 from gevent.local import local as greenlet_local
3642 from gevent.lock import BoundedSemaphore
2727 from textwrap import dedent
2828
2929 from logbook.base import (
30 CRITICAL, ERROR, WARNING, NOTICE, INFO, DEBUG, NOTSET, level_name_property,
30 CRITICAL, ERROR, WARNING, NOTICE, INFO, DEBUG, TRACE, NOTSET, level_name_property,
3131 _missing, lookup_level, Flags, ContextObject, ContextStackManager,
3232 _datetime_factory)
3333 from logbook.helpers import (
900900 self.timed_filename_for_current = timed_filename_for_current
901901
902902 self._timestamp = self._get_timestamp(_datetime_factory())
903 timed_filename = self.generate_timed_filename(self._timestamp)
904
905903 if self.timed_filename_for_current:
906 filename = timed_filename
904 filename = self.generate_timed_filename(self._timestamp)
905 elif os.path.exists(filename):
906 self._timestamp = self._get_timestamp(
907 datetime.fromtimestamp(
908 os.stat(filename).st_mtime
909 )
910 )
907911
908912 FileHandler.__init__(self, filename, mode, encoding, level,
909913 format_string, True, filter, bubble)
931935 """
932936 directory = os.path.dirname(self._filename)
933937 files = []
938 rollover_regex = re.compile(self.rollover_format.format(
939 basename=re.escape(self.basename),
940 timestamp='.+',
941 ext=re.escape(self.ext),
942 ))
934943 for filename in os.listdir(directory):
935944 filename = os.path.join(directory, filename)
936 regex = self.rollover_format.format(
937 basename=re.escape(self.basename),
938 timestamp='.+',
939 ext=re.escape(self.ext),
940 )
941 if re.match(regex, filename):
945 if rollover_regex.match(filename):
942946 files.append((os.path.getmtime(filename), filename))
943947 files.sort()
944948 if self.backup_count > 1:
950954 if self.stream is not None:
951955 self.stream.close()
952956
957 if (
958 not self.timed_filename_for_current
959 and os.path.exists(self._filename)
960 ):
961 filename = self.generate_timed_filename(self._timestamp)
962 os.rename(self._filename, filename)
963
953964 if self.backup_count > 0:
954965 for time, filename in self.files_to_delete():
955966 os.remove(filename)
956967
957968 if self.timed_filename_for_current:
958969 self._filename = self.generate_timed_filename(new_timestamp)
959 else:
960 filename = self.generate_timed_filename(self._timestamp)
961 os.rename(self._filename, filename)
962970 self._timestamp = new_timestamp
963971
964972 self._open('w')
10541062 """`True` if any :data:`DEBUG` records were found."""
10551063 return any(r.level == DEBUG for r in self.records)
10561064
1065 @property
1066 def has_traces(self):
1067 """`True` if any :data:`TRACE` records were found."""
1068 return any(r.level == TRACE for r in self.records)
1069
10571070 def has_critical(self, *args, **kwargs):
10581071 """`True` if a specific :data:`CRITICAL` log record exists.
10591072
11001113 See :ref:`probe-log-records` for more information.
11011114 """
11021115 kwargs['level'] = DEBUG
1116 return self._test_for(*args, **kwargs)
1117
1118 def has_trace(self, *args, **kwargs):
1119 """`True` if a specific :data:`TRACE` log record exists.
1120
1121 See :ref:`probe-log-records` for more information.
1122 """
1123 kwargs['level'] = TRACE
11031124 return self._test_for(*args, **kwargs)
11041125
11051126 def _test_for(self, message=None, channel=None, level=None):
3737 else:
3838 from urllib.parse import parse_qsl, urlencode
3939
40 _ws_re = re.compile(r'(\s+)(?u)')
40 _ws_re = re.compile(r'(\s+)', re.UNICODE)
4141 TWITTER_FORMAT_STRING = u(
4242 '[{record.channel}] {record.level_name}: {record.message}')
4343 TWITTER_ACCESS_TOKEN_URL = 'https://twitter.com/oauth/access_token'
00 import os
11 import pytest
2 import time
23 from datetime import datetime
34
45 import logbook
166167 assert f.readline().rstrip() == '[01:00] Third One'
167168 assert f.readline().rstrip() == '[02:00] Third One'
168169
170
169171 @pytest.mark.parametrize("backup_count", [1, 3])
170 def test_timed_rotating_file_handler__not_timed_filename_for_current(tmpdir, activation_strategy, backup_count):
172 @pytest.mark.parametrize("preexisting_file", [True, False])
173 def test_timed_rotating_file_handler__not_timed_filename_for_current(
174 tmpdir, activation_strategy, backup_count, preexisting_file
175 ):
171176 basename = str(tmpdir.join('trot.log'))
177
178 if preexisting_file:
179 with open(basename, 'w') as file:
180 file.write('contents')
181 jan_first = time.mktime(datetime(2010, 1, 1).timetuple())
182 os.utime(basename, (jan_first, jan_first))
183
172184 handler = logbook.TimedRotatingFileHandler(
173 basename, backup_count=backup_count,
185 basename,
186 format_string='[{record.time:%H:%M}] {record.message}',
187 backup_count=backup_count,
174188 rollover_format='{basename}{ext}.{timestamp}',
175189 timed_filename_for_current=False,
176190 )
177 handler._timestamp = handler._get_timestamp(datetime(2010, 1, 5))
178 handler.format_string = '[{record.time:%H:%M}] {record.message}'
179191
180192 def fake_record(message, year, month, day, hour=0,
181193 minute=0, second=0):
194206 for x in xrange(20):
195207 handler.handle(fake_record('Last One', 2010, 1, 8, x + 1))
196208
197 files = sorted(x for x in os.listdir(str(tmpdir)) if x.startswith('trot'))
198
199 assert files == ['trot.log'] + ['trot.log.2010-01-0{0}'.format(i)
200 for i in xrange(5, 8)][-backup_count:]
209 computed_files = [x for x in os.listdir(str(tmpdir)) if x.startswith('trot')]
210
211 expected_files = ['trot.log.2010-01-01'] if preexisting_file else []
212 expected_files += ['trot.log.2010-01-0{0}'.format(i) for i in xrange(5, 8)]
213 expected_files += ['trot.log']
214 expected_files = expected_files[-backup_count:]
215
216 assert sorted(computed_files) == sorted(expected_files)
217
201218 with open(str(tmpdir.join('trot.log'))) as f:
202219 assert f.readline().rstrip() == '[01:00] Last One'
203220 assert f.readline().rstrip() == '[02:00] Last One'
00 import re
11
2 import pytest
23
3 def test_regex_matching(active_handler, logger):
4 logger.warn('Hello World!')
5 assert active_handler.has_warning(re.compile('^Hello'))
6 assert (not active_handler.has_warning(re.compile('world$')))
7 assert (not active_handler.has_warning('^Hello World'))
4
5 @pytest.mark.parametrize("level, method", [
6 ("trace", "has_traces"),
7 ("debug", "has_debugs"),
8 ("info", "has_infos"),
9 ("notice", "has_notices"),
10 ("warning", "has_warnings"),
11 ("error", "has_errors"),
12 ("critical", "has_criticals"),
13 ])
14 def test_has_level(active_handler, logger, level, method):
15 log = getattr(logger, level)
16 log('Hello World')
17 assert getattr(active_handler, method)
18
19
20 @pytest.mark.parametrize("level, method", [
21 ("trace", "has_trace"),
22 ("debug", "has_debug"),
23 ("info", "has_info"),
24 ("notice", "has_notice"),
25 ("warning", "has_warning"),
26 ("error", "has_error"),
27 ("critical", "has_critical"),
28 ])
29 def test_regex_matching(active_handler, logger, level, method):
30 log = getattr(logger, level)
31 log('Hello World')
32 has_level_method = getattr(active_handler, method)
33 assert has_level_method(re.compile('^Hello'))
34 assert (not has_level_method(re.compile('world$')))
35 assert (not has_level_method('^Hello World'))
836
937
1038 def test_test_handler_cache(active_handler, logger):