New upstream version 1.4.1
Iñaki Malerba
4 years ago
1 | 1 | ================= |
2 | 2 | |
3 | 3 | Here you can see the full list of changes between each Logbook release. |
4 | ||
5 | Version 1.4.1 | |
6 | ||
7 | Released on October 14th, 2018 | |
8 | ||
9 | - Fixed deprecated regular expression pattern (thanks Tsuyoshi Hombashi) | |
10 | - Fixed TimedRotatingFileHandler rotation (thanks Tucker Beck) | |
4 | 11 | |
5 | 12 | Version 1.4.0 |
6 | 13 | ------------- |
900 | 900 | self.timed_filename_for_current = timed_filename_for_current |
901 | 901 | |
902 | 902 | self._timestamp = self._get_timestamp(_datetime_factory()) |
903 | timed_filename = self.generate_timed_filename(self._timestamp) | |
904 | ||
905 | 903 | if self.timed_filename_for_current: |
906 | filename = timed_filename | |
904 | filename = self.generate_timed_filename(self._timestamp) | |
905 | elif os.path.exists(filename): | |
906 | self._timestamp = self._get_timestamp( | |
907 | datetime.fromtimestamp( | |
908 | os.stat(filename).st_mtime | |
909 | ) | |
910 | ) | |
907 | 911 | |
908 | 912 | FileHandler.__init__(self, filename, mode, encoding, level, |
909 | 913 | format_string, True, filter, bubble) |
931 | 935 | """ |
932 | 936 | directory = os.path.dirname(self._filename) |
933 | 937 | files = [] |
938 | rollover_regex = re.compile(self.rollover_format.format( | |
939 | basename=re.escape(self.basename), | |
940 | timestamp='.+', | |
941 | ext=re.escape(self.ext), | |
942 | )) | |
934 | 943 | for filename in os.listdir(directory): |
935 | 944 | filename = os.path.join(directory, filename) |
936 | regex = self.rollover_format.format( | |
937 | basename=re.escape(self.basename), | |
938 | timestamp='.+', | |
939 | ext=re.escape(self.ext), | |
940 | ) | |
941 | if re.match(regex, filename): | |
945 | if rollover_regex.match(filename): | |
942 | 946 | files.append((os.path.getmtime(filename), filename)) |
943 | 947 | files.sort() |
944 | 948 | if self.backup_count > 1: |
950 | 954 | if self.stream is not None: |
951 | 955 | self.stream.close() |
952 | 956 | |
957 | if ( | |
958 | not self.timed_filename_for_current | |
959 | and os.path.exists(self._filename) | |
960 | ): | |
961 | filename = self.generate_timed_filename(self._timestamp) | |
962 | os.rename(self._filename, filename) | |
963 | ||
953 | 964 | if self.backup_count > 0: |
954 | 965 | for time, filename in self.files_to_delete(): |
955 | 966 | os.remove(filename) |
956 | 967 | |
957 | 968 | if self.timed_filename_for_current: |
958 | 969 | self._filename = self.generate_timed_filename(new_timestamp) |
959 | else: | |
960 | filename = self.generate_timed_filename(self._timestamp) | |
961 | os.rename(self._filename, filename) | |
962 | 970 | self._timestamp = new_timestamp |
963 | 971 | |
964 | 972 | self._open('w') |
1053 | 1061 | def has_debugs(self): |
1054 | 1062 | """`True` if any :data:`DEBUG` records were found.""" |
1055 | 1063 | return any(r.level == DEBUG for r in self.records) |
1056 | ||
1064 | ||
1057 | 1065 | @property |
1058 | 1066 | def has_traces(self): |
1059 | 1067 | """`True` if any :data:`TRACE` records were found.""" |
1106 | 1114 | """ |
1107 | 1115 | kwargs['level'] = DEBUG |
1108 | 1116 | return self._test_for(*args, **kwargs) |
1109 | ||
1117 | ||
1110 | 1118 | def has_trace(self, *args, **kwargs): |
1111 | 1119 | """`True` if a specific :data:`TRACE` log record exists. |
1112 | 1120 |
37 | 37 | else: |
38 | 38 | from urllib.parse import parse_qsl, urlencode |
39 | 39 | |
40 | _ws_re = re.compile(r'(\s+)(?u)') | |
40 | _ws_re = re.compile(r'(\s+)', re.UNICODE) | |
41 | 41 | TWITTER_FORMAT_STRING = u( |
42 | 42 | '[{record.channel}] {record.level_name}: {record.message}') |
43 | 43 | TWITTER_ACCESS_TOKEN_URL = 'https://twitter.com/oauth/access_token' |
0 | 0 | import os |
1 | 1 | import pytest |
2 | import time | |
2 | 3 | from datetime import datetime |
3 | 4 | |
4 | 5 | import logbook |
166 | 167 | assert f.readline().rstrip() == '[01:00] Third One' |
167 | 168 | assert f.readline().rstrip() == '[02:00] Third One' |
168 | 169 | |
170 | ||
169 | 171 | @pytest.mark.parametrize("backup_count", [1, 3]) |
170 | def test_timed_rotating_file_handler__not_timed_filename_for_current(tmpdir, activation_strategy, backup_count): | |
172 | @pytest.mark.parametrize("preexisting_file", [True, False]) | |
173 | def test_timed_rotating_file_handler__not_timed_filename_for_current( | |
174 | tmpdir, activation_strategy, backup_count, preexisting_file | |
175 | ): | |
171 | 176 | basename = str(tmpdir.join('trot.log')) |
177 | ||
178 | if preexisting_file: | |
179 | with open(basename, 'w') as file: | |
180 | file.write('contents') | |
181 | jan_first = time.mktime(datetime(2010, 1, 1).timetuple()) | |
182 | os.utime(basename, (jan_first, jan_first)) | |
183 | ||
172 | 184 | handler = logbook.TimedRotatingFileHandler( |
173 | basename, backup_count=backup_count, | |
185 | basename, | |
186 | format_string='[{record.time:%H:%M}] {record.message}', | |
187 | backup_count=backup_count, | |
174 | 188 | rollover_format='{basename}{ext}.{timestamp}', |
175 | 189 | timed_filename_for_current=False, |
176 | 190 | ) |
177 | handler._timestamp = handler._get_timestamp(datetime(2010, 1, 5)) | |
178 | handler.format_string = '[{record.time:%H:%M}] {record.message}' | |
179 | 191 | |
180 | 192 | def fake_record(message, year, month, day, hour=0, |
181 | 193 | minute=0, second=0): |
194 | 206 | for x in xrange(20): |
195 | 207 | handler.handle(fake_record('Last One', 2010, 1, 8, x + 1)) |
196 | 208 | |
197 | files = sorted(x for x in os.listdir(str(tmpdir)) if x.startswith('trot')) | |
198 | ||
199 | assert files == ['trot.log'] + ['trot.log.2010-01-0{0}'.format(i) | |
200 | for i in xrange(5, 8)][-backup_count:] | |
209 | computed_files = [x for x in os.listdir(str(tmpdir)) if x.startswith('trot')] | |
210 | ||
211 | expected_files = ['trot.log.2010-01-01'] if preexisting_file else [] | |
212 | expected_files += ['trot.log.2010-01-0{0}'.format(i) for i in xrange(5, 8)] | |
213 | expected_files += ['trot.log'] | |
214 | expected_files = expected_files[-backup_count:] | |
215 | ||
216 | assert sorted(computed_files) == sorted(expected_files) | |
217 | ||
201 | 218 | with open(str(tmpdir.join('trot.log'))) as f: |
202 | 219 | assert f.readline().rstrip() == '[01:00] Last One' |
203 | 220 | assert f.readline().rstrip() == '[02:00] Last One' |