Imported Upstream version 0.12.3
Agustin Henze
7 years ago
0 | IF DEFINED CYBUILD ( | |
1 | %WITH_COMPILER% python setup.py bdist_wheel | |
2 | IF "%APPVEYOR_REPO_TAG%"=="true" ( | |
3 | twine upload -u %PYPI_USERNAME% -p %PYPI_PASSWORD% dist\*.whl | |
4 | ) | |
5 | )⏎ |
0 | # Sample script to install Python and pip under Windows | |
1 | # Authors: Olivier Grisel, Jonathan Helmus, Kyle Kastner, and Alex Willmer | |
2 | # License: CC0 1.0 Universal: http://creativecommons.org/publicdomain/zero/1.0/ | |
3 | ||
4 | $MINICONDA_URL = "http://repo.continuum.io/miniconda/" | |
5 | $BASE_URL = "https://www.python.org/ftp/python/" | |
6 | $GET_PIP_URL = "https://bootstrap.pypa.io/get-pip.py" | |
7 | $GET_PIP_PATH = "C:\get-pip.py" | |
8 | ||
9 | $PYTHON_PRERELEASE_REGEX = @" | |
10 | (?x) | |
11 | (?<major>\d+) | |
12 | \. | |
13 | (?<minor>\d+) | |
14 | \. | |
15 | (?<micro>\d+) | |
16 | (?<prerelease>[a-z]{1,2}\d+) | |
17 | "@ | |
18 | ||
19 | ||
20 | function Download ($filename, $url) { | |
21 | $webclient = New-Object System.Net.WebClient | |
22 | ||
23 | $basedir = $pwd.Path + "\" | |
24 | $filepath = $basedir + $filename | |
25 | if (Test-Path $filename) { | |
26 | Write-Host "Reusing" $filepath | |
27 | return $filepath | |
28 | } | |
29 | ||
30 | # Download and retry up to 3 times in case of network transient errors. | |
31 | Write-Host "Downloading" $filename "from" $url | |
32 | $retry_attempts = 2 | |
33 | for ($i = 0; $i -lt $retry_attempts; $i++) { | |
34 | try { | |
35 | $webclient.DownloadFile($url, $filepath) | |
36 | break | |
37 | } | |
38 | Catch [Exception]{ | |
39 | Start-Sleep 1 | |
40 | } | |
41 | } | |
42 | if (Test-Path $filepath) { | |
43 | Write-Host "File saved at" $filepath | |
44 | } else { | |
45 | # Retry once to get the error message if any at the last try | |
46 | $webclient.DownloadFile($url, $filepath) | |
47 | } | |
48 | return $filepath | |
49 | } | |
50 | ||
51 | ||
52 | function ParsePythonVersion ($python_version) { | |
53 | if ($python_version -match $PYTHON_PRERELEASE_REGEX) { | |
54 | return ([int]$matches.major, [int]$matches.minor, [int]$matches.micro, | |
55 | $matches.prerelease) | |
56 | } | |
57 | $version_obj = [version]$python_version | |
58 | return ($version_obj.major, $version_obj.minor, $version_obj.build, "") | |
59 | } | |
60 | ||
61 | ||
62 | function DownloadPython ($python_version, $platform_suffix) { | |
63 | $major, $minor, $micro, $prerelease = ParsePythonVersion $python_version | |
64 | ||
65 | if (($major -le 2 -and $micro -eq 0) ` | |
66 | -or ($major -eq 3 -and $minor -le 2 -and $micro -eq 0) ` | |
67 | ) { | |
68 | $dir = "$major.$minor" | |
69 | $python_version = "$major.$minor$prerelease" | |
70 | } else { | |
71 | $dir = "$major.$minor.$micro" | |
72 | } | |
73 | ||
74 | if ($prerelease) { | |
75 | if (($major -le 2) ` | |
76 | -or ($major -eq 3 -and $minor -eq 1) ` | |
77 | -or ($major -eq 3 -and $minor -eq 2) ` | |
78 | -or ($major -eq 3 -and $minor -eq 3) ` | |
79 | ) { | |
80 | $dir = "$dir/prev" | |
81 | } | |
82 | } | |
83 | ||
84 | if (($major -le 2) -or ($major -le 3 -and $minor -le 4)) { | |
85 | $ext = "msi" | |
86 | if ($platform_suffix) { | |
87 | $platform_suffix = ".$platform_suffix" | |
88 | } | |
89 | } else { | |
90 | $ext = "exe" | |
91 | if ($platform_suffix) { | |
92 | $platform_suffix = "-$platform_suffix" | |
93 | } | |
94 | } | |
95 | ||
96 | $filename = "python-$python_version$platform_suffix.$ext" | |
97 | $url = "$BASE_URL$dir/$filename" | |
98 | $filepath = Download $filename $url | |
99 | return $filepath | |
100 | } | |
101 | ||
102 | ||
103 | function InstallPython ($python_version, $architecture, $python_home) { | |
104 | Write-Host "Installing Python" $python_version "for" $architecture "bit architecture to" $python_home | |
105 | if (Test-Path $python_home) { | |
106 | Write-Host $python_home "already exists, skipping." | |
107 | return $false | |
108 | } | |
109 | if ($architecture -eq "32") { | |
110 | $platform_suffix = "" | |
111 | } else { | |
112 | $platform_suffix = "amd64" | |
113 | } | |
114 | $installer_path = DownloadPython $python_version $platform_suffix | |
115 | $installer_ext = [System.IO.Path]::GetExtension($installer_path) | |
116 | Write-Host "Installing $installer_path to $python_home" | |
117 | $install_log = $python_home + ".log" | |
118 | if ($installer_ext -eq '.msi') { | |
119 | InstallPythonMSI $installer_path $python_home $install_log | |
120 | } else { | |
121 | InstallPythonEXE $installer_path $python_home $install_log | |
122 | } | |
123 | if (Test-Path $python_home) { | |
124 | Write-Host "Python $python_version ($architecture) installation complete" | |
125 | } else { | |
126 | Write-Host "Failed to install Python in $python_home" | |
127 | Get-Content -Path $install_log | |
128 | Exit 1 | |
129 | } | |
130 | } | |
131 | ||
132 | ||
133 | function InstallPythonEXE ($exepath, $python_home, $install_log) { | |
134 | $install_args = "/quiet InstallAllUsers=1 TargetDir=$python_home" | |
135 | RunCommand $exepath $install_args | |
136 | } | |
137 | ||
138 | ||
139 | function InstallPythonMSI ($msipath, $python_home, $install_log) { | |
140 | $install_args = "/qn /log $install_log /i $msipath TARGETDIR=$python_home" | |
141 | $uninstall_args = "/qn /x $msipath" | |
142 | RunCommand "msiexec.exe" $install_args | |
143 | if (-not(Test-Path $python_home)) { | |
144 | Write-Host "Python seems to be installed else-where, reinstalling." | |
145 | RunCommand "msiexec.exe" $uninstall_args | |
146 | RunCommand "msiexec.exe" $install_args | |
147 | } | |
148 | } | |
149 | ||
150 | function RunCommand ($command, $command_args) { | |
151 | Write-Host $command $command_args | |
152 | Start-Process -FilePath $command -ArgumentList $command_args -Wait -Passthru | |
153 | } | |
154 | ||
155 | ||
156 | function InstallPip ($python_home) { | |
157 | $pip_path = $python_home + "\Scripts\pip.exe" | |
158 | $python_path = $python_home + "\python.exe" | |
159 | if (-not(Test-Path $pip_path)) { | |
160 | Write-Host "Installing pip..." | |
161 | $webclient = New-Object System.Net.WebClient | |
162 | $webclient.DownloadFile($GET_PIP_URL, $GET_PIP_PATH) | |
163 | Write-Host "Executing:" $python_path $GET_PIP_PATH | |
164 | & $python_path $GET_PIP_PATH | |
165 | } else { | |
166 | Write-Host "pip already installed." | |
167 | } | |
168 | } | |
169 | ||
170 | ||
171 | function DownloadMiniconda ($python_version, $platform_suffix) { | |
172 | if ($python_version -eq "3.4") { | |
173 | $filename = "Miniconda3-3.5.5-Windows-" + $platform_suffix + ".exe" | |
174 | } else { | |
175 | $filename = "Miniconda-3.5.5-Windows-" + $platform_suffix + ".exe" | |
176 | } | |
177 | $url = $MINICONDA_URL + $filename | |
178 | $filepath = Download $filename $url | |
179 | return $filepath | |
180 | } | |
181 | ||
182 | ||
183 | function InstallMiniconda ($python_version, $architecture, $python_home) { | |
184 | Write-Host "Installing Python" $python_version "for" $architecture "bit architecture to" $python_home | |
185 | if (Test-Path $python_home) { | |
186 | Write-Host $python_home "already exists, skipping." | |
187 | return $false | |
188 | } | |
189 | if ($architecture -eq "32") { | |
190 | $platform_suffix = "x86" | |
191 | } else { | |
192 | $platform_suffix = "x86_64" | |
193 | } | |
194 | $filepath = DownloadMiniconda $python_version $platform_suffix | |
195 | Write-Host "Installing" $filepath "to" $python_home | |
196 | $install_log = $python_home + ".log" | |
197 | $args = "/S /D=$python_home" | |
198 | Write-Host $filepath $args | |
199 | Start-Process -FilePath $filepath -ArgumentList $args -Wait -Passthru | |
200 | if (Test-Path $python_home) { | |
201 | Write-Host "Python $python_version ($architecture) installation complete" | |
202 | } else { | |
203 | Write-Host "Failed to install Python in $python_home" | |
204 | Get-Content -Path $install_log | |
205 | Exit 1 | |
206 | } | |
207 | } | |
208 | ||
209 | ||
210 | function InstallMinicondaPip ($python_home) { | |
211 | $pip_path = $python_home + "\Scripts\pip.exe" | |
212 | $conda_path = $python_home + "\Scripts\conda.exe" | |
213 | if (-not(Test-Path $pip_path)) { | |
214 | Write-Host "Installing pip..." | |
215 | $args = "install --yes pip" | |
216 | Write-Host $conda_path $args | |
217 | Start-Process -FilePath "$conda_path" -ArgumentList $args -Wait -Passthru | |
218 | } else { | |
219 | Write-Host "pip already installed." | |
220 | } | |
221 | } | |
222 | ||
223 | function main () { | |
224 | InstallPython $env:PYTHON_VERSION $env:PYTHON_ARCH $env:PYTHON | |
225 | InstallPip $env:PYTHON | |
226 | } | |
227 | ||
228 | main⏎ |
0 | pip install wheel | |
1 | nuget install redis-64 -excludeversion | |
2 | redis-64\redis-server.exe --service-install | |
3 | redis-64\redis-server.exe --service-start | |
4 | nuget install ZeroMQ | |
5 | %WITH_COMPILER% pip install cython pyzmq | |
6 | python scripts\test_setup.py | |
7 | python setup.py develop | |
8 | IF DEFINED CYBUILD ( | |
9 | cython logbook\_speedups.pyx | |
10 | %WITH_COMPILER% python setup.py build | |
11 | pip install twine | |
12 | ) |
0 | :: To build extensions for 64 bit Python 3, we need to configure environment | |
1 | :: variables to use the MSVC 2010 C++ compilers from GRMSDKX_EN_DVD.iso of: | |
2 | :: MS Windows SDK for Windows 7 and .NET Framework 4 (SDK v7.1) | |
3 | :: | |
4 | :: To build extensions for 64 bit Python 2, we need to configure environment | |
5 | :: variables to use the MSVC 2008 C++ compilers from GRMSDKX_EN_DVD.iso of: | |
6 | :: MS Windows SDK for Windows 7 and .NET Framework 3.5 (SDK v7.0) | |
7 | :: | |
8 | :: 32 bit builds, and 64-bit builds for 3.5 and beyond, do not require specific | |
9 | :: environment configurations. | |
10 | :: | |
11 | :: Note: this script needs to be run with the /E:ON and /V:ON flags for the | |
12 | :: cmd interpreter, at least for (SDK v7.0) | |
13 | :: | |
14 | :: More details at: | |
15 | :: https://github.com/cython/cython/wiki/64BitCythonExtensionsOnWindows | |
16 | :: http://stackoverflow.com/a/13751649/163740 | |
17 | :: | |
18 | :: Author: Olivier Grisel | |
19 | :: License: CC0 1.0 Universal: http://creativecommons.org/publicdomain/zero/1.0/ | |
20 | :: | |
21 | :: Notes about batch files for Python people: | |
22 | :: | |
23 | :: Quotes in values are literally part of the values: | |
24 | :: SET FOO="bar" | |
25 | :: FOO is now five characters long: " b a r " | |
26 | :: If you don't want quotes, don't include them on the right-hand side. | |
27 | :: | |
28 | :: The CALL lines at the end of this file look redundant, but if you move them | |
29 | :: outside of the IF clauses, they do not run properly in the SET_SDK_64==Y | |
30 | :: case, I don't know why. | |
31 | @ECHO OFF | |
32 | ||
33 | SET COMMAND_TO_RUN=%* | |
34 | SET WIN_SDK_ROOT=C:\Program Files\Microsoft SDKs\Windows | |
35 | SET WIN_WDK=c:\Program Files (x86)\Windows Kits\10\Include\wdf | |
36 | ||
37 | :: Extract the major and minor versions, and allow for the minor version to be | |
38 | :: more than 9. This requires the version number to have two dots in it. | |
39 | SET MAJOR_PYTHON_VERSION=%PYTHON_VERSION:~0,1% | |
40 | IF "%PYTHON_VERSION:~3,1%" == "." ( | |
41 | SET MINOR_PYTHON_VERSION=%PYTHON_VERSION:~2,1% | |
42 | ) ELSE ( | |
43 | SET MINOR_PYTHON_VERSION=%PYTHON_VERSION:~2,2% | |
44 | ) | |
45 | ||
46 | :: Based on the Python version, determine what SDK version to use, and whether | |
47 | :: to set the SDK for 64-bit. | |
48 | IF %MAJOR_PYTHON_VERSION% == 2 ( | |
49 | SET WINDOWS_SDK_VERSION="v7.0" | |
50 | SET SET_SDK_64=Y | |
51 | ) ELSE ( | |
52 | IF %MAJOR_PYTHON_VERSION% == 3 ( | |
53 | SET WINDOWS_SDK_VERSION="v7.1" | |
54 | IF %MINOR_PYTHON_VERSION% LEQ 4 ( | |
55 | SET SET_SDK_64=Y | |
56 | ) ELSE ( | |
57 | SET SET_SDK_64=N | |
58 | IF EXIST "%WIN_WDK%" ( | |
59 | :: See: https://connect.microsoft.com/VisualStudio/feedback/details/1610302/ | |
60 | REN "%WIN_WDK%" 0wdf | |
61 | ) | |
62 | ) | |
63 | ) ELSE ( | |
64 | ECHO Unsupported Python version: "%MAJOR_PYTHON_VERSION%" | |
65 | EXIT 1 | |
66 | ) | |
67 | ) | |
68 | ||
69 | IF %PYTHON_ARCH% == 64 ( | |
70 | IF %SET_SDK_64% == Y ( | |
71 | ECHO Configuring Windows SDK %WINDOWS_SDK_VERSION% for Python %MAJOR_PYTHON_VERSION% on a 64 bit architecture | |
72 | SET DISTUTILS_USE_SDK=1 | |
73 | SET MSSdk=1 | |
74 | "%WIN_SDK_ROOT%\%WINDOWS_SDK_VERSION%\Setup\WindowsSdkVer.exe" -q -version:%WINDOWS_SDK_VERSION% | |
75 | "%WIN_SDK_ROOT%\%WINDOWS_SDK_VERSION%\Bin\SetEnv.cmd" /x64 /release | |
76 | ECHO Executing: %COMMAND_TO_RUN% | |
77 | call %COMMAND_TO_RUN% || EXIT 1 | |
78 | ) ELSE ( | |
79 | ECHO Using default MSVC build environment for 64 bit architecture | |
80 | ECHO Executing: %COMMAND_TO_RUN% | |
81 | call %COMMAND_TO_RUN% || EXIT 1 | |
82 | ) | |
83 | ) ELSE ( | |
84 | ECHO Using default MSVC build environment for 32 bit architecture | |
85 | ECHO Executing: %COMMAND_TO_RUN% | |
86 | call %COMMAND_TO_RUN% || EXIT 1 | |
87 | )⏎ |
0 | 0 | language: python |
1 | ||
2 | 1 | services: |
3 | - redis-server | |
4 | ||
2 | - redis-server | |
5 | 3 | python: |
6 | - "2.6" | |
7 | - "2.7" | |
8 | - "3.2" | |
9 | - "3.3" | |
10 | - "3.4" | |
11 | - "pypy" | |
12 | - "pypy3" | |
13 | ||
4 | - '2.6' | |
5 | - '2.7' | |
6 | - '3.2' | |
7 | - '3.3' | |
8 | - '3.4' | |
9 | - '3.5' | |
10 | - pypy | |
11 | - pypy3 | |
14 | 12 | install: |
15 | # this fixes SemLock issues on travis | |
16 | - "sudo rm -rf /dev/shm && sudo ln -s /run/shm /dev/shm" | |
17 | - "sudo apt-add-repository -y ppa:chris-lea/zeromq" | |
18 | - "sudo apt-get update" | |
19 | - "sudo apt-get install -y libzmq3-dev" | |
20 | - "pip install cython redis" | |
21 | - "easy_install pyzmq" | |
22 | - "make test_setup" | |
23 | - "python setup.py develop" | |
24 | ||
13 | - sudo rm -rf /dev/shm && sudo ln -s /run/shm /dev/shm | |
14 | - sudo apt-add-repository -y ppa:chris-lea/zeromq | |
15 | - sudo apt-get update | |
16 | - sudo apt-get install -y libzmq3-dev | |
17 | - pip install cython redis | |
18 | - easy_install pyzmq | |
19 | - make test_setup | |
20 | - python setup.py develop | |
25 | 21 | env: |
26 | - COMMAND="make test" | |
27 | - COMMAND="make cybuild test" | |
28 | ||
29 | script: "$COMMAND" | |
30 | ||
22 | - COMMAND="make test" | |
23 | - COMMAND="make cybuild test" | |
24 | script: $COMMAND | |
31 | 25 | matrix: |
32 | 26 | exclude: |
33 | - python: "pypy" | |
34 | env: COMMAND="make cybuild test" | |
35 | - python: "pypy3" | |
36 | env: COMMAND="make cybuild test" | |
37 | ||
27 | - python: pypy | |
28 | env: COMMAND="make cybuild test" | |
29 | - python: pypy3 | |
30 | env: COMMAND="make cybuild test" | |
38 | 31 | notifications: |
39 | email: | |
32 | email: | |
40 | 33 | recipients: |
41 | - vmalloc@gmail.com | |
34 | - vmalloc@gmail.com | |
42 | 35 | irc: |
43 | 36 | channels: |
44 | - "chat.freenode.net#pocoo" | |
37 | - chat.freenode.net#pocoo | |
45 | 38 | on_success: change |
46 | 39 | on_failure: always |
47 | 40 | use_notice: true |
48 | 41 | skip_join: true |
42 | before_deploy: | |
43 | - make logbook/_speedups.so | |
44 | deploy: | |
45 | provider: pypi | |
46 | user: vmalloc | |
47 | password: | |
48 | secure: WFmuAbtBDIkeZArIFQRCwyO1TdvF2PaZpo75r3mFgnY+aWm75cdgjZKoNqVprF/f+v9EsX2kDdQ7ZfuhMLgP8MNziB+ty7579ZDGwh64jGoi+DIoeblAFu5xNAqjvhie540uCE8KySk9s+Pq5EpOA5w18V4zxTw+h6tnBQ0M9cQ= | |
49 | on: | |
50 | tags: true | |
51 | repo: getlogbook/logbook | |
52 | distributions: "sdist bdist_egg" |
14 | 14 | - Roman Valls Guimera |
15 | 15 | - Guillermo Carrasco Hernández |
16 | 16 | - Raphaël Vinot |
17 | - Rotem Yaari | |
18 | - Frazer McLean | |
19 |
1 | 1 | ================= |
2 | 2 | |
3 | 3 | Here you can see the full list of changes between each Logbook release. |
4 | ||
5 | Version 0.12.0 | |
6 | -------------- | |
7 | ||
8 | Released on November 24th 2015 | |
9 | ||
10 | - Added logbook.utils.deprecated to automatically emit warnings when certain functions are called (Thanks Ayala Shachar) | |
11 | - Added logbook.utils.suppressed_deprecations context to temporarily suppress deprecations (Thanks Ayala Shachar) | |
12 | - Added logbook.utils.logged_if_slow_context to emit logs when certain operations exceed a time threshold (Thanks Ayala Shachar) | |
13 | - Many PEP8 fixes and code cleanups (thanks Taranjeet Singh and Frazer McLean) | |
14 | - TestHandler constructor now receives an optional `force_heavy_init=True`, forcing all records to heavy-initialize | |
15 | ||
16 | ||
17 | Version 0.11.3 | |
18 | -------------- | |
19 | ||
20 | Released on November 5th 2015 | |
21 | ||
22 | - Windows-specific fixes and CI configuration (Thanks Frazer McLean) | |
23 | - Several Unicode-specific fixes (Thanks Frazer McLean) | |
24 | - Documentation cleanups | |
25 | ||
26 | Version 0.11.2 | |
27 | -------------- | |
28 | ||
29 | Released on September 29th 2015 | |
30 | ||
31 | - Fix importing issue with SQLAlchemy ticketing handler | |
32 | ||
33 | Version 0.11.0 | |
34 | -------------- | |
35 | ||
36 | Released on September 29th 2015 | |
37 | ||
38 | - Added TRACE log level for enabling logs below DEBUG | |
39 | - Improved SQLAlchemy session management (thanks @fintan) | |
40 | - Removed the ``bubble`` argument from NullHandler, preventing many future confusions | |
41 | ||
42 | Version 0.10.1 | |
43 | -------------- | |
44 | ||
45 | Released on August 4th 2015 | |
46 | ||
47 | - Small bugfix supporting exc_info=False without breaking formatting | |
4 | 48 | |
5 | 49 | Version 0.10.0 |
6 | 50 | -------------- |
8 | 8 | @python scripts/test_setup.py |
9 | 9 | |
10 | 10 | test: |
11 | @py.test tests | |
11 | @py.test -r s tests | |
12 | 12 | |
13 | 13 | toxtest: |
14 | 14 | @tox |
20 | 20 | bench: |
21 | 21 | @python benchmark/run.py |
22 | 22 | |
23 | upload-docs: docs | |
24 | python setup.py upload_docs | |
25 | ||
26 | 23 | docs: |
27 | 24 | make -C docs html SPHINXOPTS=-Aonline=1 |
28 | 25 | |
29 | release: logbook/_speedups.so upload-docs | |
26 | release: logbook/_speedups.so | |
30 | 27 | python scripts/make-release.py |
31 | 28 | |
32 | 29 | logbook/_speedups.so: logbook/_speedups.pyx |
0 | # Welcome to Logbook | |
1 | ||
2 | | | | | |
3 | |--------------------|-----------------------------| | |
4 | | Travis | [![Build Status][ti]][tl] | | |
5 | | AppVeyor | [![Build Status][ai]][al] | | |
6 | | Supported Versions | ![Supported Versions][vi] | | |
7 | | Downloads | ![Downloads][di] | | |
8 | | Latest Version | [![Latest Version][pi]][pl] | | |
9 | ||
10 | ||
11 | Logbook is a nice logging replacement. | |
12 | ||
13 | It should be easy to setup, use and configure and support web applications :) | |
14 | ||
15 | For more information: http://logbook.readthedocs.org | |
16 | ||
17 | [ti]: https://secure.travis-ci.org/getlogbook/logbook.svg?branch=master | |
18 | [tl]: https://travis-ci.org/getlogbook/logbook | |
19 | [ai]: https://ci.appveyor.com/api/projects/status/quu99exa26e06npp?svg=true | |
20 | [vi]: https://img.shields.io/pypi/pyversions/logbook.svg | |
21 | [di]: https://img.shields.io/pypi/dm/logbook.svg | |
22 | [al]: https://ci.appveyor.com/project/vmalloc/logbook | |
23 | [pi]: https://img.shields.io/pypi/v/logbook.svg | |
24 | [pl]: https://pypi.python.org/pypi/Logbook |
0 | Welcome to Logbook | |
1 | ================== | |
2 | ||
3 | .. image:: https://secure.travis-ci.org/mitsuhiko/logbook.png | |
4 | :target: https://travis-ci.org/mitsuhiko/logbook | |
5 | ||
6 | .. image:: https://pypip.in/d/Logbook/badge.png | |
7 | :target: https://crate.io/packages/Logbook | |
8 | ||
9 | .. image:: https://pypip.in/v/Logbook/badge.png | |
10 | :target: https://crate.io/packages/Logbook | |
11 | ||
12 | Logbook is a nice logging replacement. | |
13 | ||
14 | It should be easy to setup, use and configure and support web applications :) | |
15 | ||
16 | For more information look at http://pythonhosted.org/Logbook |
0 | cache: | |
1 | - C:\Users\appveyor\AppData\Local\pip\Cache\wheels | |
2 | ||
3 | environment: | |
4 | global: | |
5 | # SDK v7.0 MSVC Express 2008's SetEnv.cmd script will fail if the | |
6 | # /E:ON and /V:ON options are not enabled in the batch script intepreter | |
7 | # See: http://stackoverflow.com/a/13751649/163740 | |
8 | WITH_COMPILER: "cmd /E:ON /V:ON /C .\\.appveyor\\run_with_compiler.cmd" | |
9 | PYPI_USERNAME: | |
10 | secure: ixvjwUN/HsSfGkU3OvtQ8Q== | |
11 | PYPI_PASSWORD: | |
12 | secure: KOr+oEHZJmo1el3bT+ivmQ== | |
13 | ||
14 | matrix: | |
15 | # Python 2.6.6 is the latest Python 2.6 with a Windows installer | |
16 | # See: https://github.com/ogrisel/python-appveyor-demo/issues/10 | |
17 | ||
18 | - PYTHON: "C:\\Python266" | |
19 | PYTHON_VERSION: "2.6.6" | |
20 | PYTHON_ARCH: "32" | |
21 | ||
22 | - PYTHON: "C:\\Python266" | |
23 | PYTHON_VERSION: "2.6.6" | |
24 | PYTHON_ARCH: "32" | |
25 | CYBUILD: "TRUE" | |
26 | ||
27 | - PYTHON: "C:\\Python266-x64" | |
28 | PYTHON_VERSION: "2.6.6" | |
29 | PYTHON_ARCH: "64" | |
30 | ||
31 | - PYTHON: "C:\\Python266-x64" | |
32 | PYTHON_VERSION: "2.6.6" | |
33 | PYTHON_ARCH: "64" | |
34 | CYBUILD: "TRUE" | |
35 | ||
36 | # Pre-installed Python versions, which Appveyor may upgrade to | |
37 | # a later point release. | |
38 | # See: http://www.appveyor.com/docs/installed-software#python | |
39 | ||
40 | - PYTHON: "C:\\Python27" | |
41 | PYTHON_VERSION: "2.7.x" | |
42 | PYTHON_ARCH: "32" | |
43 | ||
44 | - PYTHON: "C:\\Python27" | |
45 | PYTHON_VERSION: "2.7.x" | |
46 | PYTHON_ARCH: "32" | |
47 | CYBUILD: "TRUE" | |
48 | ||
49 | - PYTHON: "C:\\Python27-x64" | |
50 | PYTHON_VERSION: "2.7.x" | |
51 | PYTHON_ARCH: "64" | |
52 | ||
53 | - PYTHON: "C:\\Python27-x64" | |
54 | PYTHON_VERSION: "2.7.x" | |
55 | PYTHON_ARCH: "64" | |
56 | CYBUILD: "TRUE" | |
57 | ||
58 | # Python 3.2 isn't preinstalled | |
59 | ||
60 | - PYTHON: "C:\\Python325" | |
61 | PYTHON_VERSION: "3.2.5" | |
62 | PYTHON_ARCH: "32" | |
63 | ||
64 | - PYTHON: "C:\\Python325" | |
65 | PYTHON_VERSION: "3.2.5" | |
66 | PYTHON_ARCH: "32" | |
67 | CYBUILD: "TRUE" | |
68 | ||
69 | - PYTHON: "C:\\Python325-x64" | |
70 | PYTHON_VERSION: "3.2.5" | |
71 | PYTHON_ARCH: "64" | |
72 | ||
73 | - PYTHON: "C:\\Python325-x64" | |
74 | PYTHON_VERSION: "3.2.5" | |
75 | PYTHON_ARCH: "64" | |
76 | CYBUILD: "TRUE" | |
77 | ||
78 | # Pre-installed Python versions, which Appveyor may upgrade to | |
79 | # a later point release. | |
80 | # See: http://www.appveyor.com/docs/installed-software#python | |
81 | ||
82 | - PYTHON: "C:\\Python33" | |
83 | PYTHON_VERSION: "3.3.x" | |
84 | PYTHON_ARCH: "32" | |
85 | ||
86 | - PYTHON: "C:\\Python33" | |
87 | PYTHON_VERSION: "3.3.x" | |
88 | PYTHON_ARCH: "32" | |
89 | CYBUILD: "TRUE" | |
90 | ||
91 | - PYTHON: "C:\\Python33-x64" | |
92 | PYTHON_VERSION: "3.3.x" | |
93 | PYTHON_ARCH: "64" | |
94 | ||
95 | - PYTHON: "C:\\Python33-x64" | |
96 | PYTHON_VERSION: "3.3.x" | |
97 | PYTHON_ARCH: "64" | |
98 | CYBUILD: "TRUE" | |
99 | ||
100 | - PYTHON: "C:\\Python34" | |
101 | PYTHON_VERSION: "3.4.x" | |
102 | PYTHON_ARCH: "32" | |
103 | ||
104 | - PYTHON: "C:\\Python34" | |
105 | PYTHON_VERSION: "3.4.x" | |
106 | PYTHON_ARCH: "32" | |
107 | CYBUILD: "TRUE" | |
108 | ||
109 | - PYTHON: "C:\\Python34-x64" | |
110 | PYTHON_VERSION: "3.4.x" | |
111 | PYTHON_ARCH: "64" | |
112 | ||
113 | - PYTHON: "C:\\Python34-x64" | |
114 | PYTHON_VERSION: "3.4.x" | |
115 | PYTHON_ARCH: "64" | |
116 | CYBUILD: "TRUE" | |
117 | ||
118 | - PYTHON: "C:\\Python35" | |
119 | PYTHON_VERSION: "3.5.x" | |
120 | PYTHON_ARCH: "32" | |
121 | ||
122 | - PYTHON: "C:\\Python35" | |
123 | PYTHON_VERSION: "3.5.x" | |
124 | PYTHON_ARCH: "32" | |
125 | CYBUILD: "TRUE" | |
126 | ||
127 | - PYTHON: "C:\\Python35-x64" | |
128 | PYTHON_VERSION: "3.5.x" | |
129 | PYTHON_ARCH: "64" | |
130 | ||
131 | - PYTHON: "C:\\Python35-x64" | |
132 | PYTHON_VERSION: "3.5.x" | |
133 | PYTHON_ARCH: "64" | |
134 | CYBUILD: "TRUE" | |
135 | ||
136 | ||
137 | init: | |
138 | - echo %PYTHON% %PYTHON_VERSION% %PYTHON_ARCH% | |
139 | - set PATH=%PYTHON%;%PYTHON%\Scripts;%PATH% | |
140 | ||
141 | install: | |
142 | - powershell .appveyor\\install.ps1 | |
143 | - ".appveyor\\prepare.bat" | |
144 | - ps: if (Test-Path Env:\CYBUILD) {Copy-Item build\*\logbook\*.pyd logbook\} | |
145 | ||
146 | build: off | |
147 | ||
148 | test_script: | |
149 | - py.test -r s tests | |
150 | ||
151 | after_test: | |
152 | - ".appveyor\\after_test.bat" | |
153 | ||
154 | artifacts: | |
155 | # Archive the generated packages in the ci.appveyor.com build report. | |
156 | - path: dist\* | |
157 | ||
158 | deploy: | |
159 | description: '' | |
160 | provider: GitHub | |
161 | auth_token: | |
162 | secure: 0yLUo/V+wwSvSFk9nBW/77RN9iTjJA1B5p/TM1XgVLPPFEZWkH756jyJ0FOmtJPt | |
163 | artifact: /.*\.whl/ | |
164 | draft: true | |
165 | prerelease: false | |
166 | on: | |
167 | appveyor_repo_tag: true |
0 | 0 | """Tests basic stack manipulation performance""" |
1 | 1 | from logbook import Handler, NullHandler, StreamHandler, FileHandler, \ |
2 | ERROR, WARNING | |
2 | ERROR, WARNING | |
3 | 3 | from tempfile import NamedTemporaryFile |
4 | 4 | from cStringIO import StringIO |
5 | 5 |
39 | 39 | |
40 | 40 | def bench_wrapper(use_gevent=False): |
41 | 41 | print '=' * 80 |
42 | print 'Running benchmark with Logbook %s (gevent enabled=%s)' % (version, use_gevent) | |
42 | print 'Running benchmark with Logbook %s (gevent enabled=%s)' % \ | |
43 | (version, use_gevent) | |
43 | 44 | print '-' * 80 |
44 | 45 | os.chdir(bench_directory) |
45 | 46 | for bench in list_benchmarks(): |
17 | 17 | :members: |
18 | 18 | :inherited-members: |
19 | 19 | |
20 | Redis | |
21 | ----- | |
20 | AMQP Message Queues | |
21 | ------------------- | |
22 | 22 | |
23 | .. autoclass:: RedisHandler | |
23 | .. autoclass:: MessageQueueHandler | |
24 | :members: | |
25 | ||
26 | .. autoclass:: MessageQueueSubscriber | |
24 | 27 | :members: |
25 | 28 | |
26 | 29 | MultiProcessing |
0 | 0 | Utilities |
1 | 1 | ========= |
2 | ||
3 | Misc. Utilities | |
4 | --------------- | |
2 | 5 | |
3 | 6 | This documents general purpose utility functions available in Logbook. |
4 | 7 | |
25 | 28 | .. autofunction:: log |
26 | 29 | |
27 | 30 | .. autofunction:: set_datetime_format |
31 | ||
32 | Slow Operations Logging | |
33 | ----------------------- | |
34 | ||
35 | .. module:: logbook.utils | |
36 | .. autofunction:: logged_if_slow_context | |
37 | ||
38 | ||
39 | Deprecations | |
40 | ------------ | |
41 | ||
42 | .. autofunction:: deprecated | |
43 | ||
44 | .. autofunction:: suppressed_deprecations | |
45 |
18 | 18 | |
19 | 19 | This also means you don't have to call :func:`logging.basicConfig`: |
20 | 20 | |
21 | >>> from logbook.compat import redirect_logging | |
21 | >>> from logbook.compat import redirect_logging, StreamHandler | |
22 | >>> import sys | |
23 | >>> StreamHandler(sys.stdout).push_application() | |
22 | 24 | >>> redirect_logging() |
23 | 25 | >>> from logging import getLogger |
24 | 26 | >>> log = getLogger('My Logger') |
25 | 27 | >>> log.warn('This is a warning') |
26 | [2010-07-25 00:24] WARNING: My Logger: This is a warning | |
28 | [2015-10-05 19:13:37.524346] WARNING: My Logger: This is a warning | |
27 | 29 | |
28 | 30 | Advanced Setup |
29 | 31 | -------------- |
34 | 36 | active logbook handlers. This handler can then be added to specific logging |
35 | 37 | loggers if you want: |
36 | 38 | |
37 | >>> from logging import getLogger | |
39 | >>> from logging import getLogger, StreamHandler | |
40 | >>> import sys | |
41 | >>> StreamHandler(sys.stdout).push_application() | |
38 | 42 | >>> mylog = getLogger('My Log') |
39 | 43 | >>> from logbook.compat import RedirectLoggingHandler |
40 | 44 | >>> mylog.addHandler(RedirectLoggingHandler()) |
42 | 46 | >>> otherlog.warn('logging is deprecated') |
43 | 47 | No handlers could be found for logger "Other Log" |
44 | 48 | >>> mylog.warn('but logbook is awesome') |
45 | [2010-07-25 00:29] WARNING: My Log: but logbook is awesome | |
49 | [2015-10-05 19:13:37.524346] WARNING: My Log: but logbook is awesome | |
46 | 50 | |
47 | 51 | Reverse Redirects |
48 | 52 | ----------------- |
2 | 2 | # Logbook documentation build configuration file, created by |
3 | 3 | # sphinx-quickstart on Fri Jul 23 16:54:49 2010. |
4 | 4 | # |
5 | # This file is execfile()d with the current directory set to its containing dir. | |
5 | # This file is execfile()d with the current directory set to its containing | |
6 | # dir. | |
6 | 7 | # |
7 | 8 | # Note that not all possible configuration values are present in this |
8 | 9 | # autogenerated file. |
10 | 11 | # All configuration values have a default; values that are commented out |
11 | 12 | # serve to show the default. |
12 | 13 | |
13 | import sys, os | |
14 | import sys | |
15 | import os | |
14 | 16 | |
15 | 17 | # If extensions (or modules to document with autodoc) are in another directory, |
16 | 18 | # add these directories to sys.path here. If the directory is relative to the |
17 | 19 | # documentation root, use os.path.abspath to make it absolute, like shown here. |
18 | 20 | sys.path.extend((os.path.abspath('.'), os.path.abspath('..'))) |
19 | 21 | |
20 | # -- General configuration ----------------------------------------------------- | |
22 | # -- General configuration ---------------------------------------------------- | |
21 | 23 | |
22 | 24 | # If your documentation needs a minimal Sphinx version, state it here. |
23 | #needs_sphinx = '1.0' | |
24 | ||
25 | # Add any Sphinx extension module names here, as strings. They can be extensions | |
26 | # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. | |
25 | # needs_sphinx = '1.0' | |
26 | ||
27 | # Add any Sphinx extension module names here, as strings. They can be | |
28 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. | |
27 | 29 | extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx'] |
28 | 30 | |
29 | 31 | # Add any paths that contain templates here, relative to this directory. |
33 | 35 | source_suffix = '.rst' |
34 | 36 | |
35 | 37 | # The encoding of source files. |
36 | #source_encoding = 'utf-8-sig' | |
38 | # source_encoding = 'utf-8-sig' | |
37 | 39 | |
38 | 40 | # The master toctree document. |
39 | 41 | master_doc = 'index' |
47 | 49 | # built documents. |
48 | 50 | # |
49 | 51 | # The short X.Y version. |
50 | version = '0.10.0' | |
51 | # The full version, including alpha/beta/rc tags. | |
52 | release = '0.10.0' | |
52 | with open(os.path.join(os.path.dirname(__file__), "..", "logbook", "__version__.py")) as version_file: | |
53 | # can't use import here... | |
54 | version = release = version_file.read().strip().split("=")[1].strip()[1:-1] | |
53 | 55 | |
54 | 56 | # The language for content autogenerated by Sphinx. Refer to documentation |
55 | 57 | # for a list of supported languages. |
56 | #language = None | |
58 | # language = None | |
57 | 59 | |
58 | 60 | # There are two options for replacing |today|: either, you set today to some |
59 | 61 | # non-false value, then it is used: |
60 | #today = '' | |
62 | # today = '' | |
61 | 63 | # Else, today_fmt is used as the format for a strftime call. |
62 | #today_fmt = '%B %d, %Y' | |
64 | # today_fmt = '%B %d, %Y' | |
63 | 65 | |
64 | 66 | # List of patterns, relative to source directory, that match files and |
65 | 67 | # directories to ignore when looking for source files. |
66 | 68 | exclude_patterns = ['_build'] |
67 | 69 | |
68 | # The reST default role (used for this markup: `text`) to use for all documents. | |
69 | #default_role = None | |
70 | # The reST default role (used for this markup: `text`) to use for all | |
71 | # documents. | |
72 | # default_role = None | |
70 | 73 | |
71 | 74 | # If true, '()' will be appended to :func: etc. cross-reference text. |
72 | #add_function_parentheses = True | |
75 | # add_function_parentheses = True | |
73 | 76 | |
74 | 77 | # If true, the current module name will be prepended to all description |
75 | 78 | # unit titles (such as .. function::). |
76 | #add_module_names = True | |
79 | # add_module_names = True | |
77 | 80 | |
78 | 81 | # If true, sectionauthor and moduleauthor directives will be shown in the |
79 | 82 | # output. They are ignored by default. |
80 | #show_authors = False | |
83 | # show_authors = False | |
81 | 84 | |
82 | 85 | # The name of the Pygments (syntax highlighting) style to use. |
83 | 86 | pygments_style = 'sphinx' |
84 | 87 | |
85 | 88 | # A list of ignored prefixes for module index sorting. |
86 | #modindex_common_prefix = [] | |
87 | ||
88 | ||
89 | # -- Options for HTML output --------------------------------------------------- | |
89 | # modindex_common_prefix = [] | |
90 | ||
91 | ||
92 | # -- Options for HTML output -------------------------------------------------- | |
90 | 93 | |
91 | 94 | # The theme to use for HTML and HTML Help pages. See the documentation for |
92 | 95 | # a list of builtin themes. |
111 | 114 | |
112 | 115 | # The name of an image file (relative to this directory) to place at the top |
113 | 116 | # of the sidebar. |
114 | #html_logo = None | |
117 | # html_logo = None | |
115 | 118 | |
116 | 119 | # The name of an image file (within the static path) to use as favicon of the |
117 | 120 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 |
118 | 121 | # pixels large. |
119 | #html_favicon = None | |
122 | # html_favicon = None | |
120 | 123 | |
121 | 124 | # Add any paths that contain custom static files (such as style sheets) here, |
122 | 125 | # relative to this directory. They are copied after the builtin static files, |
123 | 126 | # so a file named "default.css" will overwrite the builtin "default.css". |
124 | #html_static_path = ['_static'] | |
127 | # html_static_path = ['_static'] | |
125 | 128 | |
126 | 129 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, |
127 | 130 | # using the given strftime format. |
128 | #html_last_updated_fmt = '%b %d, %Y' | |
131 | # html_last_updated_fmt = '%b %d, %Y' | |
129 | 132 | |
130 | 133 | # If true, SmartyPants will be used to convert quotes and dashes to |
131 | 134 | # typographically correct entities. |
132 | #html_use_smartypants = True | |
135 | # html_use_smartypants = True | |
133 | 136 | |
134 | 137 | # Custom sidebar templates, maps document names to template names. |
135 | #html_sidebars = {} | |
138 | # html_sidebars = {} | |
136 | 139 | |
137 | 140 | # Additional templates that should be rendered to pages, maps page names to |
138 | 141 | # template names. |
139 | #html_additional_pages = {} | |
142 | # html_additional_pages = {} | |
140 | 143 | |
141 | 144 | # If false, no module index is generated. |
142 | #html_domain_indices = True | |
145 | # html_domain_indices = True | |
143 | 146 | |
144 | 147 | # If false, no index is generated. |
145 | #html_use_index = True | |
148 | # html_use_index = True | |
146 | 149 | |
147 | 150 | # If true, the index is split into individual pages for each letter. |
148 | #html_split_index = False | |
151 | # html_split_index = False | |
149 | 152 | |
150 | 153 | # If true, links to the reST sources are added to the pages. |
151 | #html_show_sourcelink = True | |
154 | # html_show_sourcelink = True | |
152 | 155 | |
153 | 156 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. |
154 | #html_show_sphinx = True | |
157 | # html_show_sphinx = True | |
155 | 158 | |
156 | 159 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. |
157 | #html_show_copyright = True | |
160 | # html_show_copyright = True | |
158 | 161 | |
159 | 162 | html_add_permalinks = False |
160 | 163 | |
161 | 164 | # If true, an OpenSearch description file will be output, and all pages will |
162 | 165 | # contain a <link> tag referring to it. The value of this option must be the |
163 | 166 | # base URL from which the finished HTML is served. |
164 | #html_use_opensearch = '' | |
167 | # html_use_opensearch = '' | |
165 | 168 | |
166 | 169 | # If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml"). |
167 | #html_file_suffix = '' | |
170 | # html_file_suffix = '' | |
168 | 171 | |
169 | 172 | # Output file base name for HTML help builder. |
170 | 173 | htmlhelp_basename = 'Logbookdoc' |
171 | 174 | |
172 | 175 | |
173 | # -- Options for LaTeX output -------------------------------------------------- | |
176 | # -- Options for LaTeX output ------------------------------------------------- | |
174 | 177 | |
175 | 178 | # The paper size ('letter' or 'a4'). |
176 | #latex_paper_size = 'letter' | |
179 | # latex_paper_size = 'letter' | |
177 | 180 | |
178 | 181 | # The font size ('10pt', '11pt' or '12pt'). |
179 | #latex_font_size = '10pt' | |
182 | # latex_font_size = '10pt' | |
180 | 183 | |
181 | 184 | # Grouping the document tree into LaTeX files. List of tuples |
182 | # (source start file, target name, title, author, documentclass [howto/manual]). | |
185 | # (source start file, target name, title, author, | |
186 | # documentclass [howto/manual]). | |
183 | 187 | latex_documents = [ |
184 | ('index', 'Logbook.tex', u'Logbook Documentation', | |
185 | u'Armin Ronacher, Georg Brandl', 'manual'), | |
188 | ('index', 'Logbook.tex', u'Logbook Documentation', | |
189 | u'Armin Ronacher, Georg Brandl', 'manual'), | |
186 | 190 | ] |
187 | 191 | |
188 | 192 | # The name of an image file (relative to this directory) to place at the top of |
189 | 193 | # the title page. |
190 | #latex_logo = None | |
194 | # latex_logo = None | |
191 | 195 | |
192 | 196 | # For "manual" documents, if this is true, then toplevel headings are parts, |
193 | 197 | # not chapters. |
194 | #latex_use_parts = False | |
198 | # latex_use_parts = False | |
195 | 199 | |
196 | 200 | # If true, show page references after internal links. |
197 | #latex_show_pagerefs = False | |
201 | # latex_show_pagerefs = False | |
198 | 202 | |
199 | 203 | # If true, show URL addresses after external links. |
200 | #latex_show_urls = False | |
204 | # latex_show_urls = False | |
201 | 205 | |
202 | 206 | # Additional stuff for the LaTeX preamble. |
203 | #latex_preamble = '' | |
207 | # latex_preamble = '' | |
204 | 208 | |
205 | 209 | # Documents to append as an appendix to all manuals. |
206 | #latex_appendices = [] | |
210 | # latex_appendices = [] | |
207 | 211 | |
208 | 212 | # If false, no module index is generated. |
209 | #latex_domain_indices = True | |
210 | ||
211 | ||
212 | # -- Options for manual page output -------------------------------------------- | |
213 | # latex_domain_indices = True | |
214 | ||
215 | ||
216 | # -- Options for manual page output ------------------------------------------- | |
213 | 217 | |
214 | 218 | # One entry per manual page. List of tuples |
215 | 219 | # (source start file, name, description, authors, manual section). |
50 | 50 | default dispatching can be triggered from a function |
51 | 51 | :func:`~logbook.base.dispatch_record`: |
52 | 52 | |
53 | >>> from logbook import dispatch_record, LogRecord, INFO | |
53 | >>> from logbook import dispatch_record, LogRecord, INFO, StreamHandler | |
54 | >>> import sys | |
54 | 55 | >>> record = LogRecord('My channel', INFO, 'Hello World!') |
55 | 56 | >>> dispatch_record(record) |
56 | [2010-09-04 15:56] INFO: My channel: Hello World! | |
57 | [2015-10-05 19:18:52.211472] INFO: My channel: Hello World! | |
57 | 58 | |
58 | 59 | It is pretty common for log records to be created without a dispatcher. |
59 | 60 | Here some common use cases for log records without a dispatcher: |
106 | 106 | |
107 | 107 | This is how easy it is to get started with Logbook:: |
108 | 108 | |
109 | from logbook import warn | |
109 | from logbook import warn, StreamHandler | |
110 | import sys | |
111 | StreamHandler(sys.stdout).push_application() | |
110 | 112 | warn('This is a warning') |
111 | ||
112 | That will use the default logging channel. But you can create as many as | |
113 | you like:: | |
114 | ||
115 | from logbook import Logger | |
116 | log = Logger('My Logger') | |
117 | log.warn('This is a warning') | |
118 | 113 | |
119 | 114 | Roadmap |
120 | 115 | ------- |
0 | 0 | Welcome to Logbook |
1 | 1 | ================== |
2 | 2 | |
3 | Logbook is a logging sytem for Python that replaces the standard library's | |
3 | Logbook is a logging system for Python that replaces the standard library's | |
4 | 4 | logging module. It was designed with both complex and simple applications |
5 | 5 | in mind and the idea to make logging fun: |
6 | 6 | |
7 | >>> from logbook import Logger | |
7 | >>> from logbook import Logger, StreamHandler | |
8 | >>> import sys | |
9 | >>> StreamHandler(sys.stdout).push_application() | |
8 | 10 | >>> log = Logger('Logbook') |
9 | 11 | >>> log.info('Hello, World!') |
10 | [2010-07-23 16:34] INFO: Logbook: Hello, World! | |
12 | [2015-10-05 18:55:56.937141] INFO: Logbook: Hello, World! | |
11 | 13 | |
12 | 14 | What makes it fun? What about getting log messages on your phone or |
13 | 15 | desktop notification system? :ref:`Logbook can do that <notifiers>`. |
48 | 50 | * IRC: ``#pocoo`` on freenode |
49 | 51 | |
50 | 52 | .. _Download from PyPI: http://pypi.python.org/pypi/Logbook |
51 | .. _Master repository on GitHub: https://github.com/mitsuhiko/logbook | |
53 | .. _Master repository on GitHub: https://github.com/getlogbook/logbook | |
52 | 54 | .. _Mailing list: http://groups.google.com/group/pocoo-libs |
5 | 5 | Logbook makes it very easy to get started with logging. Just import the logger |
6 | 6 | class, create yourself a logger and you are set: |
7 | 7 | |
8 | >>> from logbook import Logger | |
8 | >>> from logbook import Logger, StreamHandler | |
9 | >>> import sys | |
10 | >>> StreamHandler(sys.stdout).push_application() | |
9 | 11 | >>> log = Logger('My Awesome Logger') |
10 | 12 | >>> log.warn('This is too cool for stdlib') |
11 | [2010-07-23 16:34:42.687111] WARNING: My Awesome Logger: This is too cool for stdlib | |
13 | [2015-10-05 19:02:03.575723] WARNING: My Awesome Logger: This is too cool for stdlib | |
12 | 14 | |
13 | 15 | A logger is a so-called :class:`~logbook.base.RecordDispatcher`, which is |
14 | 16 | commonly referred to as a "logging channel". The name you give such a channel |
108 | 110 | error_handler = SyslogHandler('logbook example', level='ERROR', bubble=True) |
109 | 111 | with error_handler.applicationbound(): |
110 | 112 | # whatever is executed here and an error is logged to the |
111 | # error handler but it will also bubble up to the default | |
112 | # stderr handler. | |
113 | # error handler but it will also bubble up other handles. | |
113 | 114 | ... |
114 | 115 | |
115 | 116 | So what if you want to only log errors to the syslog and nothing to |
18 | 18 | |
19 | 19 | {% block footer %} |
20 | 20 | {% if online %} |
21 | <a href="http://github.com/mitsuhiko/logbook"> | |
21 | <a href="http://github.com/getlogbook/logbook"> | |
22 | 22 | <img style="position: fixed; top: 0; right: 0; border: 0;" |
23 | 23 | src="http://s3.amazonaws.com/github/ribbons/forkme_right_gray_6d6d6d.png" |
24 | 24 | alt="Fork me on GitHub"> |
10 | 10 | """ |
11 | 11 | |
12 | 12 | import os |
13 | from logbook.base import LogRecord, Logger, LoggerGroup, NestedSetup, \ | |
14 | Processor, Flags, get_level_name, lookup_level, dispatch_record, \ | |
15 | CRITICAL, ERROR, WARNING, NOTICE, INFO, DEBUG, NOTSET, \ | |
16 | set_datetime_format | |
17 | from logbook.handlers import Handler, StreamHandler, FileHandler, \ | |
18 | MonitoringFileHandler, StderrHandler, RotatingFileHandler, \ | |
19 | TimedRotatingFileHandler, TestHandler, MailHandler, GMailHandler, SyslogHandler, \ | |
20 | NullHandler, NTEventLogHandler, create_syshandler, StringFormatter, \ | |
21 | StringFormatterHandlerMixin, HashingHandlerMixin, \ | |
22 | LimitingHandlerMixin, WrapperHandler, FingersCrossedHandler, \ | |
23 | GroupHandler | |
13 | from .base import ( | |
14 | LogRecord, Logger, LoggerGroup, NestedSetup, Processor, Flags, | |
15 | get_level_name, lookup_level, dispatch_record, CRITICAL, ERROR, WARNING, | |
16 | NOTICE, INFO, DEBUG, TRACE, NOTSET, set_datetime_format) | |
17 | from .handlers import ( | |
18 | Handler, StreamHandler, FileHandler, MonitoringFileHandler, StderrHandler, | |
19 | RotatingFileHandler, TimedRotatingFileHandler, TestHandler, MailHandler, | |
20 | GMailHandler, SyslogHandler, NullHandler, NTEventLogHandler, | |
21 | create_syshandler, StringFormatter, StringFormatterHandlerMixin, | |
22 | HashingHandlerMixin, LimitingHandlerMixin, WrapperHandler, | |
23 | FingersCrossedHandler, GroupHandler) | |
24 | from . import compat | |
24 | 25 | |
25 | __version__ = '0.10.0' | |
26 | __version__ = '0.11.4-dev' | |
26 | 27 | |
27 | 28 | # create an anonymous default logger and provide all important |
28 | 29 | # methods of that logger as global functions |
29 | 30 | _default_logger = Logger('Generic') |
30 | 31 | _default_logger.suppress_dispatcher = True |
32 | trace = _default_logger.trace | |
31 | 33 | debug = _default_logger.debug |
32 | 34 | info = _default_logger.info |
33 | 35 | warn = _default_logger.warn |
0 | __version__ = "0.12.3" |
9 | 9 | """ |
10 | 10 | from itertools import count |
11 | 11 | from logbook.helpers import get_iterator_next_method |
12 | from logbook.concurrency import (thread_get_ident, greenlet_get_ident, | |
13 | thread_local, greenlet_local, | |
14 | ThreadLock, GreenletRLock, is_gevent_enabled) | |
12 | from logbook.concurrency import ( | |
13 | thread_get_ident, greenlet_get_ident, thread_local, greenlet_local, | |
14 | ThreadLock, GreenletRLock, is_gevent_enabled) | |
15 | 15 | |
16 | 16 | _missing = object() |
17 | 17 | _MAX_CONTEXT_OBJECT_CACHE = 256 |
29 | 29 | if self.group is None: |
30 | 30 | return default |
31 | 31 | return getattr(self.group, name) |
32 | ||
32 | 33 | def _set(self, value): |
33 | 34 | setattr(self, '_' + name, value) |
35 | ||
34 | 36 | def _del(self): |
35 | 37 | delattr(self, '_' + name) |
36 | 38 | return property(_get, _set, _del) |
148 | 150 | def push_greenlet(self, obj): |
149 | 151 | self._greenlet_context_lock.acquire() |
150 | 152 | try: |
151 | self._cache.pop(greenlet_get_ident(), None) # remote chance to conflict with thread ids | |
153 | # remote chance to conflict with thread ids | |
154 | self._cache.pop(greenlet_get_ident(), None) | |
152 | 155 | item = (self._stackop(), obj) |
153 | 156 | stack = getattr(self._greenlet_context, 'stack', None) |
154 | 157 | if stack is None: |
161 | 164 | def pop_greenlet(self): |
162 | 165 | self._greenlet_context_lock.acquire() |
163 | 166 | try: |
164 | self._cache.pop(greenlet_get_ident(), None) # remote chance to conflict with thread ids | |
167 | # remote chance to conflict with thread ids | |
168 | self._cache.pop(greenlet_get_ident(), None) | |
165 | 169 | stack = getattr(self._greenlet_context, 'stack', None) |
166 | 170 | assert stack, 'no objects on stack' |
167 | 171 | return stack.pop()[1] |
11 | 11 | esc = "\x1b[" |
12 | 12 | |
13 | 13 | codes = {} |
14 | codes[""] = "" | |
15 | codes["reset"] = esc + "39;49;00m" | |
14 | codes[""] = "" | |
15 | codes["reset"] = esc + "39;49;00m" | |
16 | 16 | |
17 | dark_colors = ["black", "darkred", "darkgreen", "brown", "darkblue", | |
18 | "purple", "teal", "lightgray"] | |
17 | dark_colors = ["black", "darkred", "darkgreen", "brown", "darkblue", | |
18 | "purple", "teal", "lightgray"] | |
19 | 19 | light_colors = ["darkgray", "red", "green", "yellow", "blue", |
20 | 20 | "fuchsia", "turquoise", "white"] |
21 | 21 | |
27 | 27 | |
28 | 28 | del d, l, x |
29 | 29 | |
30 | codes["darkteal"] = codes["turquoise"] | |
30 | codes["darkteal"] = codes["turquoise"] | |
31 | 31 | codes["darkyellow"] = codes["brown"] |
32 | codes["fuscia"] = codes["fuchsia"] | |
32 | codes["fuscia"] = codes["fuchsia"] | |
33 | 33 | |
34 | 34 | |
35 | 35 | def _str_to_type(obj, strtype): |
41 | 41 | |
42 | 42 | def colorize(color_key, text): |
43 | 43 | """Returns an ANSI formatted text with the given color.""" |
44 | return _str_to_type(codes[color_key], text) + text + \ | |
45 | _str_to_type(codes["reset"], text) | |
44 | return (_str_to_type(codes[color_key], text) + text + | |
45 | _str_to_type(codes["reset"], text)) |
13 | 13 | from itertools import chain |
14 | 14 | from weakref import ref as weakref |
15 | 15 | from datetime import datetime |
16 | from logbook import helpers | |
17 | from logbook.concurrency import thread_get_name, thread_get_ident, greenlet_get_ident | |
18 | ||
19 | from logbook.helpers import to_safe_json, parse_iso8601, cached_property, \ | |
20 | PY2, u, string_types, iteritems, integer_types | |
16 | from logbook.concurrency import ( | |
17 | thread_get_name, thread_get_ident, greenlet_get_ident) | |
18 | ||
19 | from logbook.helpers import ( | |
20 | to_safe_json, parse_iso8601, cached_property, PY2, u, string_types, | |
21 | iteritems, integer_types, xrange) | |
21 | 22 | try: |
22 | from logbook._speedups import group_reflected_property, \ | |
23 | ContextStackManager, StackedObject | |
23 | from logbook._speedups import ( | |
24 | group_reflected_property, ContextStackManager, StackedObject) | |
24 | 25 | except ImportError: |
25 | from logbook._fallback import group_reflected_property, \ | |
26 | ContextStackManager, StackedObject | |
26 | from logbook._fallback import ( | |
27 | group_reflected_property, ContextStackManager, StackedObject) | |
27 | 28 | |
28 | 29 | _datetime_factory = datetime.utcnow |
30 | ||
31 | ||
29 | 32 | def set_datetime_format(datetime_format): |
30 | 33 | """ |
31 | 34 | Set the format for the datetime objects created, which are then |
32 | 35 | made available as the :py:attr:`LogRecord.time` attribute of |
33 | 36 | :py:class:`LogRecord` instances. |
34 | 37 | |
35 | :param datetime_format: Indicates how to generate datetime objects. Possible values are: | |
38 | :param datetime_format: Indicates how to generate datetime objects. | |
39 | Possible values are: | |
36 | 40 | |
37 | 41 | "utc" |
38 | :py:attr:`LogRecord.time` will be a datetime in UTC time zone (but not time zone aware) | |
42 | :py:attr:`LogRecord.time` will be a datetime in UTC time zone | |
43 | (but not time zone aware) | |
39 | 44 | "local" |
40 | :py:attr:`LogRecord.time` will be a datetime in local time zone (but not time zone aware) | |
45 | :py:attr:`LogRecord.time` will be a datetime in local time zone | |
46 | (but not time zone aware) | |
41 | 47 | |
42 | 48 | This function defaults to creating datetime objects in UTC time, |
43 | 49 | using `datetime.utcnow() |
65 | 71 | elif datetime_format == "local": |
66 | 72 | _datetime_factory = datetime.now |
67 | 73 | else: |
68 | raise ValueError("Invalid value %r. Valid values are 'utc' and 'local'." % (datetime_format,)) | |
74 | raise ValueError("Invalid value %r. Valid values are 'utc' and " | |
75 | "'local'." % (datetime_format,)) | |
69 | 76 | |
70 | 77 | # make sure to sync these up with _speedups.pyx |
71 | CRITICAL = 6 | |
72 | ERROR = 5 | |
73 | WARNING = 4 | |
74 | NOTICE = 3 | |
75 | INFO = 2 | |
76 | DEBUG = 1 | |
78 | CRITICAL = 15 | |
79 | ERROR = 14 | |
80 | WARNING = 13 | |
81 | NOTICE = 12 | |
82 | INFO = 11 | |
83 | DEBUG = 10 | |
84 | TRACE = 9 | |
77 | 85 | NOTSET = 0 |
78 | 86 | |
79 | 87 | _level_names = { |
83 | 91 | NOTICE: 'NOTICE', |
84 | 92 | INFO: 'INFO', |
85 | 93 | DEBUG: 'DEBUG', |
94 | TRACE: 'TRACE', | |
86 | 95 | NOTSET: 'NOTSET' |
87 | 96 | } |
88 | 97 | _reverse_level_names = dict((v, k) for (k, v) in iteritems(_level_names)) |
342 | 351 | """ |
343 | 352 | _pullable_information = frozenset(( |
344 | 353 | 'func_name', 'module', 'filename', 'lineno', 'process_name', 'thread', |
345 | 'thread_name', 'greenlet', 'formatted_exception', 'message', 'exception_name', | |
346 | 'exception_message' | |
354 | 'thread_name', 'greenlet', 'formatted_exception', 'message', | |
355 | 'exception_name', 'exception_message' | |
347 | 356 | )) |
348 | 357 | _noned_on_close = frozenset(('exc_info', 'frame', 'calling_frame')) |
349 | 358 | |
368 | 377 | information_pulled = False |
369 | 378 | |
370 | 379 | def __init__(self, channel, level, msg, args=None, kwargs=None, |
371 | exc_info=None, extra=None, frame=None, dispatcher=None, frame_correction=0): | |
380 | exc_info=None, extra=None, frame=None, dispatcher=None, | |
381 | frame_correction=0): | |
372 | 382 | #: the name of the logger that created it or any other textual |
373 | 383 | #: channel description. This is a descriptive name and can be |
374 | 384 | #: used for filtering. |
384 | 394 | #: optional exception information. If set, this is a tuple in the |
385 | 395 | #: form ``(exc_type, exc_value, tb)`` as returned by |
386 | 396 | #: :func:`sys.exc_info`. |
387 | #: This parameter can also be ``True``, which would cause the exception info tuple | |
388 | #: to be fetched for you. | |
397 | #: This parameter can also be ``True``, which would cause the exception | |
398 | #: info tuple to be fetched for you. | |
399 | if not exc_info: | |
400 | # this is a special case where exc_info=False can be passed in | |
401 | # theory, and it should be the same as exc_info=None | |
402 | exc_info = None | |
389 | 403 | self.exc_info = exc_info |
390 | 404 | #: optional extra information as dictionary. This is the place |
391 | 405 | #: where custom log processors can attach custom context sensitive |
507 | 521 | return self.msg |
508 | 522 | try: |
509 | 523 | try: |
510 | return self._format_message(self.msg, *self.args, **self.kwargs) | |
524 | return self._format_message(self.msg, *self.args, | |
525 | **self.kwargs) | |
511 | 526 | except UnicodeDecodeError: |
512 | 527 | # Assume an unicode message but mixed-up args |
513 | 528 | msg = self.msg.encode('utf-8', 'replace') |
514 | 529 | return self._format_message(msg, *self.args, **self.kwargs) |
515 | 530 | except (UnicodeEncodeError, AttributeError): |
516 | # we catch AttributeError since if msg is bytes, it won't have the 'format' method | |
517 | if sys.exc_info()[0] is AttributeError and (PY2 or not isinstance(self.msg, bytes)): | |
531 | # we catch AttributeError since if msg is bytes, | |
532 | # it won't have the 'format' method | |
533 | if (sys.exc_info()[0] is AttributeError | |
534 | and (PY2 or not isinstance(self.msg, bytes))): | |
518 | 535 | # this is not the case we thought it is... |
519 | 536 | raise |
520 | 537 | # Assume encoded message with unicode args. |
531 | 548 | # that. |
532 | 549 | e = sys.exc_info()[1] |
533 | 550 | errormsg = ('Could not format message with provided ' |
534 | 'arguments: {err}\n msg={msg!r}\n ' | |
535 | 'args={args!r} \n kwargs={kwargs!r}.\n' | |
536 | 'Happened in file {file}, line {lineno}').format( | |
551 | 'arguments: {err}\n msg={msg!r}\n ' | |
552 | 'args={args!r} \n kwargs={kwargs!r}.\n' | |
553 | 'Happened in file {file}, line {lineno}').format( | |
537 | 554 | err=e, msg=self.msg, args=self.args, |
538 | 555 | kwargs=self.kwargs, file=self.filename, |
539 | 556 | lineno=self.lineno |
554 | 571 | while frm is not None and frm.f_globals is globs: |
555 | 572 | frm = frm.f_back |
556 | 573 | |
557 | for _ in helpers.xrange(self.frame_correction): | |
574 | for _ in xrange(self.frame_correction): | |
558 | 575 | frm = frm.f_back |
559 | 576 | |
560 | 577 | return frm |
603 | 620 | @cached_property |
604 | 621 | def greenlet(self): |
605 | 622 | """The ident of the greenlet. This is evaluated late and means that |
606 | if the log record is passed to another greenlet, :meth:`pull_information` | |
607 | was called in the old greenlet. | |
623 | if the log record is passed to another greenlet, | |
624 | :meth:`pull_information` was called in the old greenlet. | |
608 | 625 | """ |
609 | 626 | return greenlet_get_ident() |
610 | 627 | |
697 | 714 | #: The name of the minimium logging level required for records to be |
698 | 715 | #: created. |
699 | 716 | level_name = level_name_property() |
717 | ||
718 | def trace(self, *args, **kwargs): | |
719 | """Logs a :class:`~logbook.LogRecord` with the level set | |
720 | to :data:`~logbook.TRACE`. | |
721 | """ | |
722 | if not self.disabled and TRACE >= self.level: | |
723 | self._log(TRACE, args, kwargs) | |
700 | 724 | |
701 | 725 | def debug(self, *args, **kwargs): |
702 | 726 | """Logs a :class:`~logbook.LogRecord` with the level set |
883 | 907 | continue |
884 | 908 | |
885 | 909 | # first case of blackhole (without filter). |
886 | # this should discard all further processing and we don't have to heavy_init to know that... | |
910 | # this should discard all further processing and | |
911 | # we don't have to heavy_init to know that... | |
887 | 912 | if handler.filter is None and handler.blackhole: |
888 | 913 | break |
889 | 914 | |
896 | 921 | self.process_record(record) |
897 | 922 | record_initialized = True |
898 | 923 | |
899 | ||
900 | 924 | # a filter can still veto the handling of the record. This |
901 | 925 | # however is already operating on an initialized and processed |
902 | 926 | # record. The impact is that filters are slower than the |
903 | 927 | # handler's should_handle function in case there is no default |
904 | 928 | # handler that would handle the record (delayed init). |
905 | if handler.filter is not None \ | |
906 | and not handler.filter(record, handler): | |
929 | if (handler.filter is not None | |
930 | and not handler.filter(record, handler)): | |
907 | 931 | continue |
908 | 932 | |
909 | 933 | # We might have a filter, so now that we know we *should* handle |
910 | 934 | # this record, we should consider the case of us being a black hole... |
911 | 935 | if handler.blackhole: |
912 | 936 | break |
913 | ||
914 | 937 | |
915 | 938 | # handle the record. If the record was handled and |
916 | 939 | # the record is not bubbling we can abort now. |
24 | 24 | removes all otherwise registered handlers on root logger of |
25 | 25 | the logging system but leaves the other loggers untouched. |
26 | 26 | |
27 | :param set_root_logger_level: controls of the default level of the legacy root logger is changed | |
28 | so that all legacy log messages get redirected to Logbook | |
27 | :param set_root_logger_level: controls of the default level of the legacy | |
28 | root logger is changed so that all legacy log messages get redirected | |
29 | to Logbook | |
29 | 30 | """ |
30 | 31 | del logging.root.handlers[:] |
31 | 32 | logging.root.addHandler(RedirectLoggingHandler()) |
107 | 108 | """Tries to find the caller that issued the call.""" |
108 | 109 | frm = sys._getframe(2) |
109 | 110 | while frm is not None: |
110 | if frm.f_globals is globals() or \ | |
111 | frm.f_globals is logbook.base.__dict__ or \ | |
112 | frm.f_globals is logging.__dict__: | |
111 | if (frm.f_globals is globals() or | |
112 | frm.f_globals is logbook.base.__dict__ or | |
113 | frm.f_globals is logging.__dict__): | |
113 | 114 | frm = frm.f_back |
114 | 115 | else: |
115 | 116 | return frm |
123 | 124 | def convert_record(self, old_record): |
124 | 125 | """Converts an old logging record into a logbook log record.""" |
125 | 126 | record = LoggingCompatRecord(old_record.name, |
126 | self.convert_level(old_record.levelno), | |
127 | old_record.msg, old_record.args, | |
128 | None, old_record.exc_info, | |
129 | self.find_extra(old_record), | |
130 | self.find_caller(old_record)) | |
127 | self.convert_level(old_record.levelno), | |
128 | old_record.msg, old_record.args, | |
129 | None, old_record.exc_info, | |
130 | self.find_extra(old_record), | |
131 | self.find_caller(old_record)) | |
131 | 132 | record.time = self.convert_time(old_record.created) |
132 | 133 | return record |
133 | 134 |
48 | 48 | |
49 | 49 | def __repr__(self): |
50 | 50 | owner = self._owner |
51 | return "<%s owner=%r count=%d>" % (self.__class__.__name__, owner, self._count) | |
51 | return "<%s owner=%r count=%d>" % (self.__class__.__name__, owner, | |
52 | self._count) | |
52 | 53 | |
53 | 54 | def acquire(self, blocking=1): |
54 | 55 | tid = thread_get_ident() |
55 | 56 | gid = greenlet_get_ident() |
56 | 57 | tid_gid = (tid, gid) |
57 | if tid_gid == self._owner: # We trust the GIL here so we can do this comparison w/o locking. | |
58 | ||
59 | # We trust the GIL here so we can do this comparison w/o locking. | |
60 | if tid_gid == self._owner: | |
58 | 61 | self._count = self._count + 1 |
59 | 62 | return True |
60 | 63 | |
61 | 64 | greenlet_lock = self._get_greenlet_lock() |
62 | 65 | |
63 | 66 | self._wait_queue.append(gid) |
64 | # this is a safety in case an exception is raised somewhere and we must make sure we're not in the queue | |
67 | # this is a safety in case an exception is raised somewhere | |
68 | # and we must make sure we're not in the queue | |
65 | 69 | # otherwise it'll get stuck forever. |
66 | 70 | remove_from_queue_on_return = True |
67 | 71 | try: |
73 | 77 | # Hurray, we can have the lock. |
74 | 78 | self._owner = tid_gid |
75 | 79 | self._count = 1 |
76 | remove_from_queue_on_return = False # don't remove us from the queue | |
80 | ||
81 | # don't remove us from the queue | |
82 | remove_from_queue_on_return = False | |
77 | 83 | return True |
78 | 84 | else: |
79 | # we already hold the greenlet lock so obviously the owner is not in our thread. | |
85 | # we already hold the greenlet lock so obviously | |
86 | # the owner is not in our thread. | |
80 | 87 | greenlet_lock.release() |
81 | 88 | if blocking: |
82 | gevent.sleep(0.0005) # 500 us -> initial delay of 1 ms | |
89 | # 500 us -> initial delay of 1 ms | |
90 | gevent.sleep(0.0005) | |
83 | 91 | else: |
84 | 92 | return False |
85 | 93 | finally: |
113 | 121 | def _is_owned(self): |
114 | 122 | return self._owner == (thread_get_ident(), greenlet_get_ident()) |
115 | 123 | else: |
116 | from threading import Lock as ThreadLock, RLock as ThreadRLock, currentThread | |
124 | from threading import ( | |
125 | Lock as ThreadLock, RLock as ThreadRLock, currentThread) | |
117 | 126 | try: |
118 | from thread import get_ident as thread_get_ident, _local as thread_local | |
127 | from thread import ( | |
128 | get_ident as thread_get_ident, _local as thread_local) | |
119 | 129 | except ImportError: |
120 | from _thread import get_ident as thread_get_ident, _local as thread_local | |
130 | from _thread import ( | |
131 | get_ident as thread_get_ident, _local as thread_local) | |
121 | 132 | |
122 | 133 | def thread_get_name(): |
123 | 134 | return currentThread().getName() |
139 | 150 | def __exit__(self, t, v, tb): |
140 | 151 | pass |
141 | 152 | |
153 | ||
142 | 154 | def new_fine_grained_lock(): |
143 | 155 | global use_gevent |
144 | 156 | if use_gevent: |
7 | 7 | :copyright: (c) 2010 by Armin Ronacher, Georg Brandl. |
8 | 8 | :license: BSD, see LICENSE for more details. |
9 | 9 | """ |
10 | import io | |
10 | 11 | import os |
11 | 12 | import re |
12 | 13 | import sys |
20 | 21 | import traceback |
21 | 22 | from datetime import datetime, timedelta |
22 | 23 | from collections import deque |
23 | ||
24 | from six import add_metaclass | |
25 | ||
26 | from logbook.base import CRITICAL, ERROR, WARNING, NOTICE, INFO, DEBUG, \ | |
27 | NOTSET, level_name_property, _missing, lookup_level, \ | |
28 | Flags, ContextObject, ContextStackManager | |
29 | from logbook.helpers import rename, b, _is_text_stream, is_unicode, PY2, \ | |
30 | zip, xrange, string_types, integer_types, reraise, u | |
24 | from textwrap import dedent | |
25 | ||
26 | from logbook.base import ( | |
27 | CRITICAL, ERROR, WARNING, NOTICE, INFO, DEBUG, NOTSET, level_name_property, | |
28 | _missing, lookup_level, Flags, ContextObject, ContextStackManager) | |
29 | from logbook.helpers import ( | |
30 | rename, b, _is_text_stream, is_unicode, PY2, zip, xrange, string_types, | |
31 | integer_types, reraise, u, with_metaclass) | |
31 | 32 | from logbook.concurrency import new_fine_grained_lock |
32 | 33 | |
33 | DEFAULT_FORMAT_STRING = ( | |
34 | u('[{record.time:%Y-%m-%d %H:%M:%S.%f}] ') + | |
35 | u('{record.level_name}: {record.channel}: {record.message}') | |
36 | ) | |
34 | DEFAULT_FORMAT_STRING = u( | |
35 | '[{record.time:%Y-%m-%d %H:%M:%S.%f}] ' | |
36 | '{record.level_name}: {record.channel}: {record.message}') | |
37 | ||
37 | 38 | SYSLOG_FORMAT_STRING = u('{record.channel}: {record.message}') |
38 | NTLOG_FORMAT_STRING = u('''\ | |
39 | Message Level: {record.level_name} | |
40 | Location: {record.filename}:{record.lineno} | |
41 | Module: {record.module} | |
42 | Function: {record.func_name} | |
43 | Exact Time: {record.time:%Y-%m-%d %H:%M:%S} | |
44 | ||
45 | Event provided Message: | |
46 | ||
47 | {record.message} | |
48 | ''') | |
49 | TEST_FORMAT_STRING = \ | |
50 | u('[{record.level_name}] {record.channel}: {record.message}') | |
51 | MAIL_FORMAT_STRING = u('''\ | |
52 | Subject: {handler.subject} | |
53 | ||
54 | Message type: {record.level_name} | |
55 | Location: {record.filename}:{record.lineno} | |
56 | Module: {record.module} | |
57 | Function: {record.func_name} | |
58 | Time: {record.time:%Y-%m-%d %H:%M:%S} | |
59 | ||
60 | Message: | |
61 | ||
62 | {record.message} | |
63 | ''') | |
64 | MAIL_RELATED_FORMAT_STRING = u('''\ | |
65 | Message type: {record.level_name} | |
66 | Location: {record.filename}:{record.lineno} | |
67 | Module: {record.module} | |
68 | Function: {record.func_name} | |
69 | {record.message} | |
70 | ''') | |
39 | NTLOG_FORMAT_STRING = dedent(u(''' | |
40 | Message Level: {record.level_name} | |
41 | Location: {record.filename}:{record.lineno} | |
42 | Module: {record.module} | |
43 | Function: {record.func_name} | |
44 | Exact Time: {record.time:%Y-%m-%d %H:%M:%S} | |
45 | ||
46 | Event provided Message: | |
47 | ||
48 | {record.message} | |
49 | ''')).lstrip() | |
50 | ||
51 | TEST_FORMAT_STRING = u('[{record.level_name}] {record.channel}: {record.message}') | |
52 | MAIL_FORMAT_STRING = dedent(u(''' | |
53 | Subject: {handler.subject} | |
54 | ||
55 | Message type: {record.level_name} | |
56 | Location: {record.filename}:{record.lineno} | |
57 | Module: {record.module} | |
58 | Function: {record.func_name} | |
59 | Time: {record.time:%Y-%m-%d %H:%M:%S} | |
60 | ||
61 | Message: | |
62 | ||
63 | {record.message} | |
64 | ''')).lstrip() | |
65 | ||
66 | MAIL_RELATED_FORMAT_STRING = dedent(u(''' | |
67 | Message type: {record.level_name} | |
68 | Location: {record.filename}:{record.lineno} | |
69 | Module: {record.module} | |
70 | Function: {record.func_name} | |
71 | {record.message} | |
72 | ''')).lstrip() | |
71 | 73 | |
72 | 74 | SYSLOG_PORT = 514 |
73 | 75 | |
74 | 76 | REGTYPE = type(re.compile("I'm a regular expression!")) |
77 | ||
75 | 78 | |
76 | 79 | def create_syshandler(application_name, level=NOTSET): |
77 | 80 | """Creates the handler the operating system provides. On Unix systems |
93 | 96 | def __new__(cls, name, bases, d): |
94 | 97 | # aha, that thing has a custom close method. We will need a magic |
95 | 98 | # __del__ for it to be called on cleanup. |
96 | if bases != (ContextObject,) and 'close' in d and '__del__' not in d \ | |
97 | and not any(hasattr(x, '__del__') for x in bases): | |
99 | if (bases != (ContextObject,) and 'close' in d and '__del__' not in d | |
100 | and not any(hasattr(x, '__del__') for x in bases)): | |
98 | 101 | def _magic_del(self): |
99 | 102 | try: |
100 | 103 | self.close() |
106 | 109 | return type.__new__(cls, name, bases, d) |
107 | 110 | |
108 | 111 | |
109 | @add_metaclass(_HandlerType) | |
110 | class Handler(ContextObject): | |
112 | class Handler(with_metaclass(_HandlerType), ContextObject): | |
111 | 113 | """Handler instances dispatch logging events to specific destinations. |
112 | 114 | |
113 | 115 | The base handler class. Acts as a placeholder which defines the Handler |
116 | 118 | the 'raw' message as determined by record.message is logged. |
117 | 119 | |
118 | 120 | To bind a handler you can use the :meth:`push_application`, |
119 | :meth:`push_thread` or :meth:`push_greenlet` methods. This will push the handler on a stack of | |
120 | handlers. To undo this, use the :meth:`pop_application`, | |
121 | :meth:`push_thread` or :meth:`push_greenlet` methods. | |
122 | This will push the handler on a stack of handlers. | |
123 | To undo this, use the :meth:`pop_application`, | |
121 | 124 | :meth:`pop_thread` methods and :meth:`pop_greenlet`:: |
122 | 125 | |
123 | 126 | handler = MyHandler() |
306 | 309 | """ |
307 | 310 | blackhole = True |
308 | 311 | |
312 | def __init__(self, level=NOTSET, filter=None): | |
313 | super(NullHandler, self).__init__(level=level, filter=filter, | |
314 | bubble=False) | |
315 | ||
309 | 316 | |
310 | 317 | class WrapperHandler(Handler): |
311 | 318 | """A class that can wrap another handler and redirect all calls to the |
437 | 444 | |
438 | 445 | _NUMBER_TYPES = integer_types + (float,) |
439 | 446 | |
447 | ||
440 | 448 | class LimitingHandlerMixin(HashingHandlerMixin): |
441 | 449 | """Mixin class for handlers that want to limit emitting records. |
442 | 450 | |
483 | 491 | first_count = last_count |
484 | 492 | old_count = suppression_count |
485 | 493 | |
486 | if not suppression_count and \ | |
487 | len(self._record_limits) >= self.max_record_cache: | |
494 | if (not suppression_count and | |
495 | len(self._record_limits) >= self.max_record_cache): | |
488 | 496 | cache_items = self._record_limits.items() |
489 | 497 | cache_items.sort() |
490 | del cache_items[:int(self._record_limits) \ | |
491 | * self.record_cache_prune] | |
498 | del cache_items[:int(self._record_limits) | |
499 | * self.record_cache_prune] | |
492 | 500 | self._record_limits = dict(cache_items) |
493 | 501 | |
494 | 502 | self._record_limits[hash] = (first_count, old_count + 1) |
554 | 562 | """Encodes the message to the stream encoding.""" |
555 | 563 | stream = self.stream |
556 | 564 | rv = msg + '\n' |
557 | if (PY2 and is_unicode(rv)) or \ | |
558 | not (PY2 or is_unicode(rv) or _is_text_stream(stream)): | |
565 | if ((PY2 and is_unicode(rv)) or | |
566 | not (PY2 or is_unicode(rv) or _is_text_stream(stream))): | |
559 | 567 | enc = self.encoding |
560 | 568 | if enc is None: |
561 | 569 | enc = getattr(stream, 'encoding', None) or 'utf-8' |
602 | 610 | def _open(self, mode=None): |
603 | 611 | if mode is None: |
604 | 612 | mode = self._mode |
605 | self.stream = open(self._filename, mode) | |
613 | self.stream = io.open(self._filename, mode, encoding=self.encoding) | |
606 | 614 | |
607 | 615 | def write(self, item): |
608 | 616 | self.ensure_stream_is_open() |
609 | if not PY2 and isinstance(item, bytes): | |
617 | if isinstance(item, bytes): | |
610 | 618 | self.stream.buffer.write(item) |
611 | 619 | else: |
612 | 620 | self.stream.write(item) |
722 | 730 | format_string, delay, filter, bubble) |
723 | 731 | self.max_size = max_size |
724 | 732 | self.backup_count = backup_count |
725 | assert backup_count > 0, 'at least one backup file has to be ' \ | |
726 | 'specified' | |
733 | assert backup_count > 0, ('at least one backup file has to be ' | |
734 | 'specified') | |
727 | 735 | |
728 | 736 | def should_rollover(self, record, bytes): |
729 | 737 | self.stream.seek(0, 2) |
786 | 794 | self._filename = None |
787 | 795 | |
788 | 796 | def _get_timed_filename(self, datetime): |
789 | return datetime.strftime('-' + self.date_format) \ | |
790 | .join(self._fn_parts) | |
797 | return (datetime.strftime('-' + self.date_format) | |
798 | .join(self._fn_parts)) | |
791 | 799 | |
792 | 800 | def should_rollover(self, record): |
793 | 801 | fn = self._get_timed_filename(record.time) |
805 | 813 | files = [] |
806 | 814 | for filename in os.listdir(directory): |
807 | 815 | filename = os.path.join(directory, filename) |
808 | if filename.startswith(self._fn_parts[0] + '-') and \ | |
809 | filename.endswith(self._fn_parts[1]): | |
816 | if (filename.startswith(self._fn_parts[0] + '-') and | |
817 | filename.endswith(self._fn_parts[1])): | |
810 | 818 | files.append((os.path.getmtime(filename), filename)) |
811 | 819 | files.sort() |
812 | return files[:-self.backup_count + 1] if self.backup_count > 1\ | |
813 | else files[:] | |
820 | if self.backup_count > 1: | |
821 | return files[:-self.backup_count + 1] | |
822 | else: | |
823 | return files[:] | |
814 | 824 | |
815 | 825 | def perform_rollover(self): |
816 | 826 | self.stream.close() |
846 | 856 | default_format_string = TEST_FORMAT_STRING |
847 | 857 | |
848 | 858 | def __init__(self, level=NOTSET, format_string=None, filter=None, |
849 | bubble=False): | |
859 | bubble=False, force_heavy_init=False): | |
850 | 860 | Handler.__init__(self, level, filter, bubble) |
851 | 861 | StringFormatterHandlerMixin.__init__(self, format_string) |
852 | 862 | #: captures the :class:`LogRecord`\s as instances |
853 | 863 | self.records = [] |
854 | 864 | self._formatted_records = [] |
855 | 865 | self._formatted_record_cache = [] |
866 | self._force_heavy_init = force_heavy_init | |
856 | 867 | |
857 | 868 | def close(self): |
858 | 869 | """Close all records down when the handler is closed.""" |
864 | 875 | # call to the emit function. If we don't do that, the traceback |
865 | 876 | # attribute and other things will already be removed. |
866 | 877 | record.keep_open = True |
878 | if self._force_heavy_init: | |
879 | record.heavy_init() | |
867 | 880 | self.records.append(record) |
868 | 881 | |
869 | 882 | @property |
870 | 883 | def formatted_records(self): |
871 | 884 | """Captures the formatted log records as unicode strings.""" |
872 | if len(self._formatted_record_cache) != len(self.records) or \ | |
873 | any(r1 != r2 for r1, r2 in | |
874 | zip(self.records, self._formatted_record_cache)): | |
885 | if (len(self._formatted_record_cache) != len(self.records) or | |
886 | any(r1 != r2 for r1, r2 in | |
887 | zip(self.records, self._formatted_record_cache))): | |
875 | 888 | self._formatted_records = [self.format(r) for r in self.records] |
876 | 889 | self._formatted_record_cache = list(self.records) |
877 | 890 | return self._formatted_records |
1044 | 1057 | def _get_related_format_string(self): |
1045 | 1058 | if isinstance(self.related_formatter, StringFormatter): |
1046 | 1059 | return self.related_formatter.format_string |
1060 | ||
1047 | 1061 | def _set_related_format_string(self, value): |
1048 | 1062 | if value is None: |
1049 | 1063 | self.related_formatter = None |
1050 | 1064 | else: |
1051 | 1065 | self.related_formatter = self.formatter_class(value) |
1052 | 1066 | related_format_string = property(_get_related_format_string, |
1053 | _set_related_format_string) | |
1067 | _set_related_format_string) | |
1054 | 1068 | del _get_related_format_string, _set_related_format_string |
1055 | 1069 | |
1056 | 1070 | def get_recipients(self, record): |
1086 | 1100 | |
1087 | 1101 | body = '\r\n'.join(lineiter) |
1088 | 1102 | if suppressed: |
1089 | body += '\r\n\r\nThis message occurred additional %d ' \ | |
1090 | 'time(s) and was suppressed' % suppressed | |
1103 | body += ('\r\n\r\nThis message occurred additional %d ' | |
1104 | 'time(s) and was suppressed' % suppressed) | |
1091 | 1105 | |
1092 | 1106 | # inconsistency in Python 2.5 |
1093 | 1107 | # other versions correctly return msg.get_payload() as str |
1200 | 1214 | |
1201 | 1215 | class GMailHandler(MailHandler): |
1202 | 1216 | """ |
1203 | A customized mail handler class for sending emails via GMail (or Google Apps mail):: | |
1204 | ||
1205 | handler = GMailHandler("my_user@gmail.com", "mypassword", ["to_user@some_mail.com"], ...) # other arguments same as MailHandler | |
1217 | A customized mail handler class for sending emails via GMail (or Google | |
1218 | Apps mail):: | |
1219 | ||
1220 | handler = GMailHandler( | |
1221 | "my_user@gmail.com", "mypassword", ["to_user@some_mail.com"], | |
1222 | ...) # other arguments same as MailHandler | |
1206 | 1223 | |
1207 | 1224 | .. versionadded:: 0.6.0 |
1208 | 1225 | """ |
1209 | 1226 | |
1210 | 1227 | def __init__(self, account_id, password, recipients, **kw): |
1211 | 1228 | super(GMailHandler, self).__init__( |
1212 | account_id, recipients, secure=(), server_addr=("smtp.gmail.com", 587), | |
1229 | account_id, recipients, secure=(), | |
1230 | server_addr=("smtp.gmail.com", 587), | |
1213 | 1231 | credentials=(account_id, password), **kw) |
1214 | 1232 | |
1215 | 1233 | |
1220 | 1238 | default_format_string = SYSLOG_FORMAT_STRING |
1221 | 1239 | |
1222 | 1240 | # priorities |
1223 | LOG_EMERG = 0 # system is unusable | |
1224 | LOG_ALERT = 1 # action must be taken immediately | |
1225 | LOG_CRIT = 2 # critical conditions | |
1226 | LOG_ERR = 3 # error conditions | |
1227 | LOG_WARNING = 4 # warning conditions | |
1228 | LOG_NOTICE = 5 # normal but significant condition | |
1229 | LOG_INFO = 6 # informational | |
1230 | LOG_DEBUG = 7 # debug-level messages | |
1241 | LOG_EMERG = 0 # system is unusable | |
1242 | LOG_ALERT = 1 # action must be taken immediately | |
1243 | LOG_CRIT = 2 # critical conditions | |
1244 | LOG_ERR = 3 # error conditions | |
1245 | LOG_WARNING = 4 # warning conditions | |
1246 | LOG_NOTICE = 5 # normal but significant condition | |
1247 | LOG_INFO = 6 # informational | |
1248 | LOG_DEBUG = 7 # debug-level messages | |
1231 | 1249 | |
1232 | 1250 | # facility codes |
1233 | LOG_KERN = 0 # kernel messages | |
1234 | LOG_USER = 1 # random user-level messages | |
1235 | LOG_MAIL = 2 # mail system | |
1236 | LOG_DAEMON = 3 # system daemons | |
1237 | LOG_AUTH = 4 # security/authorization messages | |
1238 | LOG_SYSLOG = 5 # messages generated internally by syslogd | |
1239 | LOG_LPR = 6 # line printer subsystem | |
1240 | LOG_NEWS = 7 # network news subsystem | |
1241 | LOG_UUCP = 8 # UUCP subsystem | |
1242 | LOG_CRON = 9 # clock daemon | |
1243 | LOG_AUTHPRIV = 10 # security/authorization messages (private) | |
1244 | LOG_FTP = 11 # FTP daemon | |
1251 | LOG_KERN = 0 # kernel messages | |
1252 | LOG_USER = 1 # random user-level messages | |
1253 | LOG_MAIL = 2 # mail system | |
1254 | LOG_DAEMON = 3 # system daemons | |
1255 | LOG_AUTH = 4 # security/authorization messages | |
1256 | LOG_SYSLOG = 5 # messages generated internally by syslogd | |
1257 | LOG_LPR = 6 # line printer subsystem | |
1258 | LOG_NEWS = 7 # network news subsystem | |
1259 | LOG_UUCP = 8 # UUCP subsystem | |
1260 | LOG_CRON = 9 # clock daemon | |
1261 | LOG_AUTHPRIV = 10 # security/authorization messages (private) | |
1262 | LOG_FTP = 11 # FTP daemon | |
1245 | 1263 | |
1246 | 1264 | # other codes through 15 reserved for system use |
1247 | LOG_LOCAL0 = 16 # reserved for local use | |
1248 | LOG_LOCAL1 = 17 # reserved for local use | |
1249 | LOG_LOCAL2 = 18 # reserved for local use | |
1250 | LOG_LOCAL3 = 19 # reserved for local use | |
1251 | LOG_LOCAL4 = 20 # reserved for local use | |
1252 | LOG_LOCAL5 = 21 # reserved for local use | |
1253 | LOG_LOCAL6 = 22 # reserved for local use | |
1254 | LOG_LOCAL7 = 23 # reserved for local use | |
1265 | LOG_LOCAL0 = 16 # reserved for local use | |
1266 | LOG_LOCAL1 = 17 # reserved for local use | |
1267 | LOG_LOCAL2 = 18 # reserved for local use | |
1268 | LOG_LOCAL3 = 19 # reserved for local use | |
1269 | LOG_LOCAL4 = 20 # reserved for local use | |
1270 | LOG_LOCAL5 = 21 # reserved for local use | |
1271 | LOG_LOCAL6 = 22 # reserved for local use | |
1272 | LOG_LOCAL7 = 23 # reserved for local use | |
1255 | 1273 | |
1256 | 1274 | facility_names = { |
1257 | 1275 | 'auth': LOG_AUTH, |
1533 | 1551 | self.buffered_records.append(record) |
1534 | 1552 | if self._buffer_full: |
1535 | 1553 | self.buffered_records.popleft() |
1536 | elif self.buffer_size and \ | |
1537 | len(self.buffered_records) >= self.buffer_size: | |
1554 | elif (self.buffer_size and | |
1555 | len(self.buffered_records) >= self.buffer_size): | |
1538 | 1556 | self._buffer_full = True |
1539 | 1557 | return record.level >= self._level |
1540 | 1558 | return False |
59 | 59 | from http import client as http_client |
60 | 60 | |
61 | 61 | if PY2: |
62 | #Yucky, but apparently that's the only way to do this | |
62 | # Yucky, but apparently that's the only way to do this | |
63 | 63 | exec(""" |
64 | 64 | def reraise(tp, value, tb=None): |
65 | 65 | raise tp, value, tb |
81 | 81 | ) |
82 | 82 | _missing = object() |
83 | 83 | if PY2: |
84 | def b(x): return x | |
85 | def _is_text_stream(x): return True | |
84 | def b(x): | |
85 | return x | |
86 | ||
87 | def _is_text_stream(x): | |
88 | return True | |
86 | 89 | else: |
87 | 90 | import io |
88 | def b(x): return x.encode('ascii') | |
89 | def _is_text_stream(stream): return isinstance(stream, io.TextIOBase) | |
91 | ||
92 | def b(x): | |
93 | return x.encode('ascii') | |
94 | ||
95 | def _is_text_stream(stream): | |
96 | return isinstance(stream, io.TextIOBase) | |
90 | 97 | |
91 | 98 | |
92 | 99 | can_rename_open_file = False |
93 | if os.name == 'nt': # pragma: no cover | |
94 | _rename = lambda src, dst: False | |
95 | _rename_atomic = lambda src, dst: False | |
96 | ||
100 | if os.name == 'nt': | |
97 | 101 | try: |
98 | 102 | import ctypes |
99 | 103 | |
113 | 117 | rv = False |
114 | 118 | while not rv and retry < 100: |
115 | 119 | rv = _MoveFileEx(src, dst, _MOVEFILE_REPLACE_EXISTING | |
116 | _MOVEFILE_WRITE_THROUGH) | |
120 | _MOVEFILE_WRITE_THROUGH) | |
117 | 121 | if not rv: |
118 | 122 | time.sleep(0.001) |
119 | 123 | retry += 1 |
147 | 151 | finally: |
148 | 152 | _CloseHandle(ta) |
149 | 153 | except Exception: |
150 | pass | |
154 | def _rename(src, dst): | |
155 | return False | |
156 | ||
157 | def _rename_atomic(src, dst): | |
158 | return False | |
151 | 159 | |
152 | 160 | def rename(src, dst): |
153 | 161 | # Try atomic or pseudo-atomic rename |
172 | 180 | can_rename_open_file = True |
173 | 181 | |
174 | 182 | _JSON_SIMPLE_TYPES = (bool, float) + integer_types + string_types |
183 | ||
175 | 184 | |
176 | 185 | def to_safe_json(data): |
177 | 186 | """Makes a data structure safe for JSON silently discarding invalid |
272 | 281 | obj.__dict__[self.__name__] = value |
273 | 282 | return value |
274 | 283 | |
284 | ||
275 | 285 | def get_iterator_next_method(it): |
276 | 286 | return lambda: next(it) |
287 | ||
277 | 288 | |
278 | 289 | # python 2 support functions and aliases |
279 | 290 | def is_unicode(x): |
280 | 291 | if PY2: |
281 | 292 | return isinstance(x, unicode) |
282 | 293 | return isinstance(x, str) |
294 | ||
295 | if PY2: | |
296 | exec("""def with_metaclass(meta): | |
297 | class _WithMetaclassBase(object): | |
298 | __metaclass__ = meta | |
299 | return _WithMetaclassBase | |
300 | """) | |
301 | else: | |
302 | exec("""def with_metaclass(meta): | |
303 | class _WithMetaclassBase(object, metaclass=meta): | |
304 | pass | |
305 | return _WithMetaclassBase | |
306 | """) |
13 | 13 | from cgi import parse_qsl |
14 | 14 | from functools import partial |
15 | 15 | |
16 | from logbook.base import RecordDispatcher, dispatch_record, NOTSET, ERROR, NOTICE | |
17 | from logbook.handlers import Handler, StringFormatter, \ | |
18 | StringFormatterHandlerMixin, StderrHandler | |
16 | from logbook.base import ( | |
17 | RecordDispatcher, dispatch_record, NOTSET, ERROR, NOTICE) | |
18 | from logbook.handlers import ( | |
19 | Handler, StringFormatter, StringFormatterHandlerMixin, StderrHandler) | |
19 | 20 | from logbook._termcolors import colorize |
20 | 21 | from logbook.helpers import PY2, string_types, iteritems, u |
21 | 22 | |
28 | 29 | from urllib.parse import urlencode |
29 | 30 | |
30 | 31 | _ws_re = re.compile(r'(\s+)(?u)') |
31 | TWITTER_FORMAT_STRING = \ | |
32 | u('[{record.channel}] {record.level_name}: {record.message}') | |
32 | TWITTER_FORMAT_STRING = u( | |
33 | '[{record.channel}] {record.level_name}: {record.message}') | |
33 | 34 | TWITTER_ACCESS_TOKEN_URL = 'https://twitter.com/oauth/access_token' |
34 | 35 | NEW_TWEET_URL = 'https://api.twitter.com/1/statuses/update.json' |
35 | 36 | |
208 | 209 | def tweet(self, status): |
209 | 210 | """Tweets a given status. Status must not exceed 140 chars.""" |
210 | 211 | client = self.make_client() |
211 | resp, content = client.request(NEW_TWEET_URL, 'POST', | |
212 | resp, content = client.request( | |
213 | NEW_TWEET_URL, 'POST', | |
212 | 214 | body=urlencode({'status': status.encode('utf-8')}), |
213 | 215 | headers={'Content-Type': 'application/x-www-form-urlencoded'}) |
214 | 216 | return resp['status'] == '200' |
267 | 269 | self._subprocess = subprocess |
268 | 270 | |
269 | 271 | def emit(self, record): |
270 | args = [arg.format(record=record).encode(self.encoding) | |
272 | args = [arg.format(record=record) | |
271 | 273 | for arg in self._arguments] |
272 | 274 | if self._stdin_format is not None: |
273 | stdin_data = self._stdin_format.format(record=record) \ | |
274 | .encode(self.encoding) | |
275 | stdin_data = (self._stdin_format.format(record=record) | |
276 | .encode(self.encoding)) | |
275 | 277 | stdin = self._subprocess.PIPE |
276 | 278 | else: |
277 | 279 | stdin = None |
324 | 326 | |
325 | 327 | |
326 | 328 | # backwards compat. Should go away in some future releases |
327 | from logbook.handlers import FingersCrossedHandler as \ | |
328 | FingersCrossedHandlerBase | |
329 | from logbook.handlers import ( | |
330 | FingersCrossedHandler as FingersCrossedHandlerBase) | |
331 | ||
332 | ||
329 | 333 | class FingersCrossedHandler(FingersCrossedHandlerBase): |
330 | 334 | def __init__(self, *args, **kwargs): |
331 | 335 | FingersCrossedHandlerBase.__init__(self, *args, **kwargs) |
332 | 336 | from warnings import warn |
333 | 337 | warn(PendingDeprecationWarning('fingers crossed handler changed ' |
334 | 'location. It\'s now a core component of Logbook.')) | |
338 | 'location. It\'s now a core component of Logbook.')) | |
335 | 339 | |
336 | 340 | |
337 | 341 | class ExceptionHandler(Handler, StringFormatterHandlerMixin): |
359 | 363 | raise self.exc_type(self.format(record)) |
360 | 364 | return False |
361 | 365 | |
366 | ||
362 | 367 | class DedupHandler(Handler): |
363 | 368 | """A handler that deduplicates log messages. |
364 | 369 | |
365 | It emits each unique log record once, along with the number of times it was emitted. | |
370 | It emits each unique log record once, along with the number of times it was | |
371 | emitted. | |
366 | 372 | Example::: |
367 | 373 | |
368 | 374 | with logbook.more.DedupHandler(): |
375 | 381 | message repeated 2 times: foo |
376 | 382 | message repeated 1 times: bar |
377 | 383 | """ |
378 | def __init__(self, format_string='message repeated {count} times: {message}', *args, **kwargs): | |
384 | def __init__(self, | |
385 | format_string='message repeated {count} times: {message}', | |
386 | *args, **kwargs): | |
379 | 387 | Handler.__init__(self, bubble=False, *args, **kwargs) |
380 | 388 | self._format_string = format_string |
381 | 389 | self.clear() |
397 | 405 | self.flush() |
398 | 406 | |
399 | 407 | def handle(self, record): |
400 | if not record.message in self._message_to_count: | |
408 | if record.message not in self._message_to_count: | |
401 | 409 | self._unique_ordered_records.append(record) |
402 | 410 | self._message_to_count[record.message] += 1 |
403 | 411 | return True |
404 | 412 | |
405 | 413 | def flush(self): |
406 | 414 | for record in self._unique_ordered_records: |
407 | record.message = self._format_string.format(message=record.message, count=self._message_to_count[record.message]) | |
408 | # record.dispatcher is the logger who created the message, it's sometimes supressed (by logbook.info for example) | |
409 | dispatch = record.dispatcher.call_handlers if record.dispatcher is not None else dispatch_record | |
415 | record.message = self._format_string.format( | |
416 | message=record.message, | |
417 | count=self._message_to_count[record.message]) | |
418 | # record.dispatcher is the logger who created the message, | |
419 | # it's sometimes supressed (by logbook.info for example) | |
420 | if record.dispatcher is not None: | |
421 | dispatch = record.dispatcher.call_handlers | |
422 | else: | |
423 | dispatch = dispatch_record | |
410 | 424 | dispatch(record) |
411 | 425 | self.clear() |
412 |
14 | 14 | |
15 | 15 | from logbook.base import NOTSET, ERROR, WARNING |
16 | 16 | from logbook.handlers import Handler, LimitingHandlerMixin |
17 | from logbook.helpers import get_application_name, PY2, http_client | |
17 | from logbook.helpers import get_application_name, PY2, http_client, u | |
18 | 18 | |
19 | 19 | if PY2: |
20 | 20 | from urllib import urlencode |
21 | 21 | else: |
22 | 22 | from urllib.parse import urlencode |
23 | 23 | |
24 | def create_notification_handler(application_name=None, level=NOTSET, icon=None): | |
24 | ||
25 | def create_notification_handler(application_name=None, level=NOTSET, | |
26 | icon=None): | |
25 | 27 | """Creates a handler perfectly fit the current platform. On Linux |
26 | 28 | systems this creates a :class:`LibNotifyHandler`, on OS X systems it |
27 | 29 | will create a :class:`GrowlHandler`. |
150 | 152 | try: |
151 | 153 | from gtk import gdk |
152 | 154 | except ImportError: |
153 | #TODO: raise a warning? | |
155 | # TODO: raise a warning? | |
154 | 156 | raise RuntimeError('The gtk.gdk module is required to set an icon.') |
155 | 157 | |
156 | 158 | if icon is not None: |
196 | 198 | |
197 | 199 | def __init__(self, email, password, record_limit=None, record_delta=None, |
198 | 200 | level=NOTSET, filter=None, bubble=False): |
199 | NotificationBaseHandler.__init__(self, None, record_limit, record_delta, | |
200 | level, filter, bubble) | |
201 | NotificationBaseHandler.__init__(self, None, record_limit, | |
202 | record_delta, level, filter, bubble) | |
201 | 203 | self.email = email |
202 | 204 | self.password = password |
203 | 205 | |
218 | 220 | con = http_client.HTTPSConnection('boxcar.io') |
219 | 221 | con.request('POST', '/notifications/', headers={ |
220 | 222 | 'Authorization': 'Basic ' + |
221 | base64.b64encode((u('%s:%s') % | |
222 | (self.email, self.password)).encode('utf-8')).strip(), | |
223 | base64.b64encode((u('%s:%s') % (self.email, self.password)) | |
224 | .encode('utf-8')).strip(), | |
223 | 225 | }, body=body) |
224 | 226 | con.close() |
225 | 227 | |
230 | 232 | """ |
231 | 233 | |
232 | 234 | def __init__(self, application_name=None, username=None, secret=None, |
233 | record_limit=None, record_delta=None, level=NOTSET, filter=None, | |
234 | bubble=False, hide_level=False): | |
235 | record_limit=None, record_delta=None, level=NOTSET, | |
236 | filter=None, bubble=False, hide_level=False): | |
235 | 237 | try: |
236 | 238 | import notifo |
237 | 239 | except ImportError: |
239 | 241 | 'The notifo module is not available. You have ' |
240 | 242 | 'to install notifo to use the NotifoHandler.' |
241 | 243 | ) |
242 | NotificationBaseHandler.__init__(self, None, record_limit, record_delta, | |
243 | level, filter, bubble) | |
244 | NotificationBaseHandler.__init__(self, None, record_limit, | |
245 | record_delta, level, filter, bubble) | |
244 | 246 | self._notifo = notifo |
245 | 247 | self.application_name = application_name |
246 | 248 | self.username = username |
247 | 249 | self.secret = secret |
248 | 250 | self.hide_level = hide_level |
249 | ||
250 | 251 | |
251 | 252 | def emit(self, record): |
252 | 253 |
32 | 32 | |
33 | 33 | handler = RedisHandler('http://127.0.0.1', port='9200', key='redis') |
34 | 34 | |
35 | If your Redis instance is password protected, you can securely connect passing | |
36 | your password when creating a RedisHandler object. | |
35 | If your Redis instance is password protected, you can securely connect | |
36 | passing your password when creating a RedisHandler object. | |
37 | 37 | |
38 | 38 | Example:: |
39 | 39 | |
41 | 41 | |
42 | 42 | More info about the default buffer size: wp.me/p3tYJu-3b |
43 | 43 | """ |
44 | def __init__(self, host='127.0.0.1', port=6379, key='redis', extra_fields={}, | |
45 | flush_threshold=128, flush_time=1, level=NOTSET, filter=None, | |
46 | password=False, bubble=True, context=None, push_method='rpush'): | |
44 | def __init__(self, host='127.0.0.1', port=6379, key='redis', | |
45 | extra_fields={}, flush_threshold=128, flush_time=1, | |
46 | level=NOTSET, filter=None, password=False, bubble=True, | |
47 | context=None, push_method='rpush'): | |
47 | 48 | Handler.__init__(self, level, filter, bubble) |
48 | 49 | try: |
49 | 50 | import redis |
52 | 53 | raise RuntimeError('The redis library is required for ' |
53 | 54 | 'the RedisHandler') |
54 | 55 | |
55 | self.redis = redis.Redis(host=host, port=port, password=password, decode_responses=True) | |
56 | self.redis = redis.Redis(host=host, port=port, password=password, | |
57 | decode_responses=True) | |
56 | 58 | try: |
57 | 59 | self.redis.ping() |
58 | 60 | except ResponseError: |
59 | raise ResponseError('The password provided is apparently incorrect') | |
61 | raise ResponseError( | |
62 | 'The password provided is apparently incorrect') | |
60 | 63 | self.key = key |
61 | 64 | self.extra_fields = extra_fields |
62 | 65 | self.flush_threshold = flush_threshold |
64 | 67 | self.lock = Lock() |
65 | 68 | self.push_method = push_method |
66 | 69 | |
67 | #Set up a thread that flushes the queue every specified seconds | |
70 | # Set up a thread that flushes the queue every specified seconds | |
68 | 71 | self._stop_event = threading.Event() |
69 | 72 | self._flushing_t = threading.Thread(target=self._flush_task, |
70 | args=(flush_time, self._stop_event)) | |
73 | args=(flush_time, | |
74 | self._stop_event)) | |
71 | 75 | self._flushing_t.daemon = True |
72 | 76 | self._flushing_t.start() |
73 | ||
74 | 77 | |
75 | 78 | def _flush_task(self, time, stop_event): |
76 | 79 | """Calls the method _flush_buffer every certain time. |
80 | 83 | self._flush_buffer() |
81 | 84 | self._stop_event.wait(time) |
82 | 85 | |
83 | ||
84 | 86 | def _flush_buffer(self): |
85 | 87 | """Flushes the messaging queue into Redis. |
86 | 88 | |
92 | 94 | getattr(self.redis, self.push_method)(self.key, *self.queue) |
93 | 95 | self.queue = [] |
94 | 96 | |
95 | ||
96 | 97 | def disable_buffering(self): |
97 | 98 | """Disables buffering. |
98 | 99 | |
101 | 102 | self._stop_event.set() |
102 | 103 | self.flush_threshold = 1 |
103 | 104 | |
104 | ||
105 | 105 | def emit(self, record): |
106 | 106 | """Emits a pair (key, value) to redis. |
107 | 107 | |
108 | The key is the one provided when creating the handler, or redis if none was | |
109 | provided. The value contains both the message and the hostname. Extra values | |
110 | are also appended to the message. | |
108 | The key is the one provided when creating the handler, or redis if none | |
109 | was provided. The value contains both the message and the hostname. | |
110 | Extra values are also appended to the message. | |
111 | 111 | """ |
112 | 112 | with self.lock: |
113 | 113 | r = {"message": record.msg, |
120 | 120 | if len(self.queue) == self.flush_threshold: |
121 | 121 | self._flush_buffer() |
122 | 122 | |
123 | ||
124 | 123 | def close(self): |
125 | 124 | self._flush_buffer() |
126 | 125 | |
132 | 131 | The queue will be filled with JSON exported log records. To receive such |
133 | 132 | log records from a queue you can use the :class:`MessageQueueSubscriber`. |
134 | 133 | |
135 | Example setup:: | |
134 | For an AMQP backend such as RabbitMQ:: | |
135 | ||
136 | handler = MessageQueueHandler('amqp://guest:guest@localhost//') | |
137 | ||
138 | This requires the py-amqp or the librabbitmq client library. | |
139 | ||
140 | For Redis (requires redis client library):: | |
141 | ||
142 | handler = MessageQueueHandler('redis://localhost:8889/0') | |
143 | ||
144 | For MongoDB (requires pymongo):: | |
136 | 145 | |
137 | 146 | handler = MessageQueueHandler('mongodb://localhost:27017/logging') |
147 | ||
148 | Several other backends are also supported. | |
149 | Refer to the `kombu`_ documentation | |
150 | ||
151 | .. _kombu: http://kombu.readthedocs.org/en/latest/introduction.html | |
138 | 152 | """ |
139 | 153 | |
140 | 154 | def __init__(self, uri=None, queue='logging', level=NOTSET, |
141 | filter=None, bubble=False, context=None): | |
155 | filter=None, bubble=False): | |
142 | 156 | Handler.__init__(self, level, filter, bubble) |
143 | 157 | try: |
144 | 158 | import kombu |
205 | 219 | if uri is not None: |
206 | 220 | self.socket.bind(uri) |
207 | 221 | |
208 | ||
209 | 222 | def export_record(self, record): |
210 | 223 | """Exports the record into a dictionary ready for JSON dumping.""" |
211 | 224 | return record.to_dict(json_safe=True) |
212 | 225 | |
213 | 226 | def emit(self, record): |
214 | self.socket.send(json.dumps(self.export_record(record)).encode("utf-8")) | |
227 | self.socket.send(json.dumps( | |
228 | self.export_record(record)).encode("utf-8")) | |
215 | 229 | |
216 | 230 | def close(self, linger=-1): |
217 | 231 | self.socket.close(linger) |
218 | 232 | |
219 | 233 | def __del__(self): |
220 | # When the Handler is deleted we must close our socket in a non-blocking | |
221 | # fashion (using linger). | |
234 | # When the Handler is deleted we must close our socket in a | |
235 | # non-blocking fashion (using linger). | |
222 | 236 | # Otherwise it can block indefinitely, for example if the Subscriber is |
223 | 237 | # not reachable. |
224 | # If messages are pending on the socket, we wait 100ms for them to be sent | |
225 | # then we discard them. | |
238 | # If messages are pending on the socket, we wait 100ms for them to be | |
239 | # sent then we discard them. | |
226 | 240 | self.close(linger=100) |
227 | 241 | |
228 | 242 | |
268 | 282 | """Baseclass for all subscribers.""" |
269 | 283 | |
270 | 284 | def recv(self, timeout=None): |
271 | """Receives a single record from the socket. Timeout of 0 means nonblocking, | |
272 | `None` means blocking and otherwise it's a timeout in seconds after which | |
273 | the function just returns with `None`. | |
285 | """Receives a single record from the socket. Timeout of 0 means | |
286 | nonblocking, `None` means blocking and otherwise it's a timeout in | |
287 | seconds after which the function just returns with `None`. | |
274 | 288 | |
275 | 289 | Subclasses have to override this. |
276 | 290 | """ |
354 | 368 | self.queue.close() |
355 | 369 | |
356 | 370 | def recv(self, timeout=None): |
357 | """Receives a single record from the socket. Timeout of 0 means nonblocking, | |
358 | `None` means blocking and otherwise it's a timeout in seconds after which | |
359 | the function just returns with `None`. | |
371 | """Receives a single record from the socket. Timeout of 0 means | |
372 | nonblocking, `None` means blocking and otherwise it's a timeout in | |
373 | seconds after which the function just returns with `None`. | |
360 | 374 | """ |
361 | 375 | if timeout == 0: |
362 | 376 | try: |
444 | 458 | self.socket.close() |
445 | 459 | |
446 | 460 | def recv(self, timeout=None): |
447 | """Receives a single record from the socket. Timeout of 0 means nonblocking, | |
448 | `None` means blocking and otherwise it's a timeout in seconds after which | |
449 | the function just returns with `None`. | |
461 | """Receives a single record from the socket. Timeout of 0 means | |
462 | nonblocking, `None` means blocking and otherwise it's a timeout in | |
463 | seconds after which the function just returns with `None`. | |
450 | 464 | """ |
451 | 465 | if timeout is None: |
452 | 466 | rv = self.socket.recv() |
576 | 590 | try: |
577 | 591 | rv = self.channel.receive(timeout=timeout) |
578 | 592 | except self.channel.RemoteError: |
579 | #XXX: handle | |
593 | # XXX: handle | |
580 | 594 | return None |
581 | 595 | except (self.channel.TimeoutError, EOFError): |
582 | 596 | return None |
669 | 683 | if record: |
670 | 684 | try: |
671 | 685 | self.queue.put(record, timeout=0.05) |
672 | except Queue.Full: | |
686 | except Full: | |
673 | 687 | pass |
674 | 688 | finally: |
675 | 689 | if self.setup is not None: |
14 | 14 | from logbook.handlers import Handler, HashingHandlerMixin |
15 | 15 | from logbook.helpers import cached_property, b, PY2, u |
16 | 16 | |
17 | ||
17 | 18 | class Ticket(object): |
18 | 19 | """Represents a ticket from the database.""" |
19 | 20 | |
88 | 89 | """Returns the number of tickets.""" |
89 | 90 | raise NotImplementedError() |
90 | 91 | |
91 | def get_tickets(self, order_by='-last_occurrence_time', limit=50, offset=0): | |
92 | def get_tickets(self, order_by='-last_occurrence_time', | |
93 | limit=50, offset=0): | |
92 | 94 | """Selects tickets from the database.""" |
93 | 95 | raise NotImplementedError() |
94 | 96 | |
130 | 132 | |
131 | 133 | def setup_backend(self): |
132 | 134 | from sqlalchemy import create_engine, MetaData |
135 | from sqlalchemy.orm import sessionmaker, scoped_session | |
133 | 136 | engine_or_uri = self.options.pop('uri', None) |
134 | 137 | metadata = self.options.pop('metadata', None) |
135 | 138 | table_prefix = self.options.pop('table_prefix', 'logbook_') |
137 | 140 | if hasattr(engine_or_uri, 'execute'): |
138 | 141 | self.engine = engine_or_uri |
139 | 142 | else: |
140 | self.engine = create_engine(engine_or_uri, convert_unicode=True) | |
143 | # Pool recycle keeps connections from going stale, | |
144 | # which happens in MySQL Databases | |
145 | # Pool size is more custom for our stack | 
146 | self.engine = create_engine(engine_or_uri, convert_unicode=True, | |
147 | pool_recycle=360, pool_size=1000) | |
148 | ||
149 | # Create session factory using session maker | |
150 | session = sessionmaker() | |
151 | ||
152 | # Bind to the engine | 
153 | session.configure(bind=self.engine) | |
154 | ||
155 | # Scoped session is a thread safe solution for | |
156 | # interaction with the Database | |
157 | self.session = scoped_session(session) | |
158 | ||
141 | 159 | if metadata is None: |
142 | 160 | metadata = MetaData() |
143 | 161 | self.table_prefix = table_prefix |
151 | 169 | metadata. |
152 | 170 | """ |
153 | 171 | import sqlalchemy as db |
172 | ||
154 | 173 | def table(name, *args, **kwargs): |
155 | 174 | return db.Table(self.table_prefix + name, self.metadata, |
156 | 175 | *args, **kwargs) |
157 | 176 | self.tickets = table('tickets', |
158 | db.Column('ticket_id', db.Integer, primary_key=True), | |
159 | db.Column('record_hash', db.String(40), unique=True), | |
160 | db.Column('level', db.Integer), | |
161 | db.Column('channel', db.String(120)), | |
162 | db.Column('location', db.String(512)), | |
163 | db.Column('module', db.String(256)), | |
164 | db.Column('last_occurrence_time', db.DateTime), | |
165 | db.Column('occurrence_count', db.Integer), | |
166 | db.Column('solved', db.Boolean), | |
167 | db.Column('app_id', db.String(80)) | |
168 | ) | |
177 | db.Column('ticket_id', db.Integer, | |
178 | primary_key=True), | |
179 | db.Column('record_hash', db.String(40), | |
180 | unique=True), | |
181 | db.Column('level', db.Integer), | |
182 | db.Column('channel', db.String(120)), | |
183 | db.Column('location', db.String(512)), | |
184 | db.Column('module', db.String(256)), | |
185 | db.Column('last_occurrence_time', db.DateTime), | |
186 | db.Column('occurrence_count', db.Integer), | |
187 | db.Column('solved', db.Boolean), | |
188 | db.Column('app_id', db.String(80))) | |
169 | 189 | self.occurrences = table('occurrences', |
170 | db.Column('occurrence_id', db.Integer, primary_key=True), | |
171 | db.Column('ticket_id', db.Integer, | |
172 | db.ForeignKey(self.table_prefix + 'tickets.ticket_id')), | |
173 | db.Column('time', db.DateTime), | |
174 | db.Column('data', db.Text), | |
175 | db.Column('app_id', db.String(80)) | |
176 | ) | |
190 | db.Column('occurrence_id', | |
191 | db.Integer, primary_key=True), | |
192 | db.Column('ticket_id', db.Integer, | |
193 | db.ForeignKey(self.table_prefix + | |
194 | 'tickets.ticket_id')), | |
195 | db.Column('time', db.DateTime), | |
196 | db.Column('data', db.Text), | |
197 | db.Column('app_id', db.String(80))) | |
177 | 198 | |
178 | 199 | def _order(self, q, table, order_by): |
179 | 200 | if order_by[0] == '-': |
182 | 203 | |
183 | 204 | def record_ticket(self, record, data, hash, app_id): |
184 | 205 | """Records a log record as ticket.""" |
185 | cnx = self.engine.connect() | |
186 | trans = cnx.begin() | |
206 | # Can use the session instead engine.connection and transaction | |
207 | s = self.session | |
187 | 208 | try: |
188 | 209 | q = self.tickets.select(self.tickets.c.record_hash == hash) |
189 | row = cnx.execute(q).fetchone() | |
210 | row = s.execute(q).fetchone() | |
190 | 211 | if row is None: |
191 | row = cnx.execute(self.tickets.insert().values( | |
212 | row = s.execute(self.tickets.insert().values( | |
192 | 213 | record_hash=hash, |
193 | 214 | level=record.level, |
194 | 215 | channel=record.channel or u(''), |
201 | 222 | ticket_id = row.inserted_primary_key[0] |
202 | 223 | else: |
203 | 224 | ticket_id = row['ticket_id'] |
204 | cnx.execute(self.occurrences.insert() | |
205 | .values(ticket_id=ticket_id, | |
206 | time=record.time, | |
207 | app_id=app_id, | |
208 | data=json.dumps(data))) | |
209 | cnx.execute(self.tickets.update() | |
225 | s.execute(self.occurrences.insert() | |
226 | .values(ticket_id=ticket_id, | |
227 | time=record.time, | |
228 | app_id=app_id, | |
229 | data=json.dumps(data))) | |
230 | s.execute( | |
231 | self.tickets.update() | |
210 | 232 | .where(self.tickets.c.ticket_id == ticket_id) |
211 | 233 | .values(occurrence_count=self.tickets.c.occurrence_count + 1, |
212 | 234 | last_occurrence_time=record.time, |
213 | 235 | solved=False)) |
214 | trans.commit() | |
236 | s.commit() | |
215 | 237 | except Exception: |
216 | trans.rollback() | |
238 | s.rollback() | |
217 | 239 | raise |
218 | cnx.close() | |
240 | # Closes the session and removes it from the pool | |
241 | s.remove() | |
219 | 242 | |
220 | 243 | def count_tickets(self): |
221 | 244 | """Returns the number of tickets.""" |
222 | 245 | return self.engine.execute(self.tickets.count()).fetchone()[0] |
223 | 246 | |
224 | def get_tickets(self, order_by='-last_occurrence_time', limit=50, offset=0): | |
247 | def get_tickets(self, order_by='-last_occurrence_time', limit=50, | |
248 | offset=0): | |
225 | 249 | """Selects tickets from the database.""" |
226 | 250 | return [Ticket(self, row) for row in self.engine.execute( |
227 | 251 | self._order(self.tickets.select(), self.tickets, order_by) |
230 | 254 | def solve_ticket(self, ticket_id): |
231 | 255 | """Marks a ticket as solved.""" |
232 | 256 | self.engine.execute(self.tickets.update() |
233 | .where(self.tickets.c.ticket_id == ticket_id) | |
234 | .values(solved=True)) | |
257 | .where(self.tickets.c.ticket_id == ticket_id) | |
258 | .values(solved=True)) | |
235 | 259 | |
236 | 260 | def delete_ticket(self, ticket_id): |
237 | 261 | """Deletes a ticket from the database.""" |
238 | 262 | self.engine.execute(self.occurrences.delete() |
239 | .where(self.occurrences.c.ticket_id == ticket_id)) | |
263 | .where(self.occurrences.c.ticket_id == ticket_id)) | |
240 | 264 | self.engine.execute(self.tickets.delete() |
241 | .where(self.tickets.c.ticket_id == ticket_id)) | |
265 | .where(self.tickets.c.ticket_id == ticket_id)) | |
242 | 266 | |
243 | 267 | def get_ticket(self, ticket_id): |
244 | 268 | """Return a single ticket with all occurrences.""" |
250 | 274 | def get_occurrences(self, ticket, order_by='-time', limit=50, offset=0): |
251 | 275 | """Selects occurrences from the database for a ticket.""" |
252 | 276 | return [Occurrence(self, row) for row in |
253 | self.engine.execute(self._order(self.occurrences.select() | |
277 | self.engine.execute(self._order( | |
278 | self.occurrences.select() | |
254 | 279 | .where(self.occurrences.c.ticket_id == ticket), |
255 | 280 | self.occurrences, order_by) |
256 | 281 | .limit(limit).offset(offset)).fetchall()] |
272 | 297 | self.ticket_id = row['ticket_id'] |
273 | 298 | self.occurrence_id = row['_id'] |
274 | 299 | |
275 | #TODO: Update connection setup once PYTHON-160 is solved. | |
300 | # TODO: Update connection setup once PYTHON-160 is solved. | |
276 | 301 | def setup_backend(self): |
277 | import pymongo | |
278 | 302 | from pymongo import ASCENDING, DESCENDING |
279 | 303 | from pymongo.connection import Connection |
280 | 304 | |
312 | 336 | self.database = database |
313 | 337 | |
314 | 338 | # setup correct indexes |
315 | database.tickets.ensure_index([('record_hash', ASCENDING)], unique=True) | |
316 | database.tickets.ensure_index([('solved', ASCENDING), ('level', ASCENDING)]) | |
339 | database.tickets.ensure_index([('record_hash', ASCENDING)], | |
340 | unique=True) | |
341 | database.tickets.ensure_index([('solved', ASCENDING), | |
342 | ('level', ASCENDING)]) | |
317 | 343 | database.occurrences.ensure_index([('time', DESCENDING)]) |
318 | 344 | |
319 | 345 | def _order(self, q, order_by): |
336 | 362 | 'record_hash': hash, |
337 | 363 | 'level': record.level, |
338 | 364 | 'channel': record.channel or u(''), |
339 | 'location': u('%s:%d') % (record.filename, record.lineno), | |
365 | 'location': u('%s:%d') % (record.filename, | |
366 | record.lineno), | |
340 | 367 | 'module': record.module or u('<unknown>'), |
341 | 368 | 'occurrence_count': 0, |
342 | 369 | 'solved': False, |
348 | 375 | |
349 | 376 | db.tickets.update({'_id': ticket_id}, { |
350 | 377 | '$inc': { |
351 | 'occurrence_count': 1 | |
378 | 'occurrence_count': 1 | |
352 | 379 | }, |
353 | 380 | '$set': { |
354 | 381 | 'last_occurrence_time': record.time, |
368 | 395 | """Returns the number of tickets.""" |
369 | 396 | return self.database.tickets.count() |
370 | 397 | |
371 | def get_tickets(self, order_by='-last_occurrence_time', limit=50, offset=0): | |
398 | def get_tickets(self, order_by='-last_occurrence_time', limit=50, | |
399 | offset=0): | |
372 | 400 | """Selects tickets from the database.""" |
373 | query = self._order(self.database.tickets.find(), order_by) \ | |
374 | .limit(limit).skip(offset) | |
401 | query = (self._order(self.database.tickets.find(), order_by) | |
402 | .limit(limit).skip(offset)) | |
375 | 403 | return [self._FixedTicketClass(self, obj) for obj in query] |
376 | 404 | |
377 | 405 | def solve_ticket(self, ticket_id): |
433 | 461 | used to keep multiple application setups apart when logging |
434 | 462 | into the same database. |
435 | 463 | :param hash_salt: an optional salt (binary string) for the hashes. |
436 | :param backend: A backend class that implements the proper database handling. | |
464 | :param backend: A backend class that implements the proper database | |
465 | handling. | |
437 | 466 | Backends available are: :class:`SQLAlchemyBackend`, |
438 | 467 | :class:`MongoDBBackend`. |
439 | 468 | """ |
0 | from contextlib import contextmanager | |
1 | import functools | |
2 | import sys | |
3 | import threading | |
4 | ||
5 | from .base import Logger | |
6 | from .helpers import string_types | |
7 | from logbook import debug as logbook_debug | |
8 | ||
9 | ||
class _SlowContextNotifier(object):
    """Watchdog used by :func:`logged_if_slow`.

    Runs a timer on a helper thread; if the ``with`` block does not exit
    before ``threshold`` seconds elapse, ``logger_func`` is invoked with
    ``args``/``kwargs``.  Exiting the block in time cancels the timer.
    """

    def __init__(self, threshold, logger_func, args, kwargs):
        self.logger_func = logger_func
        self.args = args
        self.kwargs = kwargs if kwargs else {}
        self.evt = threading.Event()
        self.threshold = threshold
        self.thread = threading.Thread(target=self._notifier)

    def _notifier(self):
        # Block until either the context exits (event set) or the
        # threshold expires, whichever comes first.
        self.evt.wait(timeout=self.threshold)
        if self.evt.is_set():
            return  # finished fast enough -- stay quiet
        self.logger_func(*self.args, **self.kwargs)

    def __enter__(self):
        self.thread.start()
        return self

    def __exit__(self, *_):
        self.evt.set()
        self.thread.join()
33 | ||
def logged_if_slow(message, threshold=1, func=logbook_debug, args=None,
                   kwargs=None):
    """Returns a context manager that logs *message* (through *func*, the
    global debug logger by default) when the wrapped block of operations
    takes longer than *threshold* seconds

    >>> with logged_if_slow('too slow!'):
    ...     ...
    """
    extra = () if args is None else tuple(args)
    return _SlowContextNotifier(threshold, func, (message,) + extra, kwargs)
44 | ||
45 | ||
class _Local(threading.local):
    # Per-thread switch controlling whether deprecation messages are
    # emitted; temporarily flipped off by suppressed_deprecations().
    enabled = True

_local = _Local()
50 | ||
51 | ||
@contextmanager
def suppressed_deprecations():
    """Temporarily silences deprecation messages for the current thread

    >>> with suppressed_deprecations():
    ...     call_some_deprecated_logic()
    """
    saved = _local.enabled
    _local.enabled = False
    try:
        yield
    finally:
        # Restore the previous state even if the body raised.
        _local.enabled = saved
65 | ||
66 | ||
# Logger through which all deprecation warnings of this module are emitted.
_deprecation_logger = Logger("deprecation")
# (function name, line number) pairs that already produced a warning; used
# to emit each deprecation from a given call site only once.
_deprecation_locations = set()


def forget_deprecation_locations():
    """Clears the memo of call sites that already warned, so future
    deprecated calls will log again."""
    _deprecation_locations.clear()
73 | ||
74 | ||
def _write_deprecations_if_needed(message, frame_correction):
    """Emits *message* through the deprecation logger, at most once per
    calling location."""
    if not _local.enabled:
        return
    location = _get_caller_location(frame_correction=frame_correction + 1)
    if location in _deprecation_locations:
        return
    _deprecation_logger.warning(
        message, frame_correction=frame_correction + 1)
    _deprecation_locations.add(location)
82 | ||
83 | ||
def log_deprecation_message(message, frame_correction=0):
    """Logs *message* as an explicit deprecation notice."""
    text = "Deprecation message: {0}".format(message)
    _write_deprecations_if_needed(text, frame_correction=frame_correction + 1)
86 | ||
87 | ||
class _DeprecatedFunction(object):
    # Wrapper returned by deprecated().  It logs a deprecation warning each
    # time the wrapped callable is invoked, and implements the descriptor
    # protocol (__get__) so it also works when decorating methods and
    # classmethods.

    def __init__(self, func, message, obj=None, objtype=None):
        super(_DeprecatedFunction, self).__init__()
        self._func = func          # wrapped callable (may be a classmethod object)
        self._message = message    # extra text appended to the warning, or None
        self._obj = obj            # bound instance, when accessed via one
        self._objtype = objtype    # owning class, when accessed via a class

    def _get_underlying_func(self):
        """Returns the plain function object, unwrapping classmethod."""
        returned = self._func
        if isinstance(returned, classmethod):
            if hasattr(returned, '__func__'):
                # classmethod exposes __func__ directly here
                returned = returned.__func__
            else:
                # Otherwise bind against the owner class to reach it
                returned = returned.__get__(self._objtype).__func__
        return returned

    def __call__(self, *args, **kwargs):
        """Logs the deprecation warning, then delegates to the function."""
        func = self._get_underlying_func()
        warning = "{0} is deprecated.".format(self._get_func_str())
        if self._message is not None:
            warning += " {0}".format(self._message)
        # frame_correction=+1 attributes the warning to our caller's location
        _write_deprecations_if_needed(warning, frame_correction=+1)
        if self._obj is not None:
            # Bound to an instance: re-insert it as the first argument
            return func(self._obj, *args, **kwargs)
        elif self._objtype is not None:
            # Classmethod access: pass the owning class instead
            return func(self._objtype, *args, **kwargs)
        return func(*args, **kwargs)

    def _get_func_str(self):
        """Returns a dotted, human-readable name for the warning text."""
        func = self._get_underlying_func()
        if self._objtype is not None:
            return '{0}.{1}'.format(self._objtype.__name__, func.__name__)
        return '{0}.{1}'.format(func.__module__, func.__name__)

    def __get__(self, obj, objtype):
        # Descriptor hook: produce a wrapper bound to the instance/class.
        return self.bound_to(obj, objtype)

    def bound_to(self, obj, objtype):
        """Returns a copy of this wrapper bound to *obj* / *objtype*."""
        return _DeprecatedFunction(self._func, self._message, obj=obj,
                                   objtype=objtype)

    @property
    def __name__(self):
        # Mirror the wrapped function's name.
        return self._get_underlying_func().__name__

    @property
    def __doc__(self):
        # Expose the wrapped function's docstring, annotated with a
        # deprecation notice (and the custom message, if any).
        returned = self._get_underlying_func().__doc__
        if returned:  # pylint: disable=no-member
            returned += "\n.. deprecated\n"  # pylint: disable=no-member
            if self._message:
                returned += " {0}".format(
                    self._message)  # pylint: disable=no-member
        return returned

    @__doc__.setter
    def __doc__(self, doc):
        # Writes through to the wrapped function's docstring.
        self._get_underlying_func().__doc__ = doc
149 | ||
def deprecated(func=None, message=None):
    """Decorator that marks a function as deprecated; a warning is logged
    (with the caller's filename/lineno) each time it is invoked.

    >>> @deprecated(message='No longer supported')
    ... def deprecated_func():
    ...     pass
    """
    if isinstance(func, string_types):
        # Invoked as ``@deprecated('reason')`` -- the first positional
        # argument is actually the message.
        assert message is None
        func, message = None, func

    if func is None:
        # Invoked with arguments only: return the real decorator.
        return functools.partial(deprecated, message=message)

    return _DeprecatedFunction(func, message)
170 | ||
171 | ||
def _get_caller_location(frame_correction):
    """Returns (function name, line number) for the frame
    *frame_correction* + 1 levels above this call."""
    caller = sys._getframe(frame_correction + 1)  # pylint: disable=protected-access
    try:
        return (caller.f_code.co_name, caller.f_lineno)
    finally:
        # Drop the frame reference promptly to avoid reference cycles.
        del caller
26 | 26 | match = re.search('^Version\s+(.*)', line.strip()) |
27 | 27 | if match is None: |
28 | 28 | continue |
29 | length = len(match.group(1)) | |
30 | 29 | version = match.group(1).strip() |
31 | 30 | if lineiter.next().count('-') != len(match.group(0)): |
32 | 31 | continue |
60 | 59 | |
61 | 60 | def set_filename_version(filename, version_number, pattern): |
62 | 61 | changed = [] |
62 | ||
63 | 63 | def inject_version(match): |
64 | 64 | before, old, after = match.groups() |
65 | 65 | changed.append(True) |
75 | 75 | f.write(contents) |
76 | 76 | |
77 | 77 | |
78 | def set_init_version(version): | |
79 | info('Setting __init__.py version to %s', version) | |
80 | set_filename_version('logbook/__init__.py', version, '__version__') | |
81 | ||
82 | ||
83 | def set_setup_version(version): | |
84 | info('Setting setup.py version to %s', version) | |
85 | set_filename_version('setup.py', version, 'version') | |
86 | ||
87 | def set_doc_version(version): | |
88 | info('Setting docs/conf.py version to %s', version) | |
89 | set_filename_version('docs/conf.py', version, 'version') | |
90 | set_filename_version('docs/conf.py', version, 'release') | |
91 | ||
92 | ||
93 | def build_and_upload(): | |
94 | Popen([sys.executable, 'setup.py', 'release', 'sdist', 'upload']).wait() | |
def set_version(version):
    # Announce the change via the script's `info` helper (defined elsewhere
    # in this file).
    info('Setting version to %s', version)
    # Rewrite the single-source version module so the package reports
    # `version`; {!r} keeps the value a valid Python string literal.
    with open('logbook/__version__.py', 'w') as f:
        f.write('__version__ = {!r}'.format(version))
95 | 82 | |
96 | 83 | |
97 | 84 | def fail(message, *args): |
104 | 91 | |
105 | 92 | |
106 | 93 | def get_git_tags(): |
107 | return set(Popen(['git', 'tag'], stdout=PIPE).communicate()[0].splitlines()) | |
94 | return set(Popen(['git', 'tag'], | |
95 | stdout=PIPE).communicate()[0].splitlines()) | |
108 | 96 | |
109 | 97 | |
110 | 98 | def git_is_clean(): |
122 | 110 | |
123 | 111 | |
124 | 112 | parser = argparse.ArgumentParser("%prog [options]") |
125 | parser.add_argument("--no-upload", dest="upload", action="store_false", default=True) | |
113 | parser.add_argument("--no-upload", dest="upload", | |
114 | action="store_false", default=True) | |
115 | ||
126 | 116 | |
127 | 117 | def main(): |
128 | 118 | args = parser.parse_args() |
143 | 133 | if version in tags: |
144 | 134 | fail('Version "%s" is already tagged', version) |
145 | 135 | if release_date.date() != date.today(): |
146 | fail('Release date is not today (%s != %s)' % (release_date.date(), date.today())) | |
136 | fail('Release date is not today (%s != %s)' % | |
137 | (release_date.date(), date.today())) | |
147 | 138 | |
148 | 139 | if not git_is_clean(): |
149 | 140 | fail('You have uncommitted changes in git') |
150 | 141 | |
151 | set_init_version(version) | |
152 | set_setup_version(version) | |
153 | set_doc_version(version) | |
142 | set_version(version) | |
154 | 143 | make_git_commit('Bump version number to %s', version) |
155 | 144 | make_git_tag(version) |
156 | if args.upload: | |
157 | build_and_upload() | |
158 | set_init_version(dev_version) | |
159 | set_setup_version(dev_version) | |
160 | set_doc_version(dev_version) | |
145 | set_version(dev_version) | |
161 | 146 | make_git_commit('Bump version number to %s', dev_version) |
162 | 147 | |
163 | 148 |
0 | 0 | #! /usr/bin/python |
1 | import subprocess | |
2 | import os | |
1 | import pip | |
3 | 2 | import sys |
4 | ||
5 | def _execute(*args, **kwargs): | |
6 | result = subprocess.call(*args, **kwargs) | |
7 | if result != 0: | |
8 | sys.exit(result) | |
9 | 3 | |
10 | 4 | if __name__ == '__main__': |
11 | 5 | python_version = sys.version_info |
23 | 17 | else: |
24 | 18 | deps.append("Jinja2") |
25 | 19 | print("Setting up dependencies...") |
26 | _execute([os.path.join(os.path.dirname(sys.executable), "pip"), "install"] + deps, shell=False) | |
20 | result = pip.main(["install"] + deps) | |
21 | sys.exit(result) |
12 | 12 | print("PyPy+Cython configuration skipped") |
13 | 13 | else: |
14 | 14 | sys.exit( |
15 | subprocess.call("make cybuild test" if use_cython else "make test", shell=True) | |
15 | subprocess.call( | |
16 | "make cybuild test" if use_cython else "make test", shell=True) | |
16 | 17 | ) |
4 | 4 | |
5 | 5 | [upload_docs] |
6 | 6 | upload-dir = docs/_build/html |
7 | ||
8 | [egg_info] | |
9 | tag_date = true | |
10 | ||
11 | [aliases] | |
12 | release = egg_info -RDb '' |
55 | 55 | import sys |
56 | 56 | from setuptools import setup, Extension, Feature |
57 | 57 | from distutils.command.build_ext import build_ext |
58 | from distutils.errors import CCompilerError, DistutilsExecError, \ | |
59 | DistutilsPlatformError | |
58 | from distutils.errors import ( | |
59 | CCompilerError, DistutilsExecError, DistutilsPlatformError) | |
60 | 60 | |
61 | 61 | |
62 | 62 | extra = {} |
91 | 91 | |
92 | 92 | cmdclass['build_ext'] = ve_build_ext |
93 | 93 | # Don't try to compile the extension if we're running on PyPy |
94 | if os.path.isfile('logbook/_speedups.c') and not hasattr(sys, "pypy_translation_info"): | |
94 | if (os.path.isfile('logbook/_speedups.c') and | |
95 | not hasattr(sys, "pypy_translation_info")): | |
95 | 96 | speedups = Feature('optional C speed-enhancement module', standard=True, |
96 | 97 | ext_modules=[Extension('logbook._speedups', |
97 | 98 | ['logbook/_speedups.c'])]) |
98 | 99 | else: |
99 | 100 | speedups = None |
101 | ||
102 | ||
103 | with open(os.path.join(os.path.dirname(__file__), "logbook", "__version__.py")) as version_file: | |
104 | exec(version_file.read()) # pylint: disable=W0122 | |
100 | 105 | |
101 | 106 | |
102 | 107 | def run_setup(with_binary): |
105 | 110 | features['speedups'] = speedups |
106 | 111 | setup( |
107 | 112 | name='Logbook', |
108 | version='0.10.0', | |
113 | version=__version__, | |
109 | 114 | license='BSD', |
110 | 115 | url='http://logbook.pocoo.org/', |
111 | 116 | author='Armin Ronacher, Georg Brandl', |
116 | 121 | zip_safe=False, |
117 | 122 | platforms='any', |
118 | 123 | cmdclass=cmdclass, |
124 | classifiers=[ | |
125 | "Programming Language :: Python :: 2.6", | |
126 | "Programming Language :: Python :: 2.7", | |
127 | "Programming Language :: Python :: 3.2", | |
128 | "Programming Language :: Python :: 3.3", | |
129 | "Programming Language :: Python :: 3.4", | |
130 | "Programming Language :: Python :: 3.5", | |
131 | ], | |
119 | 132 | features=features, |
120 | 133 | install_requires=[ |
121 | 'six>=1.4.0', | |
122 | 134 | ], |
123 | 135 | **extra |
124 | 136 | ) |
1 | 1 | import pytest |
2 | 2 | |
3 | 3 | logbook.StderrHandler().push_application() |
4 | ||
4 | 5 | |
5 | 6 | @pytest.fixture |
6 | 7 | def logger(): |
90 | 91 | |
91 | 92 | try: |
92 | 93 | import gevent |
93 | ||
94 | except ImportError: | |
95 | pass | |
96 | else: | |
94 | 97 | @pytest.fixture(scope="module", autouse=True, params=[False, True]) |
95 | 98 | def gevent(request): |
96 | 99 | module_name = getattr(request.module, '__name__', '') |
97 | if not any(s in module_name for s in ('queues', 'processors')) and request.param: | |
100 | if (not any(s in module_name for s in ('queues', 'processors')) | |
101 | and request.param): | |
98 | 102 | from logbook.concurrency import enable_gevent, _disable_gevent |
99 | 103 | enable_gevent() |
100 | 104 | |
101 | 105 | @request.addfinalizer |
102 | 106 | def fin(): |
103 | 107 | _disable_gevent() |
104 | except ImportError: | |
105 | pass |
27 | 27 | def test_deadlock_in_emit(): |
28 | 28 | logbook_logger = logbook.Logger("logbook") |
29 | 29 | obj = MyObject(logbook_logger.info) |
30 | stream_handler = logbook.StreamHandler(stream=sys.stderr, level=logbook.DEBUG) | |
30 | stream_handler = logbook.StreamHandler(stream=sys.stderr, | |
31 | level=logbook.DEBUG) | |
31 | 32 | stream_handler.lock = FakeLock() |
32 | 33 | with stream_handler.applicationbound(): |
33 | 34 | logbook_logger.info("format this: {}", obj) |
8 | 8 | |
9 | 9 | |
10 | 10 | def test_file_handler(logfile, activation_strategy, logger): |
11 | handler = logbook.FileHandler(logfile, | |
12 | format_string='{record.level_name}:{record.channel}:' | |
13 | '{record.message}',) | |
11 | handler = logbook.FileHandler( | |
12 | logfile, | |
13 | format_string='{record.level_name}:{record.channel}:{record.message}',) | |
14 | 14 | with activation_strategy(handler): |
15 | 15 | logger.warn('warning message') |
16 | 16 | handler.close() |
26 | 26 | |
27 | 27 | |
28 | 28 | def test_file_handler_delay(logfile, activation_strategy, logger): |
29 | handler = logbook.FileHandler(logfile, | |
30 | format_string='{record.level_name}:{record.channel}:' | |
31 | '{record.message}', delay=True) | |
29 | handler = logbook.FileHandler( | |
30 | logfile, | |
31 | format_string='{record.level_name}:{record.channel}:{record.message}', | |
32 | delay=True) | |
32 | 33 | assert (not os.path.isfile(logfile)) |
33 | 34 | with activation_strategy(handler): |
34 | 35 | logger.warn('warning message') |
40 | 41 | |
41 | 42 | def test_monitoring_file_handler(logfile, activation_strategy, logger): |
42 | 43 | if os.name == 'nt': |
43 | pytest.skip('unsupported on windows due to different IO (also unneeded)') | |
44 | handler = logbook.MonitoringFileHandler(logfile, | |
45 | format_string='{record.level_name}:{record.channel}:' | |
46 | '{record.message}', delay=True) | |
44 | pytest.skip( | |
45 | 'unsupported on windows due to different IO (also unneeded)') | |
46 | handler = logbook.MonitoringFileHandler( | |
47 | logfile, | |
48 | format_string='{record.level_name}:{record.channel}:{record.message}', | |
49 | delay=True) | |
47 | 50 | with activation_strategy(handler): |
48 | 51 | logger.warn('warning message') |
49 | 52 | os.rename(logfile, logfile + '.old') |
79 | 82 | if x.startswith(basename)] |
80 | 83 | files.sort() |
81 | 84 | |
82 |