Package list logbook / c52b3a1
Merge tag 'upstream/0.12.3' Upstream version 0.12.3 Agustin Henze 5 years ago
64 changed file(s) with 1733 addition(s) and 581 deletion(s). Raw diff Collapse all Expand all
IF DEFINED CYBUILD (
    REM Build a binary wheel using the configured compiler environment.
    %WITH_COMPILER% python setup.py bdist_wheel
    IF "%APPVEYOR_REPO_TAG%"=="true" (
        REM Tagged builds are releases: push the built wheel(s) to PyPI.
        twine upload -u %PYPI_USERNAME% -p %PYPI_PASSWORD% dist\*.whl
    )
)
# Sample script to install Python and pip under Windows
# Authors: Olivier Grisel, Jonathan Helmus, Kyle Kastner, and Alex Willmer
# License: CC0 1.0 Universal: http://creativecommons.org/publicdomain/zero/1.0/

# Use HTTPS for the Miniconda mirror so executable installers cannot be
# tampered with in transit (the python.org and PyPA URLs already use HTTPS).
$MINICONDA_URL = "https://repo.continuum.io/miniconda/"
$BASE_URL = "https://www.python.org/ftp/python/"
$GET_PIP_URL = "https://bootstrap.pypa.io/get-pip.py"
$GET_PIP_PATH = "C:\get-pip.py"

# Matches pre-release version strings such as "3.5.0rc1" into the named
# groups (major, minor, micro, prerelease); consumed by ParsePythonVersion.
$PYTHON_PRERELEASE_REGEX = @"
(?x)
(?<major>\d+)
\.
(?<minor>\d+)
\.
(?<micro>\d+)
(?<prerelease>[a-z]{1,2}\d+)
"@
19
function Download ($filename, $url) {
    # Fetch $url into the current directory, reusing an existing local copy
    # when present. Returns the absolute path of the file.
    $client = New-Object System.Net.WebClient

    $basedir = $pwd.Path + "\"
    $filepath = $basedir + $filename
    if (Test-Path $filename) {
        Write-Host "Reusing" $filepath
        return $filepath
    }

    # Download and retry up to 3 times in case of network transient errors.
    Write-Host "Downloading" $filename "from" $url
    $retry_attempts = 2
    for ($attempt = 0; $attempt -lt $retry_attempts; $attempt++) {
        try {
            $client.DownloadFile($url, $filepath)
            break
        }
        catch [Exception] {
            Start-Sleep 1
        }
    }
    if (Test-Path $filepath) {
        Write-Host "File saved at" $filepath
    } else {
        # Retry once to get the error message if any at the last try:
        # this final attempt runs outside try/catch so the failure surfaces.
        $client.DownloadFile($url, $filepath)
    }
    return $filepath
}
50
51
function ParsePythonVersion ($python_version) {
    # Split a version string into (major, minor, micro, prerelease).
    # Pre-release strings (e.g. "3.5.0rc1") are recognized via the
    # $PYTHON_PRERELEASE_REGEX defined at the top of this script.
    if ($python_version -match $PYTHON_PRERELEASE_REGEX) {
        return ([int]$matches.major, [int]$matches.minor, [int]$matches.micro,
                $matches.prerelease)
    }
    $parsed = [version]$python_version
    return ($parsed.major, $parsed.minor, $parsed.build, "")
}
60
61
function DownloadPython ($python_version, $platform_suffix) {
    # Build the python.org download URL for the requested version/platform
    # and fetch the installer. Returns the local installer path.
    $major, $minor, $micro, $prerelease = ParsePythonVersion $python_version

    # x.y.0 releases live under an "x.y" directory and drop the trailing
    # ".0" from the installer file name.
    if (($major -le 2 -and $micro -eq 0) `
        -or ($major -eq 3 -and $minor -le 2 -and $micro -eq 0) `
        ) {
        $dir = "$major.$minor"
        $python_version = "$major.$minor$prerelease"
    } else {
        $dir = "$major.$minor.$micro"
    }

    # Older pre-releases are published in a "prev" subdirectory.
    if ($prerelease) {
        if (($major -le 2) `
            -or ($major -eq 3 -and $minor -eq 1) `
            -or ($major -eq 3 -and $minor -eq 2) `
            -or ($major -eq 3 -and $minor -eq 3) `
            ) {
            $dir = "$dir/prev"
        }
    }

    # Python <= 3.4 ships .msi installers named "python-X.Y.Z.amd64.msi";
    # 3.5+ ships .exe installers named "python-X.Y.Z-amd64.exe".
    if (($major -le 2) -or ($major -le 3 -and $minor -le 4)) {
        $ext = "msi"
        if ($platform_suffix) {
            $platform_suffix = ".$platform_suffix"
        }
    } else {
        $ext = "exe"
        if ($platform_suffix) {
            $platform_suffix = "-$platform_suffix"
        }
    }

    $filename = "python-$python_version$platform_suffix.$ext"
    $url = "$BASE_URL$dir/$filename"
    $filepath = Download $filename $url
    return $filepath
}
101
102
function InstallPython ($python_version, $architecture, $python_home) {
    # Download and silently install the requested CPython into $python_home,
    # skipping if that directory already exists. Exits the script on failure.
    Write-Host "Installing Python" $python_version "for" $architecture "bit architecture to" $python_home
    if (Test-Path $python_home) {
        Write-Host $python_home "already exists, skipping."
        return $false
    }
    if ($architecture -eq "32") {
        $platform_suffix = ""
    } else {
        $platform_suffix = "amd64"
    }
    $installer_path = DownloadPython $python_version $platform_suffix
    $installer_ext = [System.IO.Path]::GetExtension($installer_path)
    Write-Host "Installing $installer_path to $python_home"
    $install_log = $python_home + ".log"
    # MSI installers (Python <= 3.4) and EXE installers (3.5+) need
    # different command-line flags; dispatch on the file extension.
    if ($installer_ext -eq '.msi') {
        InstallPythonMSI $installer_path $python_home $install_log
    } else {
        InstallPythonEXE $installer_path $python_home $install_log
    }
    if (Test-Path $python_home) {
        Write-Host "Python $python_version ($architecture) installation complete"
    } else {
        Write-Host "Failed to install Python in $python_home"
        Get-Content -Path $install_log
        Exit 1
    }
}
131
132
function InstallPythonEXE ($exepath, $python_home, $install_log) {
    # Quiet all-users install for the 3.5+ style .exe installer.
    # NOTE(review): $install_log is accepted but never passed to the
    # installer here — confirm whether .exe install logging is needed.
    $install_args = "/quiet InstallAllUsers=1 TargetDir=$python_home"
    RunCommand $exepath $install_args
}
137
138
function InstallPythonMSI ($msipath, $python_home, $install_log) {
    # Quiet MSI install into $python_home, logging to $install_log.
    $install_args = "/qn /log $install_log /i $msipath TARGETDIR=$python_home"
    $uninstall_args = "/qn /x $msipath"
    RunCommand "msiexec.exe" $install_args
    if (-not(Test-Path $python_home)) {
        # The same MSI may already be installed under another prefix, in
        # which case TARGETDIR is ignored; uninstall it and retry.
        Write-Host "Python seems to be installed else-where, reinstalling."
        RunCommand "msiexec.exe" $uninstall_args
        RunCommand "msiexec.exe" $install_args
    }
}
149
function RunCommand ($command, $command_args) {
    # Echo and synchronously run an external command.
    # NOTE(review): the process exit code is not checked here; callers
    # detect failure only via later Test-Path checks — confirm acceptable.
    Write-Host $command $command_args
    Start-Process -FilePath $command -ArgumentList $command_args -Wait -Passthru
}
154
155
function InstallPip ($python_home) {
    # Bootstrap pip via get-pip.py when $python_home\Scripts\pip.exe is absent.
    $pip_path = $python_home + "\Scripts\pip.exe"
    $python_path = $python_home + "\python.exe"
    if (-not(Test-Path $pip_path)) {
        Write-Host "Installing pip..."
        $downloader = New-Object System.Net.WebClient
        $downloader.DownloadFile($GET_PIP_URL, $GET_PIP_PATH)
        Write-Host "Executing:" $python_path $GET_PIP_PATH
        & $python_path $GET_PIP_PATH
    } else {
        Write-Host "pip already installed."
    }
}
169
170
function DownloadMiniconda ($python_version, $platform_suffix) {
    # Fetch the Miniconda installer for the given Python version/platform.
    # NOTE(review): only the literal "3.4" selects Miniconda3; every other
    # version string falls back to the Python-2 Miniconda installer —
    # confirm this is intended for 3.5 and later.
    if ($python_version -eq "3.4") {
        $filename = "Miniconda3-3.5.5-Windows-" + $platform_suffix + ".exe"
    } else {
        $filename = "Miniconda-3.5.5-Windows-" + $platform_suffix + ".exe"
    }
    $url = $MINICONDA_URL + $filename
    $filepath = Download $filename $url
    return $filepath
}
181
182
function InstallMiniconda ($python_version, $architecture, $python_home) {
    # Download and silently install Miniconda into $python_home, skipping
    # if that directory already exists. Exits the script on failure.
    Write-Host "Installing Python" $python_version "for" $architecture "bit architecture to" $python_home
    if (Test-Path $python_home) {
        Write-Host $python_home "already exists, skipping."
        return $false
    }
    if ($architecture -eq "32") {
        $platform_suffix = "x86"
    } else {
        $platform_suffix = "x86_64"
    }
    $filepath = DownloadMiniconda $python_version $platform_suffix
    Write-Host "Installing" $filepath "to" $python_home
    $install_log = $python_home + ".log"
    # Do not assign to $args: it is a PowerShell automatic variable holding
    # the function's unbound arguments, and shadowing it is fragile.
    $install_args = "/S /D=$python_home"
    Write-Host $filepath $install_args
    Start-Process -FilePath $filepath -ArgumentList $install_args -Wait -Passthru
    if (Test-Path $python_home) {
        Write-Host "Python $python_version ($architecture) installation complete"
    } else {
        Write-Host "Failed to install Python in $python_home"
        Get-Content -Path $install_log
        Exit 1
    }
}
208
209
function InstallMinicondaPip ($python_home) {
    # Install pip through conda when $python_home\Scripts\pip.exe is absent.
    $pip_path = $python_home + "\Scripts\pip.exe"
    $conda_path = $python_home + "\Scripts\conda.exe"
    if (-not(Test-Path $pip_path)) {
        Write-Host "Installing pip..."
        # Renamed from $args: $args is a PowerShell automatic variable and
        # must not be clobbered inside a function.
        $conda_args = "install --yes pip"
        Write-Host $conda_path $conda_args
        Start-Process -FilePath "$conda_path" -ArgumentList $conda_args -Wait -Passthru
    } else {
        Write-Host "pip already installed."
    }
}
222
function main () {
    # Entry point. PYTHON_VERSION, PYTHON_ARCH and PYTHON are supplied by
    # the CI environment (see appveyor.yml).
    InstallPython $env:PYTHON_VERSION $env:PYTHON_ARCH $env:PYTHON
    InstallPip $env:PYTHON
}

main
REM Install the build and test prerequisites for the AppVeyor run.
pip install wheel

REM Redis server, needed by the redis handler tests.
nuget install redis-64 -excludeversion
redis-64\redis-server.exe --service-install
redis-64\redis-server.exe --service-start

REM ZeroMQ + pyzmq, needed by the queue handler tests.
nuget install ZeroMQ
%WITH_COMPILER% pip install cython pyzmq

python scripts\test_setup.py
python setup.py develop

IF DEFINED CYBUILD (
    REM Compile the optional C speedups when CYBUILD is set.
    cython logbook\_speedups.pyx
    %WITH_COMPILER% python setup.py build
    pip install twine
)
:: To build extensions for 64 bit Python 3, we need to configure environment
:: variables to use the MSVC 2010 C++ compilers from GRMSDKX_EN_DVD.iso of:
:: MS Windows SDK for Windows 7 and .NET Framework 4 (SDK v7.1)
::
:: To build extensions for 64 bit Python 2, we need to configure environment
:: variables to use the MSVC 2008 C++ compilers from GRMSDKX_EN_DVD.iso of:
:: MS Windows SDK for Windows 7 and .NET Framework 3.5 (SDK v7.0)
::
:: 32 bit builds, and 64-bit builds for 3.5 and beyond, do not require specific
:: environment configurations.
::
:: Note: this script needs to be run with the /E:ON and /V:ON flags for the
:: cmd interpreter, at least for (SDK v7.0)
::
:: More details at:
:: https://github.com/cython/cython/wiki/64BitCythonExtensionsOnWindows
:: http://stackoverflow.com/a/13751649/163740
::
:: Author: Olivier Grisel
:: License: CC0 1.0 Universal: http://creativecommons.org/publicdomain/zero/1.0/
::
:: Notes about batch files for Python people:
::
:: Quotes in values are literally part of the values:
:: SET FOO="bar"
:: FOO is now five characters long: " b a r "
:: If you don't want quotes, don't include them on the right-hand side.
::
:: The CALL lines at the end of this file look redundant, but if you move them
:: outside of the IF clauses, they do not run properly in the SET_SDK_64==Y
:: case, I don't know why.
@ECHO OFF

SET COMMAND_TO_RUN=%*
SET WIN_SDK_ROOT=C:\Program Files\Microsoft SDKs\Windows
SET WIN_WDK=c:\Program Files (x86)\Windows Kits\10\Include\wdf

:: Extract the major and minor versions, and allow for the minor version to be
:: more than 9. This requires the version number to have two dots in it.
SET MAJOR_PYTHON_VERSION=%PYTHON_VERSION:~0,1%
IF "%PYTHON_VERSION:~3,1%" == "." (
    SET MINOR_PYTHON_VERSION=%PYTHON_VERSION:~2,1%
) ELSE (
    SET MINOR_PYTHON_VERSION=%PYTHON_VERSION:~2,2%
)

:: Based on the Python version, determine what SDK version to use, and whether
:: to set the SDK for 64-bit.
IF %MAJOR_PYTHON_VERSION% == 2 (
    SET WINDOWS_SDK_VERSION="v7.0"
    SET SET_SDK_64=Y
) ELSE (
    IF %MAJOR_PYTHON_VERSION% == 3 (
        SET WINDOWS_SDK_VERSION="v7.1"
        IF %MINOR_PYTHON_VERSION% LEQ 4 (
            SET SET_SDK_64=Y
        ) ELSE (
            SET SET_SDK_64=N
            IF EXIST "%WIN_WDK%" (
                REM Must be REM, not "::": a label-style comment inside a
                REM parenthesized block is misparsed by cmd.exe.
                REM See: https://connect.microsoft.com/VisualStudio/feedback/details/1610302/
                REN "%WIN_WDK%" 0wdf
            )
        )
    ) ELSE (
        ECHO Unsupported Python version: "%MAJOR_PYTHON_VERSION%"
        EXIT 1
    )
)

IF %PYTHON_ARCH% == 64 (
    IF %SET_SDK_64% == Y (
        ECHO Configuring Windows SDK %WINDOWS_SDK_VERSION% for Python %MAJOR_PYTHON_VERSION% on a 64 bit architecture
        SET DISTUTILS_USE_SDK=1
        SET MSSdk=1
        "%WIN_SDK_ROOT%\%WINDOWS_SDK_VERSION%\Setup\WindowsSdkVer.exe" -q -version:%WINDOWS_SDK_VERSION%
        "%WIN_SDK_ROOT%\%WINDOWS_SDK_VERSION%\Bin\SetEnv.cmd" /x64 /release
        ECHO Executing: %COMMAND_TO_RUN%
        call %COMMAND_TO_RUN% || EXIT 1
    ) ELSE (
        ECHO Using default MSVC build environment for 64 bit architecture
        ECHO Executing: %COMMAND_TO_RUN%
        call %COMMAND_TO_RUN% || EXIT 1
    )
) ELSE (
    ECHO Using default MSVC build environment for 32 bit architecture
    ECHO Executing: %COMMAND_TO_RUN%
    call %COMMAND_TO_RUN% || EXIT 1
)
1212 build
1313 .vagrant
1414 flycheck-*
15 .cache
00 language: python
1
21 services:
3 - redis-server
4
2 - redis-server
53 python:
6 - "2.6"
7 - "2.7"
8 - "3.2"
9 - "3.3"
10 - "3.4"
11 - "pypy"
12 - "pypy3"
13
4 - '2.6'
5 - '2.7'
6 - '3.2'
7 - '3.3'
8 - '3.4'
9 - '3.5'
10 - pypy
11 - pypy3
1412 install:
15 # this fixes SemLock issues on travis
16 - "sudo rm -rf /dev/shm && sudo ln -s /run/shm /dev/shm"
17 - "sudo apt-add-repository -y ppa:chris-lea/zeromq"
18 - "sudo apt-get update"
19 - "sudo apt-get install -y libzmq3-dev"
20 - "pip install cython redis"
21 - "easy_install pyzmq"
22 - "make test_setup"
23 - "python setup.py develop"
24
13 - sudo rm -rf /dev/shm && sudo ln -s /run/shm /dev/shm
14 - sudo apt-add-repository -y ppa:chris-lea/zeromq
15 - sudo apt-get update
16 - sudo apt-get install -y libzmq3-dev
17 - pip install cython redis
18 - easy_install pyzmq
19 - make test_setup
20 - python setup.py develop
2521 env:
26 - COMMAND="make test"
27 - COMMAND="make cybuild test"
28
29 script: "$COMMAND"
30
22 - COMMAND="make test"
23 - COMMAND="make cybuild test"
24 script: $COMMAND
3125 matrix:
3226 exclude:
33 - python: "pypy"
34 env: COMMAND="make cybuild test"
35 - python: "pypy3"
36 env: COMMAND="make cybuild test"
37
27 - python: pypy
28 env: COMMAND="make cybuild test"
29 - python: pypy3
30 env: COMMAND="make cybuild test"
3831 notifications:
39 email:
32 email:
4033 recipients:
41 - vmalloc@gmail.com
34 - vmalloc@gmail.com
4235 irc:
4336 channels:
44 - "chat.freenode.net#pocoo"
37 - chat.freenode.net#pocoo
4538 on_success: change
4639 on_failure: always
4740 use_notice: true
4841 skip_join: true
42 before_deploy:
43 - make logbook/_speedups.so
44 deploy:
45 provider: pypi
46 user: vmalloc
47 password:
48 secure: WFmuAbtBDIkeZArIFQRCwyO1TdvF2PaZpo75r3mFgnY+aWm75cdgjZKoNqVprF/f+v9EsX2kDdQ7ZfuhMLgP8MNziB+ty7579ZDGwh64jGoi+DIoeblAFu5xNAqjvhie540uCE8KySk9s+Pq5EpOA5w18V4zxTw+h6tnBQ0M9cQ=
49 on:
50 tags: true
51 repo: getlogbook/logbook
52 distributions: "sdist bdist_egg"
1414 - Roman Valls Guimera
1515 - Guillermo Carrasco Hernández
1616 - Raphaël Vinot
17 - Rotem Yaari
18 - Frazer McLean
19
11 =================
22
33 Here you can see the full list of changes between each Logbook release.
4
5 Version 0.12.0
6 --------------
7
8 Released on November 24th 2015
9
10 - Added logbook.utils.deprecated to automatically emit warnings when certain functions are called (Thanks Ayala Shachar)
11 - Added logbook.utils.suppressed_deprecations context to temporarily suppress deprecations (Thanks Ayala Shachar)
12 - Added logbook.utils.logged_if_slow_context to emit logs when certain operations exceed a time threshold (Thanks Ayala Shachar)
13 - Many PEP8 fixes and code cleanups (thanks Taranjeet Singh and Frazer McLean)
14 - TestHandler constructor now receives an optional `force_heavy_init=True`, forcing all records to heavy-initialize
15
16
17 Version 0.11.3
18 --------------
19
20 Released on November 5th 2015
21
22 - Windows-specific fixes and CI configuration (Thanks Frazer McLean)
23 - Several Unicode-specific fixes (Thanks Frazer McLean)
24 - Documentation cleanups
25
26 Version 0.11.2
27 --------------
28
29 Released on September 29th 2015
30
31 - Fix importing issue with SQLAlchemy ticketing handler
32
33 Version 0.11.0
34 --------------
35
36 Released on September 29th 2015
37
38 - Added TRACE log level for enabling logs below DEBUG
39 - Improved SQLAlchemy session management (thanks @fintan)
40 - Removed the ``bubble`` argument from NullHandler, preventing many future confusions
41
42 Version 0.10.1
43 --------------
44
45 Released on August 4th 2015
46
47 - Small bugfix supporting exc_info=False without breaking formatting
448
549 Version 0.10.0
650 --------------
88 @python scripts/test_setup.py
99
1010 test:
11 @py.test tests
11 @py.test -r s tests
1212
1313 toxtest:
1414 @tox
2020 bench:
2121 @python benchmark/run.py
2222
23 upload-docs: docs
24 python setup.py upload_docs
25
2623 docs:
2724 make -C docs html SPHINXOPTS=-Aonline=1
2825
29 release: logbook/_speedups.so upload-docs
26 release: logbook/_speedups.so
3027 python scripts/make-release.py
3128
3229 logbook/_speedups.so: logbook/_speedups.pyx
0 # Welcome to Logbook
1
2 | | |
3 |--------------------|-----------------------------|
4 | Travis | [![Build Status][ti]][tl] |
5 | AppVeyor | [![Build Status][ai]][al] |
6 | Supported Versions | ![Supported Versions][vi] |
7 | Downloads | ![Downloads][di] |
8 | Latest Version | [![Latest Version][pi]][pl] |
9
10
11 Logbook is a nice logging replacement.
12
13 It should be easy to setup, use and configure and support web applications :)
14
15 For more information: http://logbook.readthedocs.org
16
17 [ti]: https://secure.travis-ci.org/getlogbook/logbook.svg?branch=master
18 [tl]: https://travis-ci.org/getlogbook/logbook
19 [ai]: https://ci.appveyor.com/api/projects/status/quu99exa26e06npp?svg=true
20 [vi]: https://img.shields.io/pypi/pyversions/logbook.svg
21 [di]: https://img.shields.io/pypi/dm/logbook.svg
22 [al]: https://ci.appveyor.com/project/vmalloc/logbook
23 [pi]: https://img.shields.io/pypi/v/logbook.svg
24 [pl]: https://pypi.python.org/pypi/Logbook
+0
-17
README.rst less more
0 Welcome to Logbook
1 ==================
2
3 .. image:: https://secure.travis-ci.org/mitsuhiko/logbook.png
4 :target: https://travis-ci.org/mitsuhiko/logbook
5
6 .. image:: https://pypip.in/d/Logbook/badge.png
7 :target: https://crate.io/packages/Logbook
8
9 .. image:: https://pypip.in/v/Logbook/badge.png
10 :target: https://crate.io/packages/Logbook
11
12 Logbook is a nice logging replacement.
13
14 It should be easy to setup, use and configure and support web applications :)
15
16 For more information look at http://pythonhosted.org/Logbook
0 cache:
1 - C:\Users\appveyor\AppData\Local\pip\Cache\wheels
2
3 environment:
4 global:
5 # SDK v7.0 MSVC Express 2008's SetEnv.cmd script will fail if the
6 # /E:ON and /V:ON options are not enabled in the batch script interpreter
7 # See: http://stackoverflow.com/a/13751649/163740
8 WITH_COMPILER: "cmd /E:ON /V:ON /C .\\.appveyor\\run_with_compiler.cmd"
9 PYPI_USERNAME:
10 secure: ixvjwUN/HsSfGkU3OvtQ8Q==
11 PYPI_PASSWORD:
12 secure: KOr+oEHZJmo1el3bT+ivmQ==
13
14 matrix:
15 # Python 2.6.6 is the latest Python 2.6 with a Windows installer
16 # See: https://github.com/ogrisel/python-appveyor-demo/issues/10
17
18 - PYTHON: "C:\\Python266"
19 PYTHON_VERSION: "2.6.6"
20 PYTHON_ARCH: "32"
21
22 - PYTHON: "C:\\Python266"
23 PYTHON_VERSION: "2.6.6"
24 PYTHON_ARCH: "32"
25 CYBUILD: "TRUE"
26
27 - PYTHON: "C:\\Python266-x64"
28 PYTHON_VERSION: "2.6.6"
29 PYTHON_ARCH: "64"
30
31 - PYTHON: "C:\\Python266-x64"
32 PYTHON_VERSION: "2.6.6"
33 PYTHON_ARCH: "64"
34 CYBUILD: "TRUE"
35
36 # Pre-installed Python versions, which Appveyor may upgrade to
37 # a later point release.
38 # See: http://www.appveyor.com/docs/installed-software#python
39
40 - PYTHON: "C:\\Python27"
41 PYTHON_VERSION: "2.7.x"
42 PYTHON_ARCH: "32"
43
44 - PYTHON: "C:\\Python27"
45 PYTHON_VERSION: "2.7.x"
46 PYTHON_ARCH: "32"
47 CYBUILD: "TRUE"
48
49 - PYTHON: "C:\\Python27-x64"
50 PYTHON_VERSION: "2.7.x"
51 PYTHON_ARCH: "64"
52
53 - PYTHON: "C:\\Python27-x64"
54 PYTHON_VERSION: "2.7.x"
55 PYTHON_ARCH: "64"
56 CYBUILD: "TRUE"
57
58 # Python 3.2 isn't preinstalled
59
60 - PYTHON: "C:\\Python325"
61 PYTHON_VERSION: "3.2.5"
62 PYTHON_ARCH: "32"
63
64 - PYTHON: "C:\\Python325"
65 PYTHON_VERSION: "3.2.5"
66 PYTHON_ARCH: "32"
67 CYBUILD: "TRUE"
68
69 - PYTHON: "C:\\Python325-x64"
70 PYTHON_VERSION: "3.2.5"
71 PYTHON_ARCH: "64"
72
73 - PYTHON: "C:\\Python325-x64"
74 PYTHON_VERSION: "3.2.5"
75 PYTHON_ARCH: "64"
76 CYBUILD: "TRUE"
77
78 # Pre-installed Python versions, which Appveyor may upgrade to
79 # a later point release.
80 # See: http://www.appveyor.com/docs/installed-software#python
81
82 - PYTHON: "C:\\Python33"
83 PYTHON_VERSION: "3.3.x"
84 PYTHON_ARCH: "32"
85
86 - PYTHON: "C:\\Python33"
87 PYTHON_VERSION: "3.3.x"
88 PYTHON_ARCH: "32"
89 CYBUILD: "TRUE"
90
91 - PYTHON: "C:\\Python33-x64"
92 PYTHON_VERSION: "3.3.x"
93 PYTHON_ARCH: "64"
94
95 - PYTHON: "C:\\Python33-x64"
96 PYTHON_VERSION: "3.3.x"
97 PYTHON_ARCH: "64"
98 CYBUILD: "TRUE"
99
100 - PYTHON: "C:\\Python34"
101 PYTHON_VERSION: "3.4.x"
102 PYTHON_ARCH: "32"
103
104 - PYTHON: "C:\\Python34"
105 PYTHON_VERSION: "3.4.x"
106 PYTHON_ARCH: "32"
107 CYBUILD: "TRUE"
108
109 - PYTHON: "C:\\Python34-x64"
110 PYTHON_VERSION: "3.4.x"
111 PYTHON_ARCH: "64"
112
113 - PYTHON: "C:\\Python34-x64"
114 PYTHON_VERSION: "3.4.x"
115 PYTHON_ARCH: "64"
116 CYBUILD: "TRUE"
117
118 - PYTHON: "C:\\Python35"
119 PYTHON_VERSION: "3.5.x"
120 PYTHON_ARCH: "32"
121
122 - PYTHON: "C:\\Python35"
123 PYTHON_VERSION: "3.5.x"
124 PYTHON_ARCH: "32"
125 CYBUILD: "TRUE"
126
127 - PYTHON: "C:\\Python35-x64"
128 PYTHON_VERSION: "3.5.x"
129 PYTHON_ARCH: "64"
130
131 - PYTHON: "C:\\Python35-x64"
132 PYTHON_VERSION: "3.5.x"
133 PYTHON_ARCH: "64"
134 CYBUILD: "TRUE"
135
136
137 init:
138 - echo %PYTHON% %PYTHON_VERSION% %PYTHON_ARCH%
139 - set PATH=%PYTHON%;%PYTHON%\Scripts;%PATH%
140
141 install:
142 - powershell .appveyor\\install.ps1
143 - ".appveyor\\prepare.bat"
144 - ps: if (Test-Path Env:\CYBUILD) {Copy-Item build\*\logbook\*.pyd logbook\}
145
146 build: off
147
148 test_script:
149 - py.test -r s tests
150
151 after_test:
152 - ".appveyor\\after_test.bat"
153
154 artifacts:
155 # Archive the generated packages in the ci.appveyor.com build report.
156 - path: dist\*
157
158 deploy:
159 description: ''
160 provider: GitHub
161 auth_token:
162 secure: 0yLUo/V+wwSvSFk9nBW/77RN9iTjJA1B5p/TM1XgVLPPFEZWkH756jyJ0FOmtJPt
163 artifact: /.*\.whl/
164 draft: true
165 prerelease: false
166 on:
167 appveyor_repo_tag: true
00 """Tests basic stack manipulation performance"""
11 from logbook import Handler, NullHandler, StreamHandler, FileHandler, \
2 ERROR, WARNING
2 ERROR, WARNING
33 from tempfile import NamedTemporaryFile
44 from cStringIO import StringIO
55
3939
4040 def bench_wrapper(use_gevent=False):
4141 print '=' * 80
42 print 'Running benchmark with Logbook %s (gevent enabled=%s)' % (version, use_gevent)
42 print 'Running benchmark with Logbook %s (gevent enabled=%s)' % \
43 (version, use_gevent)
4344 print '-' * 80
4445 os.chdir(bench_directory)
4546 for bench in list_benchmarks():
1717 :members:
1818 :inherited-members:
1919
20 Redis
21 -----
20 AMQP Message Queues
21 -------------------
2222
23 .. autoclass:: RedisHandler
23 .. autoclass:: MessageQueueHandler
24 :members:
25
26 .. autoclass:: MessageQueueSubscriber
2427 :members:
2528
2629 MultiProcessing
00 Utilities
11 =========
2
3 Misc. Utilities
4 ---------------
25
36 This documents general purpose utility functions available in Logbook.
47
2528 .. autofunction:: log
2629
2730 .. autofunction:: set_datetime_format
31
32 Slow Operations Logging
33 -----------------------
34
35 .. module:: logbook.utils
36 .. autofunction:: logged_if_slow_context
37
38
39 Deprecations
40 ------------
41
42 .. autofunction:: deprecated
43
44 .. autofunction:: suppressed_deprecations
45
1818
1919 This also means you don't have to call :func:`logging.basicConfig`:
2020
21 >>> from logbook.compat import redirect_logging
21 >>> from logbook.compat import redirect_logging, StreamHandler
22 >>> import sys
23 >>> StreamHandler(sys.stdout).push_application()
2224 >>> redirect_logging()
2325 >>> from logging import getLogger
2426 >>> log = getLogger('My Logger')
2527 >>> log.warn('This is a warning')
26 [2010-07-25 00:24] WARNING: My Logger: This is a warning
28 [2015-10-05 19:13:37.524346] WARNING: My Logger: This is a warning
2729
2830 Advanced Setup
2931 --------------
3436 active logbook handlers. This handler can then be added to specific logging
3537 loggers if you want:
3638
37 >>> from logging import getLogger
39 >>> from logging import getLogger, StreamHandler
40 >>> import sys
41 >>> StreamHandler(sys.stdout).push_application()
3842 >>> mylog = getLogger('My Log')
3943 >>> from logbook.compat import RedirectLoggingHandler
4044 >>> mylog.addHandler(RedirectLoggingHandler())
4246 >>> otherlog.warn('logging is deprecated')
4347 No handlers could be found for logger "Other Log"
4448 >>> mylog.warn('but logbook is awesome')
45 [2010-07-25 00:29] WARNING: My Log: but logbook is awesome
49 [2015-10-05 19:13:37.524346] WARNING: My Log: but logbook is awesome
4650
4751 Reverse Redirects
4852 -----------------
22 # Logbook documentation build configuration file, created by
33 # sphinx-quickstart on Fri Jul 23 16:54:49 2010.
44 #
5 # This file is execfile()d with the current directory set to its containing dir.
5 # This file is execfile()d with the current directory set to its containing
6 # dir.
67 #
78 # Note that not all possible configuration values are present in this
89 # autogenerated file.
1011 # All configuration values have a default; values that are commented out
1112 # serve to show the default.
1213
13 import sys, os
14 import sys
15 import os
1416
1517 # If extensions (or modules to document with autodoc) are in another directory,
1618 # add these directories to sys.path here. If the directory is relative to the
1719 # documentation root, use os.path.abspath to make it absolute, like shown here.
1820 sys.path.extend((os.path.abspath('.'), os.path.abspath('..')))
1921
20 # -- General configuration -----------------------------------------------------
22 # -- General configuration ----------------------------------------------------
2123
2224 # If your documentation needs a minimal Sphinx version, state it here.
23 #needs_sphinx = '1.0'
24
25 # Add any Sphinx extension module names here, as strings. They can be extensions
26 # coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
25 # needs_sphinx = '1.0'
26
27 # Add any Sphinx extension module names here, as strings. They can be
28 # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
2729 extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx']
2830
2931 # Add any paths that contain templates here, relative to this directory.
3335 source_suffix = '.rst'
3436
3537 # The encoding of source files.
36 #source_encoding = 'utf-8-sig'
38 # source_encoding = 'utf-8-sig'
3739
3840 # The master toctree document.
3941 master_doc = 'index'
4749 # built documents.
4850 #
4951 # The short X.Y version.
50 version = '0.10.0'
51 # The full version, including alpha/beta/rc tags.
52 release = '0.10.0'
52 with open(os.path.join(os.path.dirname(__file__), "..", "logbook", "__version__.py")) as version_file:
53 # can't use import here...
54 version = release = version_file.read().strip().split("=")[1].strip()[1:-1]
5355
5456 # The language for content autogenerated by Sphinx. Refer to documentation
5557 # for a list of supported languages.
56 #language = None
58 # language = None
5759
5860 # There are two options for replacing |today|: either, you set today to some
5961 # non-false value, then it is used:
60 #today = ''
62 # today = ''
6163 # Else, today_fmt is used as the format for a strftime call.
62 #today_fmt = '%B %d, %Y'
64 # today_fmt = '%B %d, %Y'
6365
6466 # List of patterns, relative to source directory, that match files and
6567 # directories to ignore when looking for source files.
6668 exclude_patterns = ['_build']
6769
68 # The reST default role (used for this markup: `text`) to use for all documents.
69 #default_role = None
70 # The reST default role (used for this markup: `text`) to use for all
71 # documents.
72 # default_role = None
7073
7174 # If true, '()' will be appended to :func: etc. cross-reference text.
72 #add_function_parentheses = True
75 # add_function_parentheses = True
7376
7477 # If true, the current module name will be prepended to all description
7578 # unit titles (such as .. function::).
76 #add_module_names = True
79 # add_module_names = True
7780
7881 # If true, sectionauthor and moduleauthor directives will be shown in the
7982 # output. They are ignored by default.
80 #show_authors = False
83 # show_authors = False
8184
8285 # The name of the Pygments (syntax highlighting) style to use.
8386 pygments_style = 'sphinx'
8487
8588 # A list of ignored prefixes for module index sorting.
86 #modindex_common_prefix = []
87
88
89 # -- Options for HTML output ---------------------------------------------------
89 # modindex_common_prefix = []
90
91
92 # -- Options for HTML output --------------------------------------------------
9093
9194 # The theme to use for HTML and HTML Help pages. See the documentation for
9295 # a list of builtin themes.
111114
112115 # The name of an image file (relative to this directory) to place at the top
113116 # of the sidebar.
114 #html_logo = None
117 # html_logo = None
115118
116119 # The name of an image file (within the static path) to use as favicon of the
117120 # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
118121 # pixels large.
119 #html_favicon = None
122 # html_favicon = None
120123
121124 # Add any paths that contain custom static files (such as style sheets) here,
122125 # relative to this directory. They are copied after the builtin static files,
123126 # so a file named "default.css" will overwrite the builtin "default.css".
124 #html_static_path = ['_static']
127 # html_static_path = ['_static']
125128
126129 # If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
127130 # using the given strftime format.
128 #html_last_updated_fmt = '%b %d, %Y'
131 # html_last_updated_fmt = '%b %d, %Y'
129132
130133 # If true, SmartyPants will be used to convert quotes and dashes to
131134 # typographically correct entities.
132 #html_use_smartypants = True
135 # html_use_smartypants = True
133136
134137 # Custom sidebar templates, maps document names to template names.
135 #html_sidebars = {}
138 # html_sidebars = {}
136139
137140 # Additional templates that should be rendered to pages, maps page names to
138141 # template names.
139 #html_additional_pages = {}
142 # html_additional_pages = {}
140143
141144 # If false, no module index is generated.
142 #html_domain_indices = True
145 # html_domain_indices = True
143146
144147 # If false, no index is generated.
145 #html_use_index = True
148 # html_use_index = True
146149
147150 # If true, the index is split into individual pages for each letter.
148 #html_split_index = False
151 # html_split_index = False
149152
150153 # If true, links to the reST sources are added to the pages.
151 #html_show_sourcelink = True
154 # html_show_sourcelink = True
152155
153156 # If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
154 #html_show_sphinx = True
157 # html_show_sphinx = True
155158
156159 # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
157 #html_show_copyright = True
160 # html_show_copyright = True
158161
159162 html_add_permalinks = False
160163
161164 # If true, an OpenSearch description file will be output, and all pages will
162165 # contain a <link> tag referring to it. The value of this option must be the
163166 # base URL from which the finished HTML is served.
164 #html_use_opensearch = ''
167 # html_use_opensearch = ''
165168
166169 # If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
167 #html_file_suffix = ''
170 # html_file_suffix = ''
168171
169172 # Output file base name for HTML help builder.
170173 htmlhelp_basename = 'Logbookdoc'
171174
172175
173 # -- Options for LaTeX output --------------------------------------------------
176 # -- Options for LaTeX output -------------------------------------------------
174177
175178 # The paper size ('letter' or 'a4').
176 #latex_paper_size = 'letter'
179 # latex_paper_size = 'letter'
177180
178181 # The font size ('10pt', '11pt' or '12pt').
179 #latex_font_size = '10pt'
182 # latex_font_size = '10pt'
180183
181184 # Grouping the document tree into LaTeX files. List of tuples
182 # (source start file, target name, title, author, documentclass [howto/manual]).
185 # (source start file, target name, title, author,
186 # documentclass [howto/manual]).
183187 latex_documents = [
184 ('index', 'Logbook.tex', u'Logbook Documentation',
185 u'Armin Ronacher, Georg Brandl', 'manual'),
188 ('index', 'Logbook.tex', u'Logbook Documentation',
189 u'Armin Ronacher, Georg Brandl', 'manual'),
186190 ]
187191
188192 # The name of an image file (relative to this directory) to place at the top of
189193 # the title page.
190 #latex_logo = None
194 # latex_logo = None
191195
192196 # For "manual" documents, if this is true, then toplevel headings are parts,
193197 # not chapters.
194 #latex_use_parts = False
198 # latex_use_parts = False
195199
196200 # If true, show page references after internal links.
197 #latex_show_pagerefs = False
201 # latex_show_pagerefs = False
198202
199203 # If true, show URL addresses after external links.
200 #latex_show_urls = False
204 # latex_show_urls = False
201205
202206 # Additional stuff for the LaTeX preamble.
203 #latex_preamble = ''
207 # latex_preamble = ''
204208
205209 # Documents to append as an appendix to all manuals.
206 #latex_appendices = []
210 # latex_appendices = []
207211
208212 # If false, no module index is generated.
209 #latex_domain_indices = True
210
211
212 # -- Options for manual page output --------------------------------------------
213 # latex_domain_indices = True
214
215
216 # -- Options for manual page output -------------------------------------------
213217
214218 # One entry per manual page. List of tuples
215219 # (source start file, name, description, authors, manual section).
5050 default dispatching can be triggered from a function
5151 :func:`~logbook.base.dispatch_record`:
5252
53 >>> from logbook import dispatch_record, LogRecord, INFO
53 >>> from logbook import dispatch_record, LogRecord, INFO, StreamHandler
54 >>> import sys
5455 >>> record = LogRecord('My channel', INFO, 'Hello World!')
5556 >>> dispatch_record(record)
56 [2010-09-04 15:56] INFO: My channel: Hello World!
57 [2015-10-05 19:18:52.211472] INFO: My channel: Hello World!
5758
5859 It is pretty common for log records to be created without a dispatcher.
5960 Here some common use cases for log records without a dispatcher:
106106
107107 This is how easy it is to get started with Logbook::
108108
109 from logbook import warn
109 from logbook import warn, StreamHandler
110 import sys
111 StreamHandler(sys.stdout).push_application()
110112 warn('This is a warning')
111
112 That will use the default logging channel. But you can create as many as
113 you like::
114
115 from logbook import Logger
116 log = Logger('My Logger')
117 log.warn('This is a warning')
118113
119114 Roadmap
120115 -------
00 Welcome to Logbook
11 ==================
22
3 Logbook is a logging sytem for Python that replaces the standard library's
3 Logbook is a logging system for Python that replaces the standard library's
44 logging module. It was designed with both complex and simple applications
55 in mind and the idea to make logging fun:
66
7 >>> from logbook import Logger
7 >>> from logbook import Logger, StreamHandler
8 >>> import sys
9 >>> StreamHandler(sys.stdout).push_application()
810 >>> log = Logger('Logbook')
911 >>> log.info('Hello, World!')
10 [2010-07-23 16:34] INFO: Logbook: Hello, World!
12 [2015-10-05 18:55:56.937141] INFO: Logbook: Hello, World!
1113
1214 What makes it fun? What about getting log messages on your phone or
1315 desktop notification system? :ref:`Logbook can do that <notifiers>`.
4850 * IRC: ``#pocoo`` on freenode
4951
5052 .. _Download from PyPI: http://pypi.python.org/pypi/Logbook
51 .. _Master repository on GitHub: https://github.com/mitsuhiko/logbook
53 .. _Master repository on GitHub: https://github.com/getlogbook/logbook
5254 .. _Mailing list: http://groups.google.com/group/pocoo-libs
55 Logbook makes it very easy to get started with logging. Just import the logger
66 class, create yourself a logger and you are set:
77
8 >>> from logbook import Logger
8 >>> from logbook import Logger, StreamHandler
9 >>> import sys
10 >>> StreamHandler(sys.stdout).push_application()
911 >>> log = Logger('My Awesome Logger')
1012 >>> log.warn('This is too cool for stdlib')
11 [2010-07-23 16:34:42.687111] WARNING: My Awesome Logger: This is too cool for stdlib
13 [2015-10-05 19:02:03.575723] WARNING: My Awesome Logger: This is too cool for stdlib
1214
1315 A logger is a so-called :class:`~logbook.base.RecordDispatcher`, which is
1416 commonly referred to as a "logging channel". The name you give such a channel
108110 error_handler = SyslogHandler('logbook example', level='ERROR', bubble=True)
109111 with error_handler.applicationbound():
110112 # whatever is executed here and an error is logged to the
111 # error handler but it will also bubble up to the default
112 # stderr handler.
113 # error handler but it will also bubble up other handles.
113114 ...
114115
115116 So what if you want to only log errors to the syslog and nothing to
1818
1919 {% block footer %}
2020 {% if online %}
21 <a href="http://github.com/mitsuhiko/logbook">
21 <a href="http://github.com/getlogbook/logbook">
2222 <img style="position: fixed; top: 0; right: 0; border: 0;"
2323 src="http://s3.amazonaws.com/github/ribbons/forkme_right_gray_6d6d6d.png"
2424 alt="Fork me on GitHub">
1010 """
1111
1212 import os
13 from logbook.base import LogRecord, Logger, LoggerGroup, NestedSetup, \
14 Processor, Flags, get_level_name, lookup_level, dispatch_record, \
15 CRITICAL, ERROR, WARNING, NOTICE, INFO, DEBUG, NOTSET, \
16 set_datetime_format
17 from logbook.handlers import Handler, StreamHandler, FileHandler, \
18 MonitoringFileHandler, StderrHandler, RotatingFileHandler, \
19 TimedRotatingFileHandler, TestHandler, MailHandler, GMailHandler, SyslogHandler, \
20 NullHandler, NTEventLogHandler, create_syshandler, StringFormatter, \
21 StringFormatterHandlerMixin, HashingHandlerMixin, \
22 LimitingHandlerMixin, WrapperHandler, FingersCrossedHandler, \
23 GroupHandler
13 from .base import (
14 LogRecord, Logger, LoggerGroup, NestedSetup, Processor, Flags,
15 get_level_name, lookup_level, dispatch_record, CRITICAL, ERROR, WARNING,
16 NOTICE, INFO, DEBUG, TRACE, NOTSET, set_datetime_format)
17 from .handlers import (
18 Handler, StreamHandler, FileHandler, MonitoringFileHandler, StderrHandler,
19 RotatingFileHandler, TimedRotatingFileHandler, TestHandler, MailHandler,
20 GMailHandler, SyslogHandler, NullHandler, NTEventLogHandler,
21 create_syshandler, StringFormatter, StringFormatterHandlerMixin,
22 HashingHandlerMixin, LimitingHandlerMixin, WrapperHandler,
23 FingersCrossedHandler, GroupHandler)
24 from . import compat
2425
25 __version__ = '0.10.0'
26 __version__ = '0.11.4-dev'
2627
2728 # create an anonymous default logger and provide all important
2829 # methods of that logger as global functions
2930 _default_logger = Logger('Generic')
3031 _default_logger.suppress_dispatcher = True
32 trace = _default_logger.trace
3133 debug = _default_logger.debug
3234 info = _default_logger.info
3335 warn = _default_logger.warn
0 __version__ = "0.12.3"
99 """
1010 from itertools import count
1111 from logbook.helpers import get_iterator_next_method
12 from logbook.concurrency import (thread_get_ident, greenlet_get_ident,
13 thread_local, greenlet_local,
14 ThreadLock, GreenletRLock, is_gevent_enabled)
12 from logbook.concurrency import (
13 thread_get_ident, greenlet_get_ident, thread_local, greenlet_local,
14 ThreadLock, GreenletRLock, is_gevent_enabled)
1515
1616 _missing = object()
1717 _MAX_CONTEXT_OBJECT_CACHE = 256
2929 if self.group is None:
3030 return default
3131 return getattr(self.group, name)
32
3233 def _set(self, value):
3334 setattr(self, '_' + name, value)
35
3436 def _del(self):
3537 delattr(self, '_' + name)
3638 return property(_get, _set, _del)
148150 def push_greenlet(self, obj):
149151 self._greenlet_context_lock.acquire()
150152 try:
151 self._cache.pop(greenlet_get_ident(), None) # remote chance to conflict with thread ids
153 # remote chance to conflict with thread ids
154 self._cache.pop(greenlet_get_ident(), None)
152155 item = (self._stackop(), obj)
153156 stack = getattr(self._greenlet_context, 'stack', None)
154157 if stack is None:
161164 def pop_greenlet(self):
162165 self._greenlet_context_lock.acquire()
163166 try:
164 self._cache.pop(greenlet_get_ident(), None) # remote chance to conflict with thread ids
167 # remote chance to conflict with thread ids
168 self._cache.pop(greenlet_get_ident(), None)
165169 stack = getattr(self._greenlet_context, 'stack', None)
166170 assert stack, 'no objects on stack'
167171 return stack.pop()[1]
1111 esc = "\x1b["
1212
1313 codes = {}
14 codes[""] = ""
15 codes["reset"] = esc + "39;49;00m"
14 codes[""] = ""
15 codes["reset"] = esc + "39;49;00m"
1616
17 dark_colors = ["black", "darkred", "darkgreen", "brown", "darkblue",
18 "purple", "teal", "lightgray"]
17 dark_colors = ["black", "darkred", "darkgreen", "brown", "darkblue",
18 "purple", "teal", "lightgray"]
1919 light_colors = ["darkgray", "red", "green", "yellow", "blue",
2020 "fuchsia", "turquoise", "white"]
2121
2727
2828 del d, l, x
2929
30 codes["darkteal"] = codes["turquoise"]
30 codes["darkteal"] = codes["turquoise"]
3131 codes["darkyellow"] = codes["brown"]
32 codes["fuscia"] = codes["fuchsia"]
32 codes["fuscia"] = codes["fuchsia"]
3333
3434
3535 def _str_to_type(obj, strtype):
4141
4242 def colorize(color_key, text):
4343 """Returns an ANSI formatted text with the given color."""
44 return _str_to_type(codes[color_key], text) + text + \
45 _str_to_type(codes["reset"], text)
44 return (_str_to_type(codes[color_key], text) + text +
45 _str_to_type(codes["reset"], text))
1313 from itertools import chain
1414 from weakref import ref as weakref
1515 from datetime import datetime
16 from logbook import helpers
17 from logbook.concurrency import thread_get_name, thread_get_ident, greenlet_get_ident
18
19 from logbook.helpers import to_safe_json, parse_iso8601, cached_property, \
20 PY2, u, string_types, iteritems, integer_types
16 from logbook.concurrency import (
17 thread_get_name, thread_get_ident, greenlet_get_ident)
18
19 from logbook.helpers import (
20 to_safe_json, parse_iso8601, cached_property, PY2, u, string_types,
21 iteritems, integer_types, xrange)
2122 try:
22 from logbook._speedups import group_reflected_property, \
23 ContextStackManager, StackedObject
23 from logbook._speedups import (
24 group_reflected_property, ContextStackManager, StackedObject)
2425 except ImportError:
25 from logbook._fallback import group_reflected_property, \
26 ContextStackManager, StackedObject
26 from logbook._fallback import (
27 group_reflected_property, ContextStackManager, StackedObject)
2728
2829 _datetime_factory = datetime.utcnow
30
31
2932 def set_datetime_format(datetime_format):
3033 """
3134 Set the format for the datetime objects created, which are then
3235 made available as the :py:attr:`LogRecord.time` attribute of
3336 :py:class:`LogRecord` instances.
3437
35 :param datetime_format: Indicates how to generate datetime objects. Possible values are:
38 :param datetime_format: Indicates how to generate datetime objects.
39 Possible values are:
3640
3741 "utc"
38 :py:attr:`LogRecord.time` will be a datetime in UTC time zone (but not time zone aware)
42 :py:attr:`LogRecord.time` will be a datetime in UTC time zone
43 (but not time zone aware)
3944 "local"
40 :py:attr:`LogRecord.time` will be a datetime in local time zone (but not time zone aware)
45 :py:attr:`LogRecord.time` will be a datetime in local time zone
46 (but not time zone aware)
4147
4248 This function defaults to creating datetime objects in UTC time,
4349 using `datetime.utcnow()
6571 elif datetime_format == "local":
6672 _datetime_factory = datetime.now
6773 else:
68 raise ValueError("Invalid value %r. Valid values are 'utc' and 'local'." % (datetime_format,))
74 raise ValueError("Invalid value %r. Valid values are 'utc' and "
75 "'local'." % (datetime_format,))
6976
7077 # make sure to sync these up with _speedups.pyx
71 CRITICAL = 6
72 ERROR = 5
73 WARNING = 4
74 NOTICE = 3
75 INFO = 2
76 DEBUG = 1
78 CRITICAL = 15
79 ERROR = 14
80 WARNING = 13
81 NOTICE = 12
82 INFO = 11
83 DEBUG = 10
84 TRACE = 9
7785 NOTSET = 0
7886
7987 _level_names = {
8391 NOTICE: 'NOTICE',
8492 INFO: 'INFO',
8593 DEBUG: 'DEBUG',
94 TRACE: 'TRACE',
8695 NOTSET: 'NOTSET'
8796 }
8897 _reverse_level_names = dict((v, k) for (k, v) in iteritems(_level_names))
342351 """
343352 _pullable_information = frozenset((
344353 'func_name', 'module', 'filename', 'lineno', 'process_name', 'thread',
345 'thread_name', 'greenlet', 'formatted_exception', 'message', 'exception_name',
346 'exception_message'
354 'thread_name', 'greenlet', 'formatted_exception', 'message',
355 'exception_name', 'exception_message'
347356 ))
348357 _noned_on_close = frozenset(('exc_info', 'frame', 'calling_frame'))
349358
368377 information_pulled = False
369378
370379 def __init__(self, channel, level, msg, args=None, kwargs=None,
371 exc_info=None, extra=None, frame=None, dispatcher=None, frame_correction=0):
380 exc_info=None, extra=None, frame=None, dispatcher=None,
381 frame_correction=0):
372382 #: the name of the logger that created it or any other textual
373383 #: channel description. This is a descriptive name and can be
374384 #: used for filtering.
384394 #: optional exception information. If set, this is a tuple in the
385395 #: form ``(exc_type, exc_value, tb)`` as returned by
386396 #: :func:`sys.exc_info`.
387 #: This parameter can also be ``True``, which would cause the exception info tuple
388 #: to be fetched for you.
397 #: This parameter can also be ``True``, which would cause the exception
398 #: info tuple to be fetched for you.
399 if not exc_info:
400 # this is a special case where exc_info=False can be passed in
401 # theory, and it should be the same as exc_info=None
402 exc_info = None
389403 self.exc_info = exc_info
390404 #: optional extra information as dictionary. This is the place
391405 #: where custom log processors can attach custom context sensitive
507521 return self.msg
508522 try:
509523 try:
510 return self._format_message(self.msg, *self.args, **self.kwargs)
524 return self._format_message(self.msg, *self.args,
525 **self.kwargs)
511526 except UnicodeDecodeError:
512527 # Assume an unicode message but mixed-up args
513528 msg = self.msg.encode('utf-8', 'replace')
514529 return self._format_message(msg, *self.args, **self.kwargs)
515530 except (UnicodeEncodeError, AttributeError):
516 # we catch AttributeError since if msg is bytes, it won't have the 'format' method
517 if sys.exc_info()[0] is AttributeError and (PY2 or not isinstance(self.msg, bytes)):
531 # we catch AttributeError since if msg is bytes,
532 # it won't have the 'format' method
533 if (sys.exc_info()[0] is AttributeError
534 and (PY2 or not isinstance(self.msg, bytes))):
518535 # this is not the case we thought it is...
519536 raise
520537 # Assume encoded message with unicode args.
531548 # that.
532549 e = sys.exc_info()[1]
533550 errormsg = ('Could not format message with provided '
534 'arguments: {err}\n msg={msg!r}\n '
535 'args={args!r} \n kwargs={kwargs!r}.\n'
536 'Happened in file {file}, line {lineno}').format(
551 'arguments: {err}\n msg={msg!r}\n '
552 'args={args!r} \n kwargs={kwargs!r}.\n'
553 'Happened in file {file}, line {lineno}').format(
537554 err=e, msg=self.msg, args=self.args,
538555 kwargs=self.kwargs, file=self.filename,
539556 lineno=self.lineno
554571 while frm is not None and frm.f_globals is globs:
555572 frm = frm.f_back
556573
557 for _ in helpers.xrange(self.frame_correction):
574 for _ in xrange(self.frame_correction):
558575 frm = frm.f_back
559576
560577 return frm
603620 @cached_property
604621 def greenlet(self):
605622 """The ident of the greenlet. This is evaluated late and means that
606 if the log record is passed to another greenlet, :meth:`pull_information`
607 was called in the old greenlet.
623 if the log record is passed to another greenlet,
624 :meth:`pull_information` was called in the old greenlet.
608625 """
609626 return greenlet_get_ident()
610627
697714 #: The name of the minimium logging level required for records to be
698715 #: created.
699716 level_name = level_name_property()
717
718 def trace(self, *args, **kwargs):
719 """Logs a :class:`~logbook.LogRecord` with the level set
720 to :data:`~logbook.TRACE`.
721 """
722 if not self.disabled and TRACE >= self.level:
723 self._log(TRACE, args, kwargs)
700724
701725 def debug(self, *args, **kwargs):
702726 """Logs a :class:`~logbook.LogRecord` with the level set
883907 continue
884908
885909 # first case of blackhole (without filter).
886 # this should discard all further processing and we don't have to heavy_init to know that...
910 # this should discard all further processing and
911 # we don't have to heavy_init to know that...
887912 if handler.filter is None and handler.blackhole:
888913 break
889914
896921 self.process_record(record)
897922 record_initialized = True
898923
899
900924 # a filter can still veto the handling of the record. This
901925 # however is already operating on an initialized and processed
902926 # record. The impact is that filters are slower than the
903927 # handler's should_handle function in case there is no default
904928 # handler that would handle the record (delayed init).
905 if handler.filter is not None \
906 and not handler.filter(record, handler):
929 if (handler.filter is not None
930 and not handler.filter(record, handler)):
907931 continue
908932
909933 # We might have a filter, so now that we know we *should* handle
910934 # this record, we should consider the case of us being a black hole...
911935 if handler.blackhole:
912936 break
913
914937
915938 # handle the record. If the record was handled and
916939 # the record is not bubbling we can abort now.
2424 removes all otherwise registered handlers on root logger of
2525 the logging system but leaves the other loggers untouched.
2626
27 :param set_root_logger_level: controls of the default level of the legacy root logger is changed
28 so that all legacy log messages get redirected to Logbook
27 :param set_root_logger_level: controls of the default level of the legacy
28 root logger is changed so that all legacy log messages get redirected
29 to Logbook
2930 """
3031 del logging.root.handlers[:]
3132 logging.root.addHandler(RedirectLoggingHandler())
107108 """Tries to find the caller that issued the call."""
108109 frm = sys._getframe(2)
109110 while frm is not None:
110 if frm.f_globals is globals() or \
111 frm.f_globals is logbook.base.__dict__ or \
112 frm.f_globals is logging.__dict__:
111 if (frm.f_globals is globals() or
112 frm.f_globals is logbook.base.__dict__ or
113 frm.f_globals is logging.__dict__):
113114 frm = frm.f_back
114115 else:
115116 return frm
123124 def convert_record(self, old_record):
124125 """Converts an old logging record into a logbook log record."""
125126 record = LoggingCompatRecord(old_record.name,
126 self.convert_level(old_record.levelno),
127 old_record.msg, old_record.args,
128 None, old_record.exc_info,
129 self.find_extra(old_record),
130 self.find_caller(old_record))
127 self.convert_level(old_record.levelno),
128 old_record.msg, old_record.args,
129 None, old_record.exc_info,
130 self.find_extra(old_record),
131 self.find_caller(old_record))
131132 record.time = self.convert_time(old_record.created)
132133 return record
133134
4848
4949 def __repr__(self):
5050 owner = self._owner
51 return "<%s owner=%r count=%d>" % (self.__class__.__name__, owner, self._count)
51 return "<%s owner=%r count=%d>" % (self.__class__.__name__, owner,
52 self._count)
5253
5354 def acquire(self, blocking=1):
5455 tid = thread_get_ident()
5556 gid = greenlet_get_ident()
5657 tid_gid = (tid, gid)
57 if tid_gid == self._owner: # We trust the GIL here so we can do this comparison w/o locking.
58
59 # We trust the GIL here so we can do this comparison w/o locking.
60 if tid_gid == self._owner:
5861 self._count = self._count + 1
5962 return True
6063
6164 greenlet_lock = self._get_greenlet_lock()
6265
6366 self._wait_queue.append(gid)
64 # this is a safety in case an exception is raised somewhere and we must make sure we're not in the queue
67 # this is a safety in case an exception is raised somewhere
68 # and we must make sure we're not in the queue
6569 # otherwise it'll get stuck forever.
6670 remove_from_queue_on_return = True
6771 try:
7377 # Hurray, we can have the lock.
7478 self._owner = tid_gid
7579 self._count = 1
76 remove_from_queue_on_return = False # don't remove us from the queue
80
81 # don't remove us from the queue
82 remove_from_queue_on_return = False
7783 return True
7884 else:
79 # we already hold the greenlet lock so obviously the owner is not in our thread.
85 # we already hold the greenlet lock so obviously
86 # the owner is not in our thread.
8087 greenlet_lock.release()
8188 if blocking:
82 gevent.sleep(0.0005) # 500 us -> initial delay of 1 ms
89 # 500 us -> initial delay of 1 ms
90 gevent.sleep(0.0005)
8391 else:
8492 return False
8593 finally:
113121 def _is_owned(self):
114122 return self._owner == (thread_get_ident(), greenlet_get_ident())
115123 else:
116 from threading import Lock as ThreadLock, RLock as ThreadRLock, currentThread
124 from threading import (
125 Lock as ThreadLock, RLock as ThreadRLock, currentThread)
117126 try:
118 from thread import get_ident as thread_get_ident, _local as thread_local
127 from thread import (
128 get_ident as thread_get_ident, _local as thread_local)
119129 except ImportError:
120 from _thread import get_ident as thread_get_ident, _local as thread_local
130 from _thread import (
131 get_ident as thread_get_ident, _local as thread_local)
121132
122133 def thread_get_name():
123134 return currentThread().getName()
139150 def __exit__(self, t, v, tb):
140151 pass
141152
153
142154 def new_fine_grained_lock():
143155 global use_gevent
144156 if use_gevent:
77 :copyright: (c) 2010 by Armin Ronacher, Georg Brandl.
88 :license: BSD, see LICENSE for more details.
99 """
10 import io
1011 import os
1112 import re
1213 import sys
2021 import traceback
2122 from datetime import datetime, timedelta
2223 from collections import deque
23
24 from six import add_metaclass
25
26 from logbook.base import CRITICAL, ERROR, WARNING, NOTICE, INFO, DEBUG, \
27 NOTSET, level_name_property, _missing, lookup_level, \
28 Flags, ContextObject, ContextStackManager
29 from logbook.helpers import rename, b, _is_text_stream, is_unicode, PY2, \
30 zip, xrange, string_types, integer_types, reraise, u
24 from textwrap import dedent
25
26 from logbook.base import (
27 CRITICAL, ERROR, WARNING, NOTICE, INFO, DEBUG, NOTSET, level_name_property,
28 _missing, lookup_level, Flags, ContextObject, ContextStackManager)
29 from logbook.helpers import (
30 rename, b, _is_text_stream, is_unicode, PY2, zip, xrange, string_types,
31 integer_types, reraise, u, with_metaclass)
3132 from logbook.concurrency import new_fine_grained_lock
3233
33 DEFAULT_FORMAT_STRING = (
34 u('[{record.time:%Y-%m-%d %H:%M:%S.%f}] ') +
35 u('{record.level_name}: {record.channel}: {record.message}')
36 )
34 DEFAULT_FORMAT_STRING = u(
35 '[{record.time:%Y-%m-%d %H:%M:%S.%f}] '
36 '{record.level_name}: {record.channel}: {record.message}')
37
3738 SYSLOG_FORMAT_STRING = u('{record.channel}: {record.message}')
38 NTLOG_FORMAT_STRING = u('''\
39 Message Level: {record.level_name}
40 Location: {record.filename}:{record.lineno}
41 Module: {record.module}
42 Function: {record.func_name}
43 Exact Time: {record.time:%Y-%m-%d %H:%M:%S}
44
45 Event provided Message:
46
47 {record.message}
48 ''')
49 TEST_FORMAT_STRING = \
50 u('[{record.level_name}] {record.channel}: {record.message}')
51 MAIL_FORMAT_STRING = u('''\
52 Subject: {handler.subject}
53
54 Message type: {record.level_name}
55 Location: {record.filename}:{record.lineno}
56 Module: {record.module}
57 Function: {record.func_name}
58 Time: {record.time:%Y-%m-%d %H:%M:%S}
59
60 Message:
61
62 {record.message}
63 ''')
64 MAIL_RELATED_FORMAT_STRING = u('''\
65 Message type: {record.level_name}
66 Location: {record.filename}:{record.lineno}
67 Module: {record.module}
68 Function: {record.func_name}
69 {record.message}
70 ''')
39 NTLOG_FORMAT_STRING = dedent(u('''
40 Message Level: {record.level_name}
41 Location: {record.filename}:{record.lineno}
42 Module: {record.module}
43 Function: {record.func_name}
44 Exact Time: {record.time:%Y-%m-%d %H:%M:%S}
45
46 Event provided Message:
47
48 {record.message}
49 ''')).lstrip()
50
51 TEST_FORMAT_STRING = u('[{record.level_name}] {record.channel}: {record.message}')
52 MAIL_FORMAT_STRING = dedent(u('''
53 Subject: {handler.subject}
54
55 Message type: {record.level_name}
56 Location: {record.filename}:{record.lineno}
57 Module: {record.module}
58 Function: {record.func_name}
59 Time: {record.time:%Y-%m-%d %H:%M:%S}
60
61 Message:
62
63 {record.message}
64 ''')).lstrip()
65
66 MAIL_RELATED_FORMAT_STRING = dedent(u('''
67 Message type: {record.level_name}
68 Location: {record.filename}:{record.lineno}
69 Module: {record.module}
70 Function: {record.func_name}
71 {record.message}
72 ''')).lstrip()
7173
7274 SYSLOG_PORT = 514
7375
7476 REGTYPE = type(re.compile("I'm a regular expression!"))
77
7578
7679 def create_syshandler(application_name, level=NOTSET):
7780 """Creates the handler the operating system provides. On Unix systems
9396 def __new__(cls, name, bases, d):
9497 # aha, that thing has a custom close method. We will need a magic
9598 # __del__ for it to be called on cleanup.
96 if bases != (ContextObject,) and 'close' in d and '__del__' not in d \
97 and not any(hasattr(x, '__del__') for x in bases):
99 if (bases != (ContextObject,) and 'close' in d and '__del__' not in d
100 and not any(hasattr(x, '__del__') for x in bases)):
98101 def _magic_del(self):
99102 try:
100103 self.close()
106109 return type.__new__(cls, name, bases, d)
107110
108111
109 @add_metaclass(_HandlerType)
110 class Handler(ContextObject):
112 class Handler(with_metaclass(_HandlerType), ContextObject):
111113 """Handler instances dispatch logging events to specific destinations.
112114
113115 The base handler class. Acts as a placeholder which defines the Handler
116118 the 'raw' message as determined by record.message is logged.
117119
118120 To bind a handler you can use the :meth:`push_application`,
119 :meth:`push_thread` or :meth:`push_greenlet` methods. This will push the handler on a stack of
120 handlers. To undo this, use the :meth:`pop_application`,
121 :meth:`push_thread` or :meth:`push_greenlet` methods.
122 This will push the handler on a stack of handlers.
123 To undo this, use the :meth:`pop_application`,
121124 :meth:`pop_thread` methods and :meth:`pop_greenlet`::
122125
123126 handler = MyHandler()
306309 """
307310 blackhole = True
308311
312 def __init__(self, level=NOTSET, filter=None):
313 super(NullHandler, self).__init__(level=level, filter=filter,
314 bubble=False)
315
309316
310317 class WrapperHandler(Handler):
311318 """A class that can wrap another handler and redirect all calls to the
437444
438445 _NUMBER_TYPES = integer_types + (float,)
439446
447
440448 class LimitingHandlerMixin(HashingHandlerMixin):
441449 """Mixin class for handlers that want to limit emitting records.
442450
483491 first_count = last_count
484492 old_count = suppression_count
485493
486 if not suppression_count and \
487 len(self._record_limits) >= self.max_record_cache:
494 if (not suppression_count and
495 len(self._record_limits) >= self.max_record_cache):
488496 cache_items = self._record_limits.items()
489497 cache_items.sort()
490 del cache_items[:int(self._record_limits) \
491 * self.record_cache_prune]
498 del cache_items[:int(self._record_limits)
499 * self.record_cache_prune]
492500 self._record_limits = dict(cache_items)
493501
494502 self._record_limits[hash] = (first_count, old_count + 1)
554562 """Encodes the message to the stream encoding."""
555563 stream = self.stream
556564 rv = msg + '\n'
557 if (PY2 and is_unicode(rv)) or \
558 not (PY2 or is_unicode(rv) or _is_text_stream(stream)):
565 if ((PY2 and is_unicode(rv)) or
566 not (PY2 or is_unicode(rv) or _is_text_stream(stream))):
559567 enc = self.encoding
560568 if enc is None:
561569 enc = getattr(stream, 'encoding', None) or 'utf-8'
602610 def _open(self, mode=None):
603611 if mode is None:
604612 mode = self._mode
605 self.stream = open(self._filename, mode)
613 self.stream = io.open(self._filename, mode, encoding=self.encoding)
606614
607615 def write(self, item):
608616 self.ensure_stream_is_open()
609 if not PY2 and isinstance(item, bytes):
617 if isinstance(item, bytes):
610618 self.stream.buffer.write(item)
611619 else:
612620 self.stream.write(item)
722730 format_string, delay, filter, bubble)
723731 self.max_size = max_size
724732 self.backup_count = backup_count
725 assert backup_count > 0, 'at least one backup file has to be ' \
726 'specified'
733 assert backup_count > 0, ('at least one backup file has to be '
734 'specified')
727735
728736 def should_rollover(self, record, bytes):
729737 self.stream.seek(0, 2)
786794 self._filename = None
787795
788796 def _get_timed_filename(self, datetime):
789 return datetime.strftime('-' + self.date_format) \
790 .join(self._fn_parts)
797 return (datetime.strftime('-' + self.date_format)
798 .join(self._fn_parts))
791799
792800 def should_rollover(self, record):
793801 fn = self._get_timed_filename(record.time)
805813 files = []
806814 for filename in os.listdir(directory):
807815 filename = os.path.join(directory, filename)
808 if filename.startswith(self._fn_parts[0] + '-') and \
809 filename.endswith(self._fn_parts[1]):
816 if (filename.startswith(self._fn_parts[0] + '-') and
817 filename.endswith(self._fn_parts[1])):
810818 files.append((os.path.getmtime(filename), filename))
811819 files.sort()
812 return files[:-self.backup_count + 1] if self.backup_count > 1\
813 else files[:]
820 if self.backup_count > 1:
821 return files[:-self.backup_count + 1]
822 else:
823 return files[:]
814824
815825 def perform_rollover(self):
816826 self.stream.close()
846856 default_format_string = TEST_FORMAT_STRING
847857
848858 def __init__(self, level=NOTSET, format_string=None, filter=None,
849 bubble=False):
859 bubble=False, force_heavy_init=False):
850860 Handler.__init__(self, level, filter, bubble)
851861 StringFormatterHandlerMixin.__init__(self, format_string)
852862 #: captures the :class:`LogRecord`\s as instances
853863 self.records = []
854864 self._formatted_records = []
855865 self._formatted_record_cache = []
866 self._force_heavy_init = force_heavy_init
856867
857868 def close(self):
858869 """Close all records down when the handler is closed."""
864875 # call to the emit function. If we don't do that, the traceback
865876 # attribute and other things will already be removed.
866877 record.keep_open = True
878 if self._force_heavy_init:
879 record.heavy_init()
867880 self.records.append(record)
868881
869882 @property
870883 def formatted_records(self):
871884 """Captures the formatted log records as unicode strings."""
872 if len(self._formatted_record_cache) != len(self.records) or \
873 any(r1 != r2 for r1, r2 in
874 zip(self.records, self._formatted_record_cache)):
885 if (len(self._formatted_record_cache) != len(self.records) or
886 any(r1 != r2 for r1, r2 in
887 zip(self.records, self._formatted_record_cache))):
875888 self._formatted_records = [self.format(r) for r in self.records]
876889 self._formatted_record_cache = list(self.records)
877890 return self._formatted_records
10441057 def _get_related_format_string(self):
10451058 if isinstance(self.related_formatter, StringFormatter):
10461059 return self.related_formatter.format_string
1060
10471061 def _set_related_format_string(self, value):
10481062 if value is None:
10491063 self.related_formatter = None
10501064 else:
10511065 self.related_formatter = self.formatter_class(value)
10521066 related_format_string = property(_get_related_format_string,
1053 _set_related_format_string)
1067 _set_related_format_string)
10541068 del _get_related_format_string, _set_related_format_string
10551069
10561070 def get_recipients(self, record):
10861100
10871101 body = '\r\n'.join(lineiter)
10881102 if suppressed:
1089 body += '\r\n\r\nThis message occurred additional %d ' \
1090 'time(s) and was suppressed' % suppressed
1103 body += ('\r\n\r\nThis message occurred additional %d '
1104 'time(s) and was suppressed' % suppressed)
10911105
10921106 # inconsistency in Python 2.5
10931107 # other versions correctly return msg.get_payload() as str
12001214
12011215 class GMailHandler(MailHandler):
12021216 """
1203 A customized mail handler class for sending emails via GMail (or Google Apps mail)::
1204
1205 handler = GMailHandler("my_user@gmail.com", "mypassword", ["to_user@some_mail.com"], ...) # other arguments same as MailHandler
1217 A customized mail handler class for sending emails via GMail (or Google
1218 Apps mail)::
1219
1220 handler = GMailHandler(
1221 "my_user@gmail.com", "mypassword", ["to_user@some_mail.com"],
1222 ...) # other arguments same as MailHandler
12061223
12071224 .. versionadded:: 0.6.0
12081225 """
12091226
12101227 def __init__(self, account_id, password, recipients, **kw):
12111228 super(GMailHandler, self).__init__(
1212 account_id, recipients, secure=(), server_addr=("smtp.gmail.com", 587),
1229 account_id, recipients, secure=(),
1230 server_addr=("smtp.gmail.com", 587),
12131231 credentials=(account_id, password), **kw)
12141232
12151233
12201238 default_format_string = SYSLOG_FORMAT_STRING
12211239
12221240 # priorities
1223 LOG_EMERG = 0 # system is unusable
1224 LOG_ALERT = 1 # action must be taken immediately
1225 LOG_CRIT = 2 # critical conditions
1226 LOG_ERR = 3 # error conditions
1227 LOG_WARNING = 4 # warning conditions
1228 LOG_NOTICE = 5 # normal but significant condition
1229 LOG_INFO = 6 # informational
1230 LOG_DEBUG = 7 # debug-level messages
1241 LOG_EMERG = 0 # system is unusable
1242 LOG_ALERT = 1 # action must be taken immediately
1243 LOG_CRIT = 2 # critical conditions
1244 LOG_ERR = 3 # error conditions
1245 LOG_WARNING = 4 # warning conditions
1246 LOG_NOTICE = 5 # normal but significant condition
1247 LOG_INFO = 6 # informational
1248 LOG_DEBUG = 7 # debug-level messages
12311249
12321250 # facility codes
1233 LOG_KERN = 0 # kernel messages
1234 LOG_USER = 1 # random user-level messages
1235 LOG_MAIL = 2 # mail system
1236 LOG_DAEMON = 3 # system daemons
1237 LOG_AUTH = 4 # security/authorization messages
1238 LOG_SYSLOG = 5 # messages generated internally by syslogd
1239 LOG_LPR = 6 # line printer subsystem
1240 LOG_NEWS = 7 # network news subsystem
1241 LOG_UUCP = 8 # UUCP subsystem
1242 LOG_CRON = 9 # clock daemon
1243 LOG_AUTHPRIV = 10 # security/authorization messages (private)
1244 LOG_FTP = 11 # FTP daemon
1251 LOG_KERN = 0 # kernel messages
1252 LOG_USER = 1 # random user-level messages
1253 LOG_MAIL = 2 # mail system
1254 LOG_DAEMON = 3 # system daemons
1255 LOG_AUTH = 4 # security/authorization messages
1256 LOG_SYSLOG = 5 # messages generated internally by syslogd
1257 LOG_LPR = 6 # line printer subsystem
1258 LOG_NEWS = 7 # network news subsystem
1259 LOG_UUCP = 8 # UUCP subsystem
1260 LOG_CRON = 9 # clock daemon
1261 LOG_AUTHPRIV = 10 # security/authorization messages (private)
1262 LOG_FTP = 11 # FTP daemon
12451263
12461264 # other codes through 15 reserved for system use
1247 LOG_LOCAL0 = 16 # reserved for local use
1248 LOG_LOCAL1 = 17 # reserved for local use
1249 LOG_LOCAL2 = 18 # reserved for local use
1250 LOG_LOCAL3 = 19 # reserved for local use
1251 LOG_LOCAL4 = 20 # reserved for local use
1252 LOG_LOCAL5 = 21 # reserved for local use
1253 LOG_LOCAL6 = 22 # reserved for local use
1254 LOG_LOCAL7 = 23 # reserved for local use
1265 LOG_LOCAL0 = 16 # reserved for local use
1266 LOG_LOCAL1 = 17 # reserved for local use
1267 LOG_LOCAL2 = 18 # reserved for local use
1268 LOG_LOCAL3 = 19 # reserved for local use
1269 LOG_LOCAL4 = 20 # reserved for local use
1270 LOG_LOCAL5 = 21 # reserved for local use
1271 LOG_LOCAL6 = 22 # reserved for local use
1272 LOG_LOCAL7 = 23 # reserved for local use
12551273
12561274 facility_names = {
12571275 'auth': LOG_AUTH,
15331551 self.buffered_records.append(record)
15341552 if self._buffer_full:
15351553 self.buffered_records.popleft()
1536 elif self.buffer_size and \
1537 len(self.buffered_records) >= self.buffer_size:
1554 elif (self.buffer_size and
1555 len(self.buffered_records) >= self.buffer_size):
15381556 self._buffer_full = True
15391557 return record.level >= self._level
15401558 return False
5959 from http import client as http_client
6060
6161 if PY2:
62 #Yucky, but apparently that's the only way to do this
62 # Yucky, but apparently that's the only way to do this
6363 exec("""
6464 def reraise(tp, value, tb=None):
6565 raise tp, value, tb
8181 )
8282 _missing = object()
8383 if PY2:
84 def b(x): return x
85 def _is_text_stream(x): return True
84 def b(x):
85 return x
86
87 def _is_text_stream(x):
88 return True
8689 else:
8790 import io
88 def b(x): return x.encode('ascii')
89 def _is_text_stream(stream): return isinstance(stream, io.TextIOBase)
91
92 def b(x):
93 return x.encode('ascii')
94
95 def _is_text_stream(stream):
96 return isinstance(stream, io.TextIOBase)
9097
9198
9299 can_rename_open_file = False
93 if os.name == 'nt': # pragma: no cover
94 _rename = lambda src, dst: False
95 _rename_atomic = lambda src, dst: False
96
100 if os.name == 'nt':
97101 try:
98102 import ctypes
99103
113117 rv = False
114118 while not rv and retry < 100:
115119 rv = _MoveFileEx(src, dst, _MOVEFILE_REPLACE_EXISTING |
116 _MOVEFILE_WRITE_THROUGH)
120 _MOVEFILE_WRITE_THROUGH)
117121 if not rv:
118122 time.sleep(0.001)
119123 retry += 1
147151 finally:
148152 _CloseHandle(ta)
149153 except Exception:
150 pass
154 def _rename(src, dst):
155 return False
156
157 def _rename_atomic(src, dst):
158 return False
151159
152160 def rename(src, dst):
153161 # Try atomic or pseudo-atomic rename
172180 can_rename_open_file = True
173181
174182 _JSON_SIMPLE_TYPES = (bool, float) + integer_types + string_types
183
175184
176185 def to_safe_json(data):
177186 """Makes a data structure safe for JSON silently discarding invalid
272281 obj.__dict__[self.__name__] = value
273282 return value
274283
284
275285 def get_iterator_next_method(it):
276286 return lambda: next(it)
287
277288
278289 # python 2 support functions and aliases
279290 def is_unicode(x):
280291 if PY2:
281292 return isinstance(x, unicode)
282293 return isinstance(x, str)
294
295 if PY2:
296 exec("""def with_metaclass(meta):
297 class _WithMetaclassBase(object):
298 __metaclass__ = meta
299 return _WithMetaclassBase
300 """)
301 else:
302 exec("""def with_metaclass(meta):
303 class _WithMetaclassBase(object, metaclass=meta):
304 pass
305 return _WithMetaclassBase
306 """)
1313 from cgi import parse_qsl
1414 from functools import partial
1515
16 from logbook.base import RecordDispatcher, dispatch_record, NOTSET, ERROR, NOTICE
17 from logbook.handlers import Handler, StringFormatter, \
18 StringFormatterHandlerMixin, StderrHandler
16 from logbook.base import (
17 RecordDispatcher, dispatch_record, NOTSET, ERROR, NOTICE)
18 from logbook.handlers import (
19 Handler, StringFormatter, StringFormatterHandlerMixin, StderrHandler)
1920 from logbook._termcolors import colorize
2021 from logbook.helpers import PY2, string_types, iteritems, u
2122
2829 from urllib.parse import urlencode
2930
3031 _ws_re = re.compile(r'(\s+)(?u)')
31 TWITTER_FORMAT_STRING = \
32 u('[{record.channel}] {record.level_name}: {record.message}')
32 TWITTER_FORMAT_STRING = u(
33 '[{record.channel}] {record.level_name}: {record.message}')
3334 TWITTER_ACCESS_TOKEN_URL = 'https://twitter.com/oauth/access_token'
3435 NEW_TWEET_URL = 'https://api.twitter.com/1/statuses/update.json'
3536
208209 def tweet(self, status):
209210 """Tweets a given status. Status must not exceed 140 chars."""
210211 client = self.make_client()
211 resp, content = client.request(NEW_TWEET_URL, 'POST',
212 resp, content = client.request(
213 NEW_TWEET_URL, 'POST',
212214 body=urlencode({'status': status.encode('utf-8')}),
213215 headers={'Content-Type': 'application/x-www-form-urlencoded'})
214216 return resp['status'] == '200'
267269 self._subprocess = subprocess
268270
269271 def emit(self, record):
270 args = [arg.format(record=record).encode(self.encoding)
272 args = [arg.format(record=record)
271273 for arg in self._arguments]
272274 if self._stdin_format is not None:
273 stdin_data = self._stdin_format.format(record=record) \
274 .encode(self.encoding)
275 stdin_data = (self._stdin_format.format(record=record)
276 .encode(self.encoding))
275277 stdin = self._subprocess.PIPE
276278 else:
277279 stdin = None
324326
325327
326328 # backwards compat. Should go away in some future releases
327 from logbook.handlers import FingersCrossedHandler as \
328 FingersCrossedHandlerBase
329 from logbook.handlers import (
330 FingersCrossedHandler as FingersCrossedHandlerBase)
331
332
329333 class FingersCrossedHandler(FingersCrossedHandlerBase):
330334 def __init__(self, *args, **kwargs):
331335 FingersCrossedHandlerBase.__init__(self, *args, **kwargs)
332336 from warnings import warn
333337 warn(PendingDeprecationWarning('fingers crossed handler changed '
334 'location. It\'s now a core component of Logbook.'))
338 'location. It\'s now a core component of Logbook.'))
335339
336340
337341 class ExceptionHandler(Handler, StringFormatterHandlerMixin):
359363 raise self.exc_type(self.format(record))
360364 return False
361365
366
362367 class DedupHandler(Handler):
363368 """A handler that deduplicates log messages.
364369
365 It emits each unique log record once, along with the number of times it was emitted.
370 It emits each unique log record once, along with the number of times it was
371 emitted.
366372 Example:::
367373
368374 with logbook.more.DedupHandler():
375381 message repeated 2 times: foo
376382 message repeated 1 times: bar
377383 """
378 def __init__(self, format_string='message repeated {count} times: {message}', *args, **kwargs):
384 def __init__(self,
385 format_string='message repeated {count} times: {message}',
386 *args, **kwargs):
379387 Handler.__init__(self, bubble=False, *args, **kwargs)
380388 self._format_string = format_string
381389 self.clear()
397405 self.flush()
398406
399407 def handle(self, record):
400 if not record.message in self._message_to_count:
408 if record.message not in self._message_to_count:
401409 self._unique_ordered_records.append(record)
402410 self._message_to_count[record.message] += 1
403411 return True
404412
405413 def flush(self):
406414 for record in self._unique_ordered_records:
407 record.message = self._format_string.format(message=record.message, count=self._message_to_count[record.message])
408 # record.dispatcher is the logger who created the message, it's sometimes supressed (by logbook.info for example)
409 dispatch = record.dispatcher.call_handlers if record.dispatcher is not None else dispatch_record
415 record.message = self._format_string.format(
416 message=record.message,
417 count=self._message_to_count[record.message])
418 # record.dispatcher is the logger who created the message,
419 # it's sometimes supressed (by logbook.info for example)
420 if record.dispatcher is not None:
421 dispatch = record.dispatcher.call_handlers
422 else:
423 dispatch = dispatch_record
410424 dispatch(record)
411425 self.clear()
412
1414
1515 from logbook.base import NOTSET, ERROR, WARNING
1616 from logbook.handlers import Handler, LimitingHandlerMixin
17 from logbook.helpers import get_application_name, PY2, http_client
17 from logbook.helpers import get_application_name, PY2, http_client, u
1818
1919 if PY2:
2020 from urllib import urlencode
2121 else:
2222 from urllib.parse import urlencode
2323
24 def create_notification_handler(application_name=None, level=NOTSET, icon=None):
24
25 def create_notification_handler(application_name=None, level=NOTSET,
26 icon=None):
2527 """Creates a handler perfectly fit the current platform. On Linux
2628 systems this creates a :class:`LibNotifyHandler`, on OS X systems it
2729 will create a :class:`GrowlHandler`.
150152 try:
151153 from gtk import gdk
152154 except ImportError:
153 #TODO: raise a warning?
155 # TODO: raise a warning?
154156 raise RuntimeError('The gtk.gdk module is required to set an icon.')
155157
156158 if icon is not None:
196198
197199 def __init__(self, email, password, record_limit=None, record_delta=None,
198200 level=NOTSET, filter=None, bubble=False):
199 NotificationBaseHandler.__init__(self, None, record_limit, record_delta,
200 level, filter, bubble)
201 NotificationBaseHandler.__init__(self, None, record_limit,
202 record_delta, level, filter, bubble)
201203 self.email = email
202204 self.password = password
203205
218220 con = http_client.HTTPSConnection('boxcar.io')
219221 con.request('POST', '/notifications/', headers={
220222 'Authorization': 'Basic ' +
221 base64.b64encode((u('%s:%s') %
222 (self.email, self.password)).encode('utf-8')).strip(),
223 base64.b64encode((u('%s:%s') % (self.email, self.password))
224 .encode('utf-8')).strip(),
223225 }, body=body)
224226 con.close()
225227
230232 """
231233
232234 def __init__(self, application_name=None, username=None, secret=None,
233 record_limit=None, record_delta=None, level=NOTSET, filter=None,
234 bubble=False, hide_level=False):
235 record_limit=None, record_delta=None, level=NOTSET,
236 filter=None, bubble=False, hide_level=False):
235237 try:
236238 import notifo
237239 except ImportError:
239241 'The notifo module is not available. You have '
240242 'to install notifo to use the NotifoHandler.'
241243 )
242 NotificationBaseHandler.__init__(self, None, record_limit, record_delta,
243 level, filter, bubble)
244 NotificationBaseHandler.__init__(self, None, record_limit,
245 record_delta, level, filter, bubble)
244246 self._notifo = notifo
245247 self.application_name = application_name
246248 self.username = username
247249 self.secret = secret
248250 self.hide_level = hide_level
249
250251
251252 def emit(self, record):
252253
3232
3333 handler = RedisHandler('http://127.0.0.1', port='9200', key='redis')
3434
35 If your Redis instance is password protected, you can securely connect passing
36 your password when creating a RedisHandler object.
35 If your Redis instance is password protected, you can securely connect
36 passing your password when creating a RedisHandler object.
3737
3838 Example::
3939
4141
4242 More info about the default buffer size: wp.me/p3tYJu-3b
4343 """
44 def __init__(self, host='127.0.0.1', port=6379, key='redis', extra_fields={},
45 flush_threshold=128, flush_time=1, level=NOTSET, filter=None,
46 password=False, bubble=True, context=None, push_method='rpush'):
44 def __init__(self, host='127.0.0.1', port=6379, key='redis',
45 extra_fields={}, flush_threshold=128, flush_time=1,
46 level=NOTSET, filter=None, password=False, bubble=True,
47 context=None, push_method='rpush'):
4748 Handler.__init__(self, level, filter, bubble)
4849 try:
4950 import redis
5253 raise RuntimeError('The redis library is required for '
5354 'the RedisHandler')
5455
55 self.redis = redis.Redis(host=host, port=port, password=password, decode_responses=True)
56 self.redis = redis.Redis(host=host, port=port, password=password,
57 decode_responses=True)
5658 try:
5759 self.redis.ping()
5860 except ResponseError:
59 raise ResponseError('The password provided is apparently incorrect')
61 raise ResponseError(
62 'The password provided is apparently incorrect')
6063 self.key = key
6164 self.extra_fields = extra_fields
6265 self.flush_threshold = flush_threshold
6467 self.lock = Lock()
6568 self.push_method = push_method
6669
67 #Set up a thread that flushes the queue every specified seconds
70 # Set up a thread that flushes the queue every specified seconds
6871 self._stop_event = threading.Event()
6972 self._flushing_t = threading.Thread(target=self._flush_task,
70 args=(flush_time, self._stop_event))
73 args=(flush_time,
74 self._stop_event))
7175 self._flushing_t.daemon = True
7276 self._flushing_t.start()
73
7477
7578 def _flush_task(self, time, stop_event):
7679 """Calls the method _flush_buffer every certain time.
8083 self._flush_buffer()
8184 self._stop_event.wait(time)
8285
83
8486 def _flush_buffer(self):
8587 """Flushes the messaging queue into Redis.
8688
9294 getattr(self.redis, self.push_method)(self.key, *self.queue)
9395 self.queue = []
9496
95
9697 def disable_buffering(self):
9798 """Disables buffering.
9899
101102 self._stop_event.set()
102103 self.flush_threshold = 1
103104
104
105105 def emit(self, record):
106106 """Emits a pair (key, value) to redis.
107107
108 The key is the one provided when creating the handler, or redis if none was
109 provided. The value contains both the message and the hostname. Extra values
110 are also appended to the message.
108 The key is the one provided when creating the handler, or redis if none
109 was provided. The value contains both the message and the hostname.
110 Extra values are also appended to the message.
111111 """
112112 with self.lock:
113113 r = {"message": record.msg,
120120 if len(self.queue) == self.flush_threshold:
121121 self._flush_buffer()
122122
123
124123 def close(self):
125124 self._flush_buffer()
126125
132131 The queue will be filled with JSON exported log records. To receive such
133132 log records from a queue you can use the :class:`MessageQueueSubscriber`.
134133
135 Example setup::
134 For an AMQP backend such as RabbitMQ::
135
136 handler = MessageQueueHandler('amqp://guest:guest@localhost//')
137
138 This requires the py-amqp or the librabbitmq client library.
139
140 For Redis (requires redis client library)::
141
142 handler = MessageQueueHandler('redis://localhost:8889/0')
143
144 For MongoDB (requires pymongo)::
136145
137146 handler = MessageQueueHandler('mongodb://localhost:27017/logging')
147
148 Several other backends are also supported.
149 Refer to the `kombu`_ documentation
150
151 .. _kombu: http://kombu.readthedocs.org/en/latest/introduction.html
138152 """
139153
140154 def __init__(self, uri=None, queue='logging', level=NOTSET,
141 filter=None, bubble=False, context=None):
155 filter=None, bubble=False):
142156 Handler.__init__(self, level, filter, bubble)
143157 try:
144158 import kombu
205219 if uri is not None:
206220 self.socket.bind(uri)
207221
208
209222 def export_record(self, record):
210223 """Exports the record into a dictionary ready for JSON dumping."""
211224 return record.to_dict(json_safe=True)
212225
213226 def emit(self, record):
214 self.socket.send(json.dumps(self.export_record(record)).encode("utf-8"))
227 self.socket.send(json.dumps(
228 self.export_record(record)).encode("utf-8"))
215229
216230 def close(self, linger=-1):
217231 self.socket.close(linger)
218232
219233 def __del__(self):
220 # When the Handler is deleted we must close our socket in a non-blocking
221 # fashion (using linger).
234 # When the Handler is deleted we must close our socket in a
235 # non-blocking fashion (using linger).
222236 # Otherwise it can block indefinitely, for example if the Subscriber is
223237 # not reachable.
224 # If messages are pending on the socket, we wait 100ms for them to be sent
225 # then we discard them.
238 # If messages are pending on the socket, we wait 100ms for them to be
239 # sent then we discard them.
226240 self.close(linger=100)
227241
228242
268282 """Baseclass for all subscribers."""
269283
270284 def recv(self, timeout=None):
271 """Receives a single record from the socket. Timeout of 0 means nonblocking,
272 `None` means blocking and otherwise it's a timeout in seconds after which
273 the function just returns with `None`.
285 """Receives a single record from the socket. Timeout of 0 means
286 nonblocking, `None` means blocking and otherwise it's a timeout in
287 seconds after which the function just returns with `None`.
274288
275289 Subclasses have to override this.
276290 """
354368 self.queue.close()
355369
356370 def recv(self, timeout=None):
357 """Receives a single record from the socket. Timeout of 0 means nonblocking,
358 `None` means blocking and otherwise it's a timeout in seconds after which
359 the function just returns with `None`.
371 """Receives a single record from the socket. Timeout of 0 means
372 nonblocking, `None` means blocking and otherwise it's a timeout in
373 seconds after which the function just returns with `None`.
360374 """
361375 if timeout == 0:
362376 try:
444458 self.socket.close()
445459
446460 def recv(self, timeout=None):
447 """Receives a single record from the socket. Timeout of 0 means nonblocking,
448 `None` means blocking and otherwise it's a timeout in seconds after which
449 the function just returns with `None`.
461 """Receives a single record from the socket. Timeout of 0 means
462 nonblocking, `None` means blocking and otherwise it's a timeout in
463 seconds after which the function just returns with `None`.
450464 """
451465 if timeout is None:
452466 rv = self.socket.recv()
576590 try:
577591 rv = self.channel.receive(timeout=timeout)
578592 except self.channel.RemoteError:
579 #XXX: handle
593 # XXX: handle
580594 return None
581595 except (self.channel.TimeoutError, EOFError):
582596 return None
669683 if record:
670684 try:
671685 self.queue.put(record, timeout=0.05)
672 except Queue.Full:
686 except Full:
673687 pass
674688 finally:
675689 if self.setup is not None:
1414 from logbook.handlers import Handler, HashingHandlerMixin
1515 from logbook.helpers import cached_property, b, PY2, u
1616
17
1718 class Ticket(object):
1819 """Represents a ticket from the database."""
1920
8889 """Returns the number of tickets."""
8990 raise NotImplementedError()
9091
91 def get_tickets(self, order_by='-last_occurrence_time', limit=50, offset=0):
92 def get_tickets(self, order_by='-last_occurrence_time',
93 limit=50, offset=0):
9294 """Selects tickets from the database."""
9395 raise NotImplementedError()
9496
130132
131133 def setup_backend(self):
132134 from sqlalchemy import create_engine, MetaData
135 from sqlalchemy.orm import sessionmaker, scoped_session
133136 engine_or_uri = self.options.pop('uri', None)
134137 metadata = self.options.pop('metadata', None)
135138 table_prefix = self.options.pop('table_prefix', 'logbook_')
137140 if hasattr(engine_or_uri, 'execute'):
138141 self.engine = engine_or_uri
139142 else:
140 self.engine = create_engine(engine_or_uri, convert_unicode=True)
143 # Pool recycle keeps connections from going stale,
144 # which happens in MySQL Databases
145 # Pool size is more custom for out stack
146 self.engine = create_engine(engine_or_uri, convert_unicode=True,
147 pool_recycle=360, pool_size=1000)
148
149 # Create session factory using session maker
150 session = sessionmaker()
151
152 # Bind to the engined
153 session.configure(bind=self.engine)
154
155 # Scoped session is a thread safe solution for
156 # interaction with the Database
157 self.session = scoped_session(session)
158
141159 if metadata is None:
142160 metadata = MetaData()
143161 self.table_prefix = table_prefix
151169 metadata.
152170 """
153171 import sqlalchemy as db
172
154173 def table(name, *args, **kwargs):
155174 return db.Table(self.table_prefix + name, self.metadata,
156175 *args, **kwargs)
157176 self.tickets = table('tickets',
158 db.Column('ticket_id', db.Integer, primary_key=True),
159 db.Column('record_hash', db.String(40), unique=True),
160 db.Column('level', db.Integer),
161 db.Column('channel', db.String(120)),
162 db.Column('location', db.String(512)),
163 db.Column('module', db.String(256)),
164 db.Column('last_occurrence_time', db.DateTime),
165 db.Column('occurrence_count', db.Integer),
166 db.Column('solved', db.Boolean),
167 db.Column('app_id', db.String(80))
168 )
177 db.Column('ticket_id', db.Integer,
178 primary_key=True),
179 db.Column('record_hash', db.String(40),
180 unique=True),
181 db.Column('level', db.Integer),
182 db.Column('channel', db.String(120)),
183 db.Column('location', db.String(512)),
184 db.Column('module', db.String(256)),
185 db.Column('last_occurrence_time', db.DateTime),
186 db.Column('occurrence_count', db.Integer),
187 db.Column('solved', db.Boolean),
188 db.Column('app_id', db.String(80)))
169189 self.occurrences = table('occurrences',
170 db.Column('occurrence_id', db.Integer, primary_key=True),
171 db.Column('ticket_id', db.Integer,
172 db.ForeignKey(self.table_prefix + 'tickets.ticket_id')),
173 db.Column('time', db.DateTime),
174 db.Column('data', db.Text),
175 db.Column('app_id', db.String(80))
176 )
190 db.Column('occurrence_id',
191 db.Integer, primary_key=True),
192 db.Column('ticket_id', db.Integer,
193 db.ForeignKey(self.table_prefix +
194 'tickets.ticket_id')),
195 db.Column('time', db.DateTime),
196 db.Column('data', db.Text),
197 db.Column('app_id', db.String(80)))
177198
178199 def _order(self, q, table, order_by):
179200 if order_by[0] == '-':
182203
183204 def record_ticket(self, record, data, hash, app_id):
184205 """Records a log record as ticket."""
185 cnx = self.engine.connect()
186 trans = cnx.begin()
206 # Can use the session instead engine.connection and transaction
207 s = self.session
187208 try:
188209 q = self.tickets.select(self.tickets.c.record_hash == hash)
189 row = cnx.execute(q).fetchone()
210 row = s.execute(q).fetchone()
190211 if row is None:
191 row = cnx.execute(self.tickets.insert().values(
212 row = s.execute(self.tickets.insert().values(
192213 record_hash=hash,
193214 level=record.level,
194215 channel=record.channel or u(''),
201222 ticket_id = row.inserted_primary_key[0]
202223 else:
203224 ticket_id = row['ticket_id']
204 cnx.execute(self.occurrences.insert()
205 .values(ticket_id=ticket_id,
206 time=record.time,
207 app_id=app_id,
208 data=json.dumps(data)))
209 cnx.execute(self.tickets.update()
225 s.execute(self.occurrences.insert()
226 .values(ticket_id=ticket_id,
227 time=record.time,
228 app_id=app_id,
229 data=json.dumps(data)))
230 s.execute(
231 self.tickets.update()
210232 .where(self.tickets.c.ticket_id == ticket_id)
211233 .values(occurrence_count=self.tickets.c.occurrence_count + 1,
212234 last_occurrence_time=record.time,
213235 solved=False))
214 trans.commit()
236 s.commit()
215237 except Exception:
216 trans.rollback()
238 s.rollback()
217239 raise
218 cnx.close()
240 # Closes the session and removes it from the pool
241 s.remove()
219242
220243 def count_tickets(self):
221244 """Returns the number of tickets."""
222245 return self.engine.execute(self.tickets.count()).fetchone()[0]
223246
224 def get_tickets(self, order_by='-last_occurrence_time', limit=50, offset=0):
247 def get_tickets(self, order_by='-last_occurrence_time', limit=50,
248 offset=0):
225249 """Selects tickets from the database."""
226250 return [Ticket(self, row) for row in self.engine.execute(
227251 self._order(self.tickets.select(), self.tickets, order_by)
230254 def solve_ticket(self, ticket_id):
231255 """Marks a ticket as solved."""
232256 self.engine.execute(self.tickets.update()
233 .where(self.tickets.c.ticket_id == ticket_id)
234 .values(solved=True))
257 .where(self.tickets.c.ticket_id == ticket_id)
258 .values(solved=True))
235259
236260 def delete_ticket(self, ticket_id):
237261 """Deletes a ticket from the database."""
238262 self.engine.execute(self.occurrences.delete()
239 .where(self.occurrences.c.ticket_id == ticket_id))
263 .where(self.occurrences.c.ticket_id == ticket_id))
240264 self.engine.execute(self.tickets.delete()
241 .where(self.tickets.c.ticket_id == ticket_id))
265 .where(self.tickets.c.ticket_id == ticket_id))
242266
243267 def get_ticket(self, ticket_id):
244268 """Return a single ticket with all occurrences."""
250274 def get_occurrences(self, ticket, order_by='-time', limit=50, offset=0):
251275 """Selects occurrences from the database for a ticket."""
252276 return [Occurrence(self, row) for row in
253 self.engine.execute(self._order(self.occurrences.select()
277 self.engine.execute(self._order(
278 self.occurrences.select()
254279 .where(self.occurrences.c.ticket_id == ticket),
255280 self.occurrences, order_by)
256281 .limit(limit).offset(offset)).fetchall()]
272297 self.ticket_id = row['ticket_id']
273298 self.occurrence_id = row['_id']
274299
275 #TODO: Update connection setup once PYTHON-160 is solved.
300 # TODO: Update connection setup once PYTHON-160 is solved.
276301 def setup_backend(self):
277 import pymongo
278302 from pymongo import ASCENDING, DESCENDING
279303 from pymongo.connection import Connection
280304
312336 self.database = database
313337
314338 # setup correct indexes
315 database.tickets.ensure_index([('record_hash', ASCENDING)], unique=True)
316 database.tickets.ensure_index([('solved', ASCENDING), ('level', ASCENDING)])
339 database.tickets.ensure_index([('record_hash', ASCENDING)],
340 unique=True)
341 database.tickets.ensure_index([('solved', ASCENDING),
342 ('level', ASCENDING)])
317343 database.occurrences.ensure_index([('time', DESCENDING)])
318344
319345 def _order(self, q, order_by):
336362 'record_hash': hash,
337363 'level': record.level,
338364 'channel': record.channel or u(''),
339 'location': u('%s:%d') % (record.filename, record.lineno),
365 'location': u('%s:%d') % (record.filename,
366 record.lineno),
340367 'module': record.module or u('<unknown>'),
341368 'occurrence_count': 0,
342369 'solved': False,
348375
349376 db.tickets.update({'_id': ticket_id}, {
350377 '$inc': {
351 'occurrence_count': 1
378 'occurrence_count': 1
352379 },
353380 '$set': {
354381 'last_occurrence_time': record.time,
368395 """Returns the number of tickets."""
369396 return self.database.tickets.count()
370397
371 def get_tickets(self, order_by='-last_occurrence_time', limit=50, offset=0):
398 def get_tickets(self, order_by='-last_occurrence_time', limit=50,
399 offset=0):
372400 """Selects tickets from the database."""
373 query = self._order(self.database.tickets.find(), order_by) \
374 .limit(limit).skip(offset)
401 query = (self._order(self.database.tickets.find(), order_by)
402 .limit(limit).skip(offset))
375403 return [self._FixedTicketClass(self, obj) for obj in query]
376404
377405 def solve_ticket(self, ticket_id):
433461 used to keep multiple application setups apart when logging
434462 into the same database.
435463 :param hash_salt: an optional salt (binary string) for the hashes.
436 :param backend: A backend class that implements the proper database handling.
464 :param backend: A backend class that implements the proper database
465 handling.
437466 Backends available are: :class:`SQLAlchemyBackend`,
438467 :class:`MongoDBBackend`.
439468 """
0 from contextlib import contextmanager
1 import functools
2 import sys
3 import threading
4
5 from .base import Logger
6 from .helpers import string_types
7 from logbook import debug as logbook_debug
8
9
10 class _SlowContextNotifier(object):
11
12 def __init__(self, threshold, logger_func, args, kwargs):
13 self.logger_func = logger_func
14 self.args = args
15 self.kwargs = kwargs or {}
16 self.evt = threading.Event()
17 self.threshold = threshold
18 self.thread = threading.Thread(target=self._notifier)
19
20 def _notifier(self):
21 self.evt.wait(timeout=self.threshold)
22 if not self.evt.is_set():
23 self.logger_func(*self.args, **self.kwargs)
24
25 def __enter__(self):
26 self.thread.start()
27 return self
28
29 def __exit__(self, *_):
30 self.evt.set()
31 self.thread.join()
32
33
34 def logged_if_slow(message, threshold=1, func=logbook_debug, args=None,
35 kwargs=None):
36 """Logs a message (by default using the global debug logger) if a certain
37 context containing a set of operations is too slow
38
39 >>> with logged_if_slow('too slow!'):
40 ... ...
41 """
42 full_args = (message, ) if args is None else (message, ) + tuple(args)
43 return _SlowContextNotifier(threshold, func, full_args, kwargs)
44
45
46 class _Local(threading.local):
47 enabled = True
48
49 _local = _Local()
50
51
52 @contextmanager
53 def suppressed_deprecations():
54 """Disables deprecation messages temporarily
55
56 >>> with suppressed_deprecations():
57 ... call_some_deprecated_logic()
58 """
59 prev_enabled = _local.enabled
60 _local.enabled = False
61 try:
62 yield
63 finally:
64 _local.enabled = prev_enabled
65
66
67 _deprecation_logger = Logger("deprecation")
68 _deprecation_locations = set()
69
70
71 def forget_deprecation_locations():
72 _deprecation_locations.clear()
73
74
75 def _write_deprecations_if_needed(message, frame_correction):
76 if not _local.enabled:
77 return
78 caller_location = _get_caller_location(frame_correction=frame_correction+1)
79 if caller_location not in _deprecation_locations:
80 _deprecation_logger.warning(message, frame_correction=frame_correction+1)
81 _deprecation_locations.add(caller_location)
82
83
def log_deprecation_message(message, frame_correction=0):
    # Logs an explicit deprecation ``message`` once per call site,
    # attributing it to the caller's frame.
    text = "Deprecation message: {0}".format(message)
    _write_deprecations_if_needed(text, frame_correction=frame_correction + 1)
86
87
88 class _DeprecatedFunction(object):
89
90 def __init__(self, func, message, obj=None, objtype=None):
91 super(_DeprecatedFunction, self).__init__()
92 self._func = func
93 self._message = message
94 self._obj = obj
95 self._objtype = objtype
96
97 def _get_underlying_func(self):
98 returned = self._func
99 if isinstance(returned, classmethod):
100 if hasattr(returned, '__func__'):
101 returned = returned.__func__
102 else:
103 returned = returned.__get__(self._objtype).__func__
104 return returned
105
106 def __call__(self, *args, **kwargs):
107 func = self._get_underlying_func()
108 warning = "{0} is deprecated.".format(self._get_func_str())
109 if self._message is not None:
110 warning += " {0}".format(self._message)
111 _write_deprecations_if_needed(warning, frame_correction=+1)
112 if self._obj is not None:
113 return func(self._obj, *args, **kwargs)
114 elif self._objtype is not None:
115 return func(self._objtype, *args, **kwargs)
116 return func(*args, **kwargs)
117
118 def _get_func_str(self):
119 func = self._get_underlying_func()
120 if self._objtype is not None:
121 return '{0}.{1}'.format(self._objtype.__name__, func.__name__)
122 return '{0}.{1}'.format(func.__module__, func.__name__)
123
124 def __get__(self, obj, objtype):
125 return self.bound_to(obj, objtype)
126
127 def bound_to(self, obj, objtype):
128 return _DeprecatedFunction(self._func, self._message, obj=obj,
129 objtype=objtype)
130
131 @property
132 def __name__(self):
133 return self._get_underlying_func().__name__
134
135 @property
136 def __doc__(self):
137 returned = self._get_underlying_func().__doc__
138 if returned: # pylint: disable=no-member
139 returned += "\n.. deprecated\n" # pylint: disable=no-member
140 if self._message:
141 returned += " {0}".format(
142 self._message) # pylint: disable=no-member
143 return returned
144
145 @__doc__.setter
146 def __doc__(self, doc):
147 self._get_underlying_func().__doc__ = doc
148
149
def deprecated(func=None, message=None):
    """Marks the specified function as deprecated, and emits a warning when
    it's called.

    >>> @deprecated(message='No longer supported')
    ... def deprecated_func():
    ...     pass

    This will cause a warning log to be emitted when the function gets called,
    with the correct filename/lineno
    """
    # Support the ``@deprecated('reason')`` form, where the positional
    # argument is actually the message rather than the function.
    if isinstance(func, string_types):
        assert message is None
        func, message = None, func

    # Called with keyword arguments only -- return a decorator.
    if func is None:
        return functools.partial(deprecated, message=message)

    return _DeprecatedFunction(func, message)
170
171
172 def _get_caller_location(frame_correction):
173 frame = sys._getframe(frame_correction + 1) # pylint: disable=protected-access
174 try:
175 return (frame.f_code.co_name, frame.f_lineno)
176 finally:
177 del frame
2626 match = re.search('^Version\s+(.*)', line.strip())
2727 if match is None:
2828 continue
29 length = len(match.group(1))
3029 version = match.group(1).strip()
3130 if lineiter.next().count('-') != len(match.group(0)):
3231 continue
6059
6160 def set_filename_version(filename, version_number, pattern):
6261 changed = []
62
6363 def inject_version(match):
6464 before, old, after = match.groups()
6565 changed.append(True)
7575 f.write(contents)
7676
7777
78 def set_init_version(version):
79 info('Setting __init__.py version to %s', version)
80 set_filename_version('logbook/__init__.py', version, '__version__')
81
82
83 def set_setup_version(version):
84 info('Setting setup.py version to %s', version)
85 set_filename_version('setup.py', version, 'version')
86
87 def set_doc_version(version):
88 info('Setting docs/conf.py version to %s', version)
89 set_filename_version('docs/conf.py', version, 'version')
90 set_filename_version('docs/conf.py', version, 'release')
91
92
93 def build_and_upload():
94 Popen([sys.executable, 'setup.py', 'release', 'sdist', 'upload']).wait()
78 def set_version(version):
79 info('Setting version to %s', version)
80 with open('logbook/__version__.py', 'w') as f:
81 f.write('__version__ = {!r}'.format(version))
9582
9683
9784 def fail(message, *args):
10491
10592
10693 def get_git_tags():
107 return set(Popen(['git', 'tag'], stdout=PIPE).communicate()[0].splitlines())
94 return set(Popen(['git', 'tag'],
95 stdout=PIPE).communicate()[0].splitlines())
10896
10997
11098 def git_is_clean():
122110
123111
124112 parser = argparse.ArgumentParser("%prog [options]")
125 parser.add_argument("--no-upload", dest="upload", action="store_false", default=True)
113 parser.add_argument("--no-upload", dest="upload",
114 action="store_false", default=True)
115
126116
127117 def main():
128118 args = parser.parse_args()
143133 if version in tags:
144134 fail('Version "%s" is already tagged', version)
145135 if release_date.date() != date.today():
146 fail('Release date is not today (%s != %s)' % (release_date.date(), date.today()))
136 fail('Release date is not today (%s != %s)' %
137 (release_date.date(), date.today()))
147138
148139 if not git_is_clean():
149140 fail('You have uncommitted changes in git')
150141
151 set_init_version(version)
152 set_setup_version(version)
153 set_doc_version(version)
142 set_version(version)
154143 make_git_commit('Bump version number to %s', version)
155144 make_git_tag(version)
156 if args.upload:
157 build_and_upload()
158 set_init_version(dev_version)
159 set_setup_version(dev_version)
160 set_doc_version(dev_version)
145 set_version(dev_version)
161146 make_git_commit('Bump version number to %s', dev_version)
162147
163148
00 #! /usr/bin/python
1 import subprocess
2 import os
1 import pip
32 import sys
4
5 def _execute(*args, **kwargs):
6 result = subprocess.call(*args, **kwargs)
7 if result != 0:
8 sys.exit(result)
93
104 if __name__ == '__main__':
115 python_version = sys.version_info
2317 else:
2418 deps.append("Jinja2")
2519 print("Setting up dependencies...")
26 _execute([os.path.join(os.path.dirname(sys.executable), "pip"), "install"] + deps, shell=False)
20 result = pip.main(["install"] + deps)
21 sys.exit(result)
1212 print("PyPy+Cython configuration skipped")
1313 else:
1414 sys.exit(
15 subprocess.call("make cybuild test" if use_cython else "make test", shell=True)
15 subprocess.call(
16 "make cybuild test" if use_cython else "make test", shell=True)
1617 )
44
55 [upload_docs]
66 upload-dir = docs/_build/html
7
8 [egg_info]
9 tag_date = true
10
11 [aliases]
12 release = egg_info -RDb ''
5555 import sys
5656 from setuptools import setup, Extension, Feature
5757 from distutils.command.build_ext import build_ext
58 from distutils.errors import CCompilerError, DistutilsExecError, \
59 DistutilsPlatformError
58 from distutils.errors import (
59 CCompilerError, DistutilsExecError, DistutilsPlatformError)
6060
6161
6262 extra = {}
9191
9292 cmdclass['build_ext'] = ve_build_ext
9393 # Don't try to compile the extension if we're running on PyPy
94 if os.path.isfile('logbook/_speedups.c') and not hasattr(sys, "pypy_translation_info"):
94 if (os.path.isfile('logbook/_speedups.c') and
95 not hasattr(sys, "pypy_translation_info")):
9596 speedups = Feature('optional C speed-enhancement module', standard=True,
9697 ext_modules=[Extension('logbook._speedups',
9798 ['logbook/_speedups.c'])])
9899 else:
99100 speedups = None
101
102
103 with open(os.path.join(os.path.dirname(__file__), "logbook", "__version__.py")) as version_file:
104 exec(version_file.read()) # pylint: disable=W0122
100105
101106
102107 def run_setup(with_binary):
105110 features['speedups'] = speedups
106111 setup(
107112 name='Logbook',
108 version='0.10.0',
113 version=__version__,
109114 license='BSD',
110115 url='http://logbook.pocoo.org/',
111116 author='Armin Ronacher, Georg Brandl',
116121 zip_safe=False,
117122 platforms='any',
118123 cmdclass=cmdclass,
124 classifiers=[
125 "Programming Language :: Python :: 2.6",
126 "Programming Language :: Python :: 2.7",
127 "Programming Language :: Python :: 3.2",
128 "Programming Language :: Python :: 3.3",
129 "Programming Language :: Python :: 3.4",
130 "Programming Language :: Python :: 3.5",
131 ],
119132 features=features,
120133 install_requires=[
121 'six>=1.4.0',
122134 ],
123135 **extra
124136 )
11 import pytest
22
33 logbook.StderrHandler().push_application()
4
45
56 @pytest.fixture
67 def logger():
9091
9192 try:
9293 import gevent
93
94 except ImportError:
95 pass
96 else:
9497 @pytest.fixture(scope="module", autouse=True, params=[False, True])
9598 def gevent(request):
9699 module_name = getattr(request.module, '__name__', '')
97 if not any(s in module_name for s in ('queues', 'processors')) and request.param:
100 if (not any(s in module_name for s in ('queues', 'processors'))
101 and request.param):
98102 from logbook.concurrency import enable_gevent, _disable_gevent
99103 enable_gevent()
100104
101105 @request.addfinalizer
102106 def fin():
103107 _disable_gevent()
104 except ImportError:
105 pass
2727 def test_deadlock_in_emit():
2828 logbook_logger = logbook.Logger("logbook")
2929 obj = MyObject(logbook_logger.info)
30 stream_handler = logbook.StreamHandler(stream=sys.stderr, level=logbook.DEBUG)
30 stream_handler = logbook.StreamHandler(stream=sys.stderr,
31 level=logbook.DEBUG)
3132 stream_handler.lock = FakeLock()
3233 with stream_handler.applicationbound():
3334 logbook_logger.info("format this: {}", obj)
88
99
1010 def test_file_handler(logfile, activation_strategy, logger):
11 handler = logbook.FileHandler(logfile,
12 format_string='{record.level_name}:{record.channel}:'
13 '{record.message}',)
11 handler = logbook.FileHandler(
12 logfile,
13 format_string='{record.level_name}:{record.channel}:{record.message}',)
1414 with activation_strategy(handler):
1515 logger.warn('warning message')
1616 handler.close()
2626
2727
2828 def test_file_handler_delay(logfile, activation_strategy, logger):
29 handler = logbook.FileHandler(logfile,
30 format_string='{record.level_name}:{record.channel}:'
31 '{record.message}', delay=True)
29 handler = logbook.FileHandler(
30 logfile,
31 format_string='{record.level_name}:{record.channel}:{record.message}',
32 delay=True)
3233 assert (not os.path.isfile(logfile))
3334 with activation_strategy(handler):
3435 logger.warn('warning message')
4041
4142 def test_monitoring_file_handler(logfile, activation_strategy, logger):
4243 if os.name == 'nt':
43 pytest.skip('unsupported on windows due to different IO (also unneeded)')
44 handler = logbook.MonitoringFileHandler(logfile,
45 format_string='{record.level_name}:{record.channel}:'
46 '{record.message}', delay=True)
44 pytest.skip(
45 'unsupported on windows due to different IO (also unneeded)')
46 handler = logbook.MonitoringFileHandler(
47 logfile,
48 format_string='{record.level_name}:{record.channel}:{record.message}',
49 delay=True)
4750 with activation_strategy(handler):
4851 logger.warn('warning message')
4952 os.rename(logfile, logfile + '.old')
7982 if x.startswith(basename)]
8083 files.sort()
8184
82 assert files == [basename, basename + '.1', basename + '.2', basename + '.3']
85