diff --git a/.appveyor/after_test.bat b/.appveyor/after_test.bat new file mode 100644 index 0000000..79efa01 --- /dev/null +++ b/.appveyor/after_test.bat @@ -0,0 +1,6 @@ +IF DEFINED CYBUILD ( + %WITH_COMPILER% python setup.py bdist_wheel + IF "%APPVEYOR_REPO_TAG%"=="true" ( + twine upload -u %PYPI_USERNAME% -p %PYPI_PASSWORD% dist\*.whl + ) +) \ No newline at end of file diff --git a/.appveyor/install.ps1 b/.appveyor/install.ps1 new file mode 100644 index 0000000..94d6f01 --- /dev/null +++ b/.appveyor/install.ps1 @@ -0,0 +1,229 @@ +# Sample script to install Python and pip under Windows +# Authors: Olivier Grisel, Jonathan Helmus, Kyle Kastner, and Alex Willmer +# License: CC0 1.0 Universal: http://creativecommons.org/publicdomain/zero/1.0/ + +$MINICONDA_URL = "http://repo.continuum.io/miniconda/" +$BASE_URL = "https://www.python.org/ftp/python/" +$GET_PIP_URL = "https://bootstrap.pypa.io/get-pip.py" +$GET_PIP_PATH = "C:\get-pip.py" + +$PYTHON_PRERELEASE_REGEX = @" +(?x) +(?<major>\d+) +\. +(?<minor>\d+) +\. +(?<micro>\d+) +(?<prerelease>[a-z]{1,2}\d+) +"@ + + +function Download ($filename, $url) { + $webclient = New-Object System.Net.WebClient + + $basedir = $pwd.Path + "\" + $filepath = $basedir + $filename + if (Test-Path $filename) { + Write-Host "Reusing" $filepath + return $filepath + } + + # Download and retry up to 3 times in case of network transient errors. 
+ Write-Host "Downloading" $filename "from" $url + $retry_attempts = 2 + for ($i = 0; $i -lt $retry_attempts; $i++) { + try { + $webclient.DownloadFile($url, $filepath) + break + } + Catch [Exception]{ + Start-Sleep 1 + } + } + if (Test-Path $filepath) { + Write-Host "File saved at" $filepath + } else { + # Retry once to get the error message if any at the last try + $webclient.DownloadFile($url, $filepath) + } + return $filepath +} + + +function ParsePythonVersion ($python_version) { + if ($python_version -match $PYTHON_PRERELEASE_REGEX) { + return ([int]$matches.major, [int]$matches.minor, [int]$matches.micro, + $matches.prerelease) + } + $version_obj = [version]$python_version + return ($version_obj.major, $version_obj.minor, $version_obj.build, "") +} + + +function DownloadPython ($python_version, $platform_suffix) { + $major, $minor, $micro, $prerelease = ParsePythonVersion $python_version + + if (($major -le 2 -and $micro -eq 0) ` + -or ($major -eq 3 -and $minor -le 2 -and $micro -eq 0) ` + ) { + $dir = "$major.$minor" + $python_version = "$major.$minor$prerelease" + } else { + $dir = "$major.$minor.$micro" + } + + if ($prerelease) { + if (($major -le 2) ` + -or ($major -eq 3 -and $minor -eq 1) ` + -or ($major -eq 3 -and $minor -eq 2) ` + -or ($major -eq 3 -and $minor -eq 3) ` + ) { + $dir = "$dir/prev" + } + } + + if (($major -le 2) -or ($major -le 3 -and $minor -le 4)) { + $ext = "msi" + if ($platform_suffix) { + $platform_suffix = ".$platform_suffix" + } + } else { + $ext = "exe" + if ($platform_suffix) { + $platform_suffix = "-$platform_suffix" + } + } + + $filename = "python-$python_version$platform_suffix.$ext" + $url = "$BASE_URL$dir/$filename" + $filepath = Download $filename $url + return $filepath +} + + +function InstallPython ($python_version, $architecture, $python_home) { + Write-Host "Installing Python" $python_version "for" $architecture "bit architecture to" $python_home + if (Test-Path $python_home) { + Write-Host $python_home "already 
exists, skipping." + return $false + } + if ($architecture -eq "32") { + $platform_suffix = "" + } else { + $platform_suffix = "amd64" + } + $installer_path = DownloadPython $python_version $platform_suffix + $installer_ext = [System.IO.Path]::GetExtension($installer_path) + Write-Host "Installing $installer_path to $python_home" + $install_log = $python_home + ".log" + if ($installer_ext -eq '.msi') { + InstallPythonMSI $installer_path $python_home $install_log + } else { + InstallPythonEXE $installer_path $python_home $install_log + } + if (Test-Path $python_home) { + Write-Host "Python $python_version ($architecture) installation complete" + } else { + Write-Host "Failed to install Python in $python_home" + Get-Content -Path $install_log + Exit 1 + } +} + + +function InstallPythonEXE ($exepath, $python_home, $install_log) { + $install_args = "/quiet InstallAllUsers=1 TargetDir=$python_home" + RunCommand $exepath $install_args +} + + +function InstallPythonMSI ($msipath, $python_home, $install_log) { + $install_args = "/qn /log $install_log /i $msipath TARGETDIR=$python_home" + $uninstall_args = "/qn /x $msipath" + RunCommand "msiexec.exe" $install_args + if (-not(Test-Path $python_home)) { + Write-Host "Python seems to be installed else-where, reinstalling." + RunCommand "msiexec.exe" $uninstall_args + RunCommand "msiexec.exe" $install_args + } +} + +function RunCommand ($command, $command_args) { + Write-Host $command $command_args + Start-Process -FilePath $command -ArgumentList $command_args -Wait -Passthru +} + + +function InstallPip ($python_home) { + $pip_path = $python_home + "\Scripts\pip.exe" + $python_path = $python_home + "\python.exe" + if (-not(Test-Path $pip_path)) { + Write-Host "Installing pip..." + $webclient = New-Object System.Net.WebClient + $webclient.DownloadFile($GET_PIP_URL, $GET_PIP_PATH) + Write-Host "Executing:" $python_path $GET_PIP_PATH + & $python_path $GET_PIP_PATH + } else { + Write-Host "pip already installed." 
+ } +} + + +function DownloadMiniconda ($python_version, $platform_suffix) { + if ($python_version -eq "3.4") { + $filename = "Miniconda3-3.5.5-Windows-" + $platform_suffix + ".exe" + } else { + $filename = "Miniconda-3.5.5-Windows-" + $platform_suffix + ".exe" + } + $url = $MINICONDA_URL + $filename + $filepath = Download $filename $url + return $filepath +} + + +function InstallMiniconda ($python_version, $architecture, $python_home) { + Write-Host "Installing Python" $python_version "for" $architecture "bit architecture to" $python_home + if (Test-Path $python_home) { + Write-Host $python_home "already exists, skipping." + return $false + } + if ($architecture -eq "32") { + $platform_suffix = "x86" + } else { + $platform_suffix = "x86_64" + } + $filepath = DownloadMiniconda $python_version $platform_suffix + Write-Host "Installing" $filepath "to" $python_home + $install_log = $python_home + ".log" + $args = "/S /D=$python_home" + Write-Host $filepath $args + Start-Process -FilePath $filepath -ArgumentList $args -Wait -Passthru + if (Test-Path $python_home) { + Write-Host "Python $python_version ($architecture) installation complete" + } else { + Write-Host "Failed to install Python in $python_home" + Get-Content -Path $install_log + Exit 1 + } +} + + +function InstallMinicondaPip ($python_home) { + $pip_path = $python_home + "\Scripts\pip.exe" + $conda_path = $python_home + "\Scripts\conda.exe" + if (-not(Test-Path $pip_path)) { + Write-Host "Installing pip..." + $args = "install --yes pip" + Write-Host $conda_path $args + Start-Process -FilePath "$conda_path" -ArgumentList $args -Wait -Passthru + } else { + Write-Host "pip already installed." 
+ } +} + +function main () { + InstallPython $env:PYTHON_VERSION $env:PYTHON_ARCH $env:PYTHON + InstallPip $env:PYTHON +} + +main \ No newline at end of file diff --git a/.appveyor/prepare.bat b/.appveyor/prepare.bat new file mode 100644 index 0000000..be1491f --- /dev/null +++ b/.appveyor/prepare.bat @@ -0,0 +1,13 @@ +pip install wheel +nuget install redis-64 -excludeversion +redis-64\redis-server.exe --service-install +redis-64\redis-server.exe --service-start +nuget install ZeroMQ +%WITH_COMPILER% pip install cython pyzmq +python scripts\test_setup.py +python setup.py develop +IF DEFINED CYBUILD ( + cython logbook\_speedups.pyx + %WITH_COMPILER% python setup.py build + pip install twine +) diff --git a/.appveyor/run_with_compiler.cmd b/.appveyor/run_with_compiler.cmd new file mode 100644 index 0000000..d549afe --- /dev/null +++ b/.appveyor/run_with_compiler.cmd @@ -0,0 +1,88 @@ +:: To build extensions for 64 bit Python 3, we need to configure environment +:: variables to use the MSVC 2010 C++ compilers from GRMSDKX_EN_DVD.iso of: +:: MS Windows SDK for Windows 7 and .NET Framework 4 (SDK v7.1) +:: +:: To build extensions for 64 bit Python 2, we need to configure environment +:: variables to use the MSVC 2008 C++ compilers from GRMSDKX_EN_DVD.iso of: +:: MS Windows SDK for Windows 7 and .NET Framework 3.5 (SDK v7.0) +:: +:: 32 bit builds, and 64-bit builds for 3.5 and beyond, do not require specific +:: environment configurations. 
+:: +:: Note: this script needs to be run with the /E:ON and /V:ON flags for the +:: cmd interpreter, at least for (SDK v7.0) +:: +:: More details at: +:: https://github.com/cython/cython/wiki/64BitCythonExtensionsOnWindows +:: http://stackoverflow.com/a/13751649/163740 +:: +:: Author: Olivier Grisel +:: License: CC0 1.0 Universal: http://creativecommons.org/publicdomain/zero/1.0/ +:: +:: Notes about batch files for Python people: +:: +:: Quotes in values are literally part of the values: +:: SET FOO="bar" +:: FOO is now five characters long: " b a r " +:: If you don't want quotes, don't include them on the right-hand side. +:: +:: The CALL lines at the end of this file look redundant, but if you move them +:: outside of the IF clauses, they do not run properly in the SET_SDK_64==Y +:: case, I don't know why. +@ECHO OFF + +SET COMMAND_TO_RUN=%* +SET WIN_SDK_ROOT=C:\Program Files\Microsoft SDKs\Windows +SET WIN_WDK=c:\Program Files (x86)\Windows Kits\10\Include\wdf + +:: Extract the major and minor versions, and allow for the minor version to be +:: more than 9. This requires the version number to have two dots in it. +SET MAJOR_PYTHON_VERSION=%PYTHON_VERSION:~0,1% +IF "%PYTHON_VERSION:~3,1%" == "." ( + SET MINOR_PYTHON_VERSION=%PYTHON_VERSION:~2,1% +) ELSE ( + SET MINOR_PYTHON_VERSION=%PYTHON_VERSION:~2,2% +) + +:: Based on the Python version, determine what SDK version to use, and whether +:: to set the SDK for 64-bit. 
+IF %MAJOR_PYTHON_VERSION% == 2 ( + SET WINDOWS_SDK_VERSION="v7.0" + SET SET_SDK_64=Y +) ELSE ( + IF %MAJOR_PYTHON_VERSION% == 3 ( + SET WINDOWS_SDK_VERSION="v7.1" + IF %MINOR_PYTHON_VERSION% LEQ 4 ( + SET SET_SDK_64=Y + ) ELSE ( + SET SET_SDK_64=N + IF EXIST "%WIN_WDK%" ( + :: See: https://connect.microsoft.com/VisualStudio/feedback/details/1610302/ + REN "%WIN_WDK%" 0wdf + ) + ) + ) ELSE ( + ECHO Unsupported Python version: "%MAJOR_PYTHON_VERSION%" + EXIT 1 + ) +) + +IF %PYTHON_ARCH% == 64 ( + IF %SET_SDK_64% == Y ( + ECHO Configuring Windows SDK %WINDOWS_SDK_VERSION% for Python %MAJOR_PYTHON_VERSION% on a 64 bit architecture + SET DISTUTILS_USE_SDK=1 + SET MSSdk=1 + "%WIN_SDK_ROOT%\%WINDOWS_SDK_VERSION%\Setup\WindowsSdkVer.exe" -q -version:%WINDOWS_SDK_VERSION% + "%WIN_SDK_ROOT%\%WINDOWS_SDK_VERSION%\Bin\SetEnv.cmd" /x64 /release + ECHO Executing: %COMMAND_TO_RUN% + call %COMMAND_TO_RUN% || EXIT 1 + ) ELSE ( + ECHO Using default MSVC build environment for 64 bit architecture + ECHO Executing: %COMMAND_TO_RUN% + call %COMMAND_TO_RUN% || EXIT 1 + ) +) ELSE ( + ECHO Using default MSVC build environment for 32 bit architecture + ECHO Executing: %COMMAND_TO_RUN% + call %COMMAND_TO_RUN% || EXIT 1 +) \ No newline at end of file diff --git a/.gitignore b/.gitignore index 9dbead5..11732d0 100644 --- a/.gitignore +++ b/.gitignore @@ -13,3 +13,4 @@ build .vagrant flycheck-* +.cache diff --git a/.travis.yml b/.travis.yml index 45ab6fa..43efd39 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,49 +1,53 @@ language: python - services: - - redis-server - +- redis-server python: - - "2.6" - - "2.7" - - "3.2" - - "3.3" - - "3.4" - - "pypy" - - "pypy3" - +- '2.6' +- '2.7' +- '3.2' +- '3.3' +- '3.4' +- '3.5' +- pypy +- pypy3 install: - # this fixes SemLock issues on travis - - "sudo rm -rf /dev/shm && sudo ln -s /run/shm /dev/shm" - - "sudo apt-add-repository -y ppa:chris-lea/zeromq" - - "sudo apt-get update" - - "sudo apt-get install -y libzmq3-dev" - - "pip install cython redis" 
- - "easy_install pyzmq" - - "make test_setup" - - "python setup.py develop" - +- sudo rm -rf /dev/shm && sudo ln -s /run/shm /dev/shm +- sudo apt-add-repository -y ppa:chris-lea/zeromq +- sudo apt-get update +- sudo apt-get install -y libzmq3-dev +- pip install cython redis +- easy_install pyzmq +- make test_setup +- python setup.py develop env: - - COMMAND="make test" - - COMMAND="make cybuild test" - -script: "$COMMAND" - +- COMMAND="make test" +- COMMAND="make cybuild test" +script: $COMMAND matrix: exclude: - - python: "pypy" - env: COMMAND="make cybuild test" - - python: "pypy3" - env: COMMAND="make cybuild test" - + - python: pypy + env: COMMAND="make cybuild test" + - python: pypy3 + env: COMMAND="make cybuild test" notifications: - email: + email: recipients: - - vmalloc@gmail.com + - vmalloc@gmail.com irc: channels: - - "chat.freenode.net#pocoo" + - chat.freenode.net#pocoo on_success: change on_failure: always use_notice: true skip_join: true +before_deploy: + - make logbook/_speedups.so +deploy: + provider: pypi + user: vmalloc + password: + secure: WFmuAbtBDIkeZArIFQRCwyO1TdvF2PaZpo75r3mFgnY+aWm75cdgjZKoNqVprF/f+v9EsX2kDdQ7ZfuhMLgP8MNziB+ty7579ZDGwh64jGoi+DIoeblAFu5xNAqjvhie540uCE8KySk9s+Pq5EpOA5w18V4zxTw+h6tnBQ0M9cQ= + on: + tags: true + repo: getlogbook/logbook + distributions: "sdist bdist_egg" diff --git a/AUTHORS b/AUTHORS index 4fb772c..46b5378 100644 --- a/AUTHORS +++ b/AUTHORS @@ -15,3 +15,6 @@ - Roman Valls Guimera - Guillermo Carrasco Hernández - Raphaël Vinot +- Rotem Yaari +- Frazer McLean + diff --git a/CHANGES b/CHANGES index 5a2abed..9f75b26 100644 --- a/CHANGES +++ b/CHANGES @@ -2,6 +2,50 @@ ================= Here you can see the full list of changes between each Logbook release. 
+ +Version 0.12.0 +-------------- + +Released on November 24th 2015 + +- Added logbook.utils.deprecated to automatically emit warnings when certain functions are called (Thanks Ayala Shachar) +- Added logbook.utils.suppressed_deprecations context to temporarily suppress deprecations (Thanks Ayala Shachar) +- Added logbook.utils.logged_if_slow_context to emit logs when certain operations exceed a time threshold (Thanks Ayala Shachar) +- Many PEP8 fixes and code cleanups (thanks Taranjeet Singh and Frazer McLean) +- TestHandler constructor now receives an optional `force_heavy_init=True`, forcing all records to heavy-initialize + + +Version 0.11.3 +-------------- + +Released on November 5th 2015 + +- Windows-specific fixes and CI configuration (Thanks Frazer McLean) +- Several Unicode-specific fixes (Thanks Frazer McLean) +- Documentation cleanups + +Version 0.11.2 +-------------- + +Released on September 29th 2015 + +- Fix importing issue with SQLAlchemy ticketing handler + +Version 0.11.0 +-------------- + +Released on September 29th 2015 + +- Added TRACE log level for enabling logs below DEBUG +- Improved SQLAlchemy session management (thanks @fintan) +- Removed the ``bubble`` argument from NullHandler, preventing many future confusions + +Version 0.10.1 +-------------- + +Released on August 4th 2015 + +- Small bugfix supporting exc_info=False without breaking formatting Version 0.10.0 -------------- diff --git a/Makefile b/Makefile index c9258b9..d5b503f 100644 --- a/Makefile +++ b/Makefile @@ -9,7 +9,7 @@ @python scripts/test_setup.py test: - @py.test tests + @py.test -r s tests toxtest: @tox @@ -21,13 +21,10 @@ bench: @python benchmark/run.py -upload-docs: docs - python setup.py upload_docs - docs: make -C docs html SPHINXOPTS=-Aonline=1 -release: logbook/_speedups.so upload-docs +release: logbook/_speedups.so python scripts/make-release.py logbook/_speedups.so: logbook/_speedups.pyx diff --git a/README.md b/README.md new file mode 100644 index 0000000..a241711 
--- /dev/null +++ b/README.md @@ -0,0 +1,25 @@ +# Welcome to Logbook + +| | | +|--------------------|-----------------------------| +| Travis | [![Build Status][ti]][tl] | +| AppVeyor | [![Build Status][ai]][al] | +| Supported Versions | ![Supported Versions][vi] | +| Downloads | ![Downloads][di] | +| Latest Version | [![Latest Version][pi]][pl] | + + +Logbook is a nice logging replacement. + +It should be easy to setup, use and configure and support web applications :) + +For more information: http://logbook.readthedocs.org + +[ti]: https://secure.travis-ci.org/getlogbook/logbook.svg?branch=master +[tl]: https://travis-ci.org/getlogbook/logbook +[ai]: https://ci.appveyor.com/api/projects/status/quu99exa26e06npp?svg=true +[vi]: https://img.shields.io/pypi/pyversions/logbook.svg +[di]: https://img.shields.io/pypi/dm/logbook.svg +[al]: https://ci.appveyor.com/project/vmalloc/logbook +[pi]: https://img.shields.io/pypi/v/logbook.svg +[pl]: https://pypi.python.org/pypi/Logbook diff --git a/README.rst b/README.rst deleted file mode 100644 index 4853f10..0000000 --- a/README.rst +++ /dev/null @@ -1,17 +0,0 @@ -Welcome to Logbook -================== - -.. image:: https://secure.travis-ci.org/mitsuhiko/logbook.png - :target: https://travis-ci.org/mitsuhiko/logbook - -.. image:: https://pypip.in/d/Logbook/badge.png - :target: https://crate.io/packages/Logbook - -.. image:: https://pypip.in/v/Logbook/badge.png - :target: https://crate.io/packages/Logbook - -Logbook is a nice logging replacement. 
- -It should be easy to setup, use and configure and support web applications :) - -For more information look at http://pythonhosted.org/Logbook diff --git a/appveyor.yml b/appveyor.yml new file mode 100644 index 0000000..c50b6b6 --- /dev/null +++ b/appveyor.yml @@ -0,0 +1,168 @@ +cache: + - C:\Users\appveyor\AppData\Local\pip\Cache\wheels + +environment: + global: + # SDK v7.0 MSVC Express 2008's SetEnv.cmd script will fail if the + # /E:ON and /V:ON options are not enabled in the batch script intepreter + # See: http://stackoverflow.com/a/13751649/163740 + WITH_COMPILER: "cmd /E:ON /V:ON /C .\\.appveyor\\run_with_compiler.cmd" + PYPI_USERNAME: + secure: ixvjwUN/HsSfGkU3OvtQ8Q== + PYPI_PASSWORD: + secure: KOr+oEHZJmo1el3bT+ivmQ== + + matrix: + # Python 2.6.6 is the latest Python 2.6 with a Windows installer + # See: https://github.com/ogrisel/python-appveyor-demo/issues/10 + + - PYTHON: "C:\\Python266" + PYTHON_VERSION: "2.6.6" + PYTHON_ARCH: "32" + + - PYTHON: "C:\\Python266" + PYTHON_VERSION: "2.6.6" + PYTHON_ARCH: "32" + CYBUILD: "TRUE" + + - PYTHON: "C:\\Python266-x64" + PYTHON_VERSION: "2.6.6" + PYTHON_ARCH: "64" + + - PYTHON: "C:\\Python266-x64" + PYTHON_VERSION: "2.6.6" + PYTHON_ARCH: "64" + CYBUILD: "TRUE" + + # Pre-installed Python versions, which Appveyor may upgrade to + # a later point release. 
+ # See: http://www.appveyor.com/docs/installed-software#python + + - PYTHON: "C:\\Python27" + PYTHON_VERSION: "2.7.x" + PYTHON_ARCH: "32" + + - PYTHON: "C:\\Python27" + PYTHON_VERSION: "2.7.x" + PYTHON_ARCH: "32" + CYBUILD: "TRUE" + + - PYTHON: "C:\\Python27-x64" + PYTHON_VERSION: "2.7.x" + PYTHON_ARCH: "64" + + - PYTHON: "C:\\Python27-x64" + PYTHON_VERSION: "2.7.x" + PYTHON_ARCH: "64" + CYBUILD: "TRUE" + + # Python 3.2 isn't preinstalled + + - PYTHON: "C:\\Python325" + PYTHON_VERSION: "3.2.5" + PYTHON_ARCH: "32" + + - PYTHON: "C:\\Python325" + PYTHON_VERSION: "3.2.5" + PYTHON_ARCH: "32" + CYBUILD: "TRUE" + + - PYTHON: "C:\\Python325-x64" + PYTHON_VERSION: "3.2.5" + PYTHON_ARCH: "64" + + - PYTHON: "C:\\Python325-x64" + PYTHON_VERSION: "3.2.5" + PYTHON_ARCH: "64" + CYBUILD: "TRUE" + + # Pre-installed Python versions, which Appveyor may upgrade to + # a later point release. + # See: http://www.appveyor.com/docs/installed-software#python + + - PYTHON: "C:\\Python33" + PYTHON_VERSION: "3.3.x" + PYTHON_ARCH: "32" + + - PYTHON: "C:\\Python33" + PYTHON_VERSION: "3.3.x" + PYTHON_ARCH: "32" + CYBUILD: "TRUE" + + - PYTHON: "C:\\Python33-x64" + PYTHON_VERSION: "3.3.x" + PYTHON_ARCH: "64" + + - PYTHON: "C:\\Python33-x64" + PYTHON_VERSION: "3.3.x" + PYTHON_ARCH: "64" + CYBUILD: "TRUE" + + - PYTHON: "C:\\Python34" + PYTHON_VERSION: "3.4.x" + PYTHON_ARCH: "32" + + - PYTHON: "C:\\Python34" + PYTHON_VERSION: "3.4.x" + PYTHON_ARCH: "32" + CYBUILD: "TRUE" + + - PYTHON: "C:\\Python34-x64" + PYTHON_VERSION: "3.4.x" + PYTHON_ARCH: "64" + + - PYTHON: "C:\\Python34-x64" + PYTHON_VERSION: "3.4.x" + PYTHON_ARCH: "64" + CYBUILD: "TRUE" + + - PYTHON: "C:\\Python35" + PYTHON_VERSION: "3.5.x" + PYTHON_ARCH: "32" + + - PYTHON: "C:\\Python35" + PYTHON_VERSION: "3.5.x" + PYTHON_ARCH: "32" + CYBUILD: "TRUE" + + - PYTHON: "C:\\Python35-x64" + PYTHON_VERSION: "3.5.x" + PYTHON_ARCH: "64" + + - PYTHON: "C:\\Python35-x64" + PYTHON_VERSION: "3.5.x" + PYTHON_ARCH: "64" + CYBUILD: "TRUE" + + +init: + - 
echo %PYTHON% %PYTHON_VERSION% %PYTHON_ARCH% + - set PATH=%PYTHON%;%PYTHON%\Scripts;%PATH% + +install: + - powershell .appveyor\\install.ps1 + - ".appveyor\\prepare.bat" + - ps: if (Test-Path Env:\CYBUILD) {Copy-Item build\*\logbook\*.pyd logbook\} + +build: off + +test_script: + - py.test -r s tests + +after_test: + - ".appveyor\\after_test.bat" + +artifacts: + # Archive the generated packages in the ci.appveyor.com build report. + - path: dist\* + +deploy: + description: '' + provider: GitHub + auth_token: + secure: 0yLUo/V+wwSvSFk9nBW/77RN9iTjJA1B5p/TM1XgVLPPFEZWkH756jyJ0FOmtJPt + artifact: /.*\.whl/ + draft: true + prerelease: false + on: + appveyor_repo_tag: true diff --git a/benchmark/bench_stack_manipulation.py b/benchmark/bench_stack_manipulation.py index 54613cd..0f546ea 100644 --- a/benchmark/bench_stack_manipulation.py +++ b/benchmark/bench_stack_manipulation.py @@ -1,6 +1,6 @@ """Tests basic stack manipulation performance""" from logbook import Handler, NullHandler, StreamHandler, FileHandler, \ - ERROR, WARNING + ERROR, WARNING from tempfile import NamedTemporaryFile from cStringIO import StringIO diff --git a/benchmark/run.py b/benchmark/run.py index f946ac2..4163c61 100644 --- a/benchmark/run.py +++ b/benchmark/run.py @@ -40,7 +40,8 @@ def bench_wrapper(use_gevent=False): print '=' * 80 - print 'Running benchmark with Logbook %s (gevent enabled=%s)' % (version, use_gevent) + print 'Running benchmark with Logbook %s (gevent enabled=%s)' % \ + (version, use_gevent) print '-' * 80 os.chdir(bench_directory) for bench in list_benchmarks(): diff --git a/docs/api/queues.rst b/docs/api/queues.rst index 313f9b9..3e961b9 100644 --- a/docs/api/queues.rst +++ b/docs/api/queues.rst @@ -18,10 +18,13 @@ :members: :inherited-members: -Redis ------ +AMQP Message Queues +------------------- -.. autoclass:: RedisHandler +.. autoclass:: MessageQueueHandler + :members: + +.. 
autoclass:: MessageQueueSubscriber :members: MultiProcessing diff --git a/docs/api/utilities.rst b/docs/api/utilities.rst index d210b9a..17ae09b 100644 --- a/docs/api/utilities.rst +++ b/docs/api/utilities.rst @@ -1,5 +1,8 @@ Utilities ========= + +Misc. Utilities +--------------- This documents general purpose utility functions available in Logbook. @@ -26,3 +29,18 @@ .. autofunction:: log .. autofunction:: set_datetime_format + +Slow Operations Logging +----------------------- + +.. module:: logbook.utils +.. autofunction:: logged_if_slow_context + + +Deprecations +------------ + +.. autofunction:: deprecated + +.. autofunction:: suppressed_deprecations + diff --git a/docs/compat.rst b/docs/compat.rst index 00cec10..f611bf9 100644 --- a/docs/compat.rst +++ b/docs/compat.rst @@ -19,12 +19,14 @@ This also means you don't have to call :func:`logging.basicConfig`: ->>> from logbook.compat import redirect_logging +>>> from logbook.compat import redirect_logging, StreamHandler +>>> import sys +>>> StreamHandler(sys.stdout).push_application() >>> redirect_logging() >>> from logging import getLogger >>> log = getLogger('My Logger') >>> log.warn('This is a warning') -[2010-07-25 00:24] WARNING: My Logger: This is a warning +[2015-10-05 19:13:37.524346] WARNING: My Logger: This is a warning Advanced Setup -------------- @@ -35,7 +37,9 @@ active logbook handlers. 
This handler can then be added to specific logging loggers if you want: ->>> from logging import getLogger +>>> from logging import getLogger, StreamHandler +>>> import sys +>>> StreamHandler(sys.stdout).push_application() >>> mylog = getLogger('My Log') >>> from logbook.compat import RedirectLoggingHandler >>> mylog.addHandler(RedirectLoggingHandler()) @@ -43,7 +47,7 @@ >>> otherlog.warn('logging is deprecated') No handlers could be found for logger "Other Log" >>> mylog.warn('but logbook is awesome') -[2010-07-25 00:29] WARNING: My Log: but logbook is awesome +[2015-10-05 19:13:37.524346] WARNING: My Log: but logbook is awesome Reverse Redirects ----------------- diff --git a/docs/conf.py b/docs/conf.py index 409cdd1..77530e5 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -3,7 +3,8 @@ # Logbook documentation build configuration file, created by # sphinx-quickstart on Fri Jul 23 16:54:49 2010. # -# This file is execfile()d with the current directory set to its containing dir. +# This file is execfile()d with the current directory set to its containing +# dir. # # Note that not all possible configuration values are present in this # autogenerated file. @@ -11,20 +12,21 @@ # All configuration values have a default; values that are commented out # serve to show the default. -import sys, os +import sys +import os # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.extend((os.path.abspath('.'), os.path.abspath('..'))) -# -- General configuration ----------------------------------------------------- +# -- General configuration ---------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. -#needs_sphinx = '1.0' - -# Add any Sphinx extension module names here, as strings. 
They can be extensions -# coming with Sphinx (named 'sphinx.ext.*') or your custom ones. +# needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx'] # Add any paths that contain templates here, relative to this directory. @@ -34,7 +36,7 @@ source_suffix = '.rst' # The encoding of source files. -#source_encoding = 'utf-8-sig' +# source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' @@ -48,46 +50,47 @@ # built documents. # # The short X.Y version. -version = '0.10.0' -# The full version, including alpha/beta/rc tags. -release = '0.10.0' +with open(os.path.join(os.path.dirname(__file__), "..", "logbook", "__version__.py")) as version_file: + # can't use import here... + version = release = version_file.read().strip().split("=")[1].strip()[1:-1] # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. -#language = None +# language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: -#today = '' +# today = '' # Else, today_fmt is used as the format for a strftime call. -#today_fmt = '%B %d, %Y' +# today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ['_build'] -# The reST default role (used for this markup: `text`) to use for all documents. -#default_role = None +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. -#add_function_parentheses = True +# add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). 
-#add_module_names = True +# add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. -#show_authors = False +# show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. -#modindex_common_prefix = [] - - -# -- Options for HTML output --------------------------------------------------- +# modindex_common_prefix = [] + + +# -- Options for HTML output -------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. @@ -112,105 +115,106 @@ # The name of an image file (relative to this directory) to place at the top # of the sidebar. -#html_logo = None +# html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. -#html_favicon = None +# html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -#html_static_path = ['_static'] +# html_static_path = ['_static'] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. -#html_last_updated_fmt = '%b %d, %Y' +# html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. -#html_use_smartypants = True +# html_use_smartypants = True # Custom sidebar templates, maps document names to template names. -#html_sidebars = {} +# html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. 
-#html_additional_pages = {} +# html_additional_pages = {} # If false, no module index is generated. -#html_domain_indices = True +# html_domain_indices = True # If false, no index is generated. -#html_use_index = True +# html_use_index = True # If true, the index is split into individual pages for each letter. -#html_split_index = False +# html_split_index = False # If true, links to the reST sources are added to the pages. -#html_show_sourcelink = True +# html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -#html_show_sphinx = True +# html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -#html_show_copyright = True +# html_show_copyright = True html_add_permalinks = False # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. -#html_use_opensearch = '' +# html_use_opensearch = '' # If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml"). -#html_file_suffix = '' +# html_file_suffix = '' # Output file base name for HTML help builder. htmlhelp_basename = 'Logbookdoc' -# -- Options for LaTeX output -------------------------------------------------- +# -- Options for LaTeX output ------------------------------------------------- # The paper size ('letter' or 'a4'). -#latex_paper_size = 'letter' +# latex_paper_size = 'letter' # The font size ('10pt', '11pt' or '12pt'). -#latex_font_size = '10pt' +# latex_font_size = '10pt' # Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, author, documentclass [howto/manual]). +# (source start file, target name, title, author, +# documentclass [howto/manual]). 
latex_documents = [ - ('index', 'Logbook.tex', u'Logbook Documentation', - u'Armin Ronacher, Georg Brandl', 'manual'), + ('index', 'Logbook.tex', u'Logbook Documentation', + u'Armin Ronacher, Georg Brandl', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. -#latex_logo = None +# latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. -#latex_use_parts = False +# latex_use_parts = False # If true, show page references after internal links. -#latex_show_pagerefs = False +# latex_show_pagerefs = False # If true, show URL addresses after external links. -#latex_show_urls = False +# latex_show_urls = False # Additional stuff for the LaTeX preamble. -#latex_preamble = '' +# latex_preamble = '' # Documents to append as an appendix to all manuals. -#latex_appendices = [] +# latex_appendices = [] # If false, no module index is generated. -#latex_domain_indices = True - - -# -- Options for manual page output -------------------------------------------- +# latex_domain_indices = True + + +# -- Options for manual page output ------------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). diff --git a/docs/designexplained.rst b/docs/designexplained.rst index bd4fe36..305272d 100644 --- a/docs/designexplained.rst +++ b/docs/designexplained.rst @@ -51,10 +51,11 @@ default dispatching can be triggered from a function :func:`~logbook.base.dispatch_record`: ->>> from logbook import dispatch_record, LogRecord, INFO +>>> from logbook import dispatch_record, LogRecord, INFO, StreamHandler +>>> import sys >>> record = LogRecord('My channel', INFO, 'Hello World!') >>> dispatch_record(record) -[2010-09-04 15:56] INFO: My channel: Hello World! +[2015-10-05 19:18:52.211472] INFO: My channel: Hello World! It is pretty common for log records to be created without a dispatcher. 
Here some common use cases for log records without a dispatcher: diff --git a/docs/features.rst b/docs/features.rst index ccec1e1..d44ab1a 100644 --- a/docs/features.rst +++ b/docs/features.rst @@ -107,15 +107,10 @@ This is how easy it is to get started with Logbook:: - from logbook import warn + from logbook import warn, StreamHandler + import sys + StreamHandler(sys.stdout).push_application() warn('This is a warning') - -That will use the default logging channel. But you can create as many as -you like:: - - from logbook import Logger - log = Logger('My Logger') - log.warn('This is a warning') Roadmap ------- diff --git a/docs/index.rst b/docs/index.rst index 5eb9203..70a6081 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -1,14 +1,16 @@ Welcome to Logbook ================== -Logbook is a logging sytem for Python that replaces the standard library's +Logbook is a logging system for Python that replaces the standard library's logging module. It was designed with both complex and simple applications in mind and the idea to make logging fun: ->>> from logbook import Logger +>>> from logbook import Logger, StreamHandler +>>> import sys +>>> StreamHandler(sys.stdout).push_application() >>> log = Logger('Logbook') >>> log.info('Hello, World!') -[2010-07-23 16:34] INFO: Logbook: Hello, World! +[2015-10-05 18:55:56.937141] INFO: Logbook: Hello, World! What makes it fun? What about getting log messages on your phone or desktop notification system? :ref:`Logbook can do that `. @@ -49,5 +51,5 @@ * IRC: ``#pocoo`` on freenode .. _Download from PyPI: http://pypi.python.org/pypi/Logbook -.. _Master repository on GitHub: https://github.com/mitsuhiko/logbook +.. _Master repository on GitHub: https://github.com/getlogbook/logbook .. 
_Mailing list: http://groups.google.com/group/pocoo-libs diff --git a/docs/quickstart.rst b/docs/quickstart.rst index 3f0c62a..dc914ee 100644 --- a/docs/quickstart.rst +++ b/docs/quickstart.rst @@ -6,10 +6,12 @@ Logbook makes it very easy to get started with logging. Just import the logger class, create yourself a logger and you are set: ->>> from logbook import Logger +>>> from logbook import Logger, StreamHandler +>>> import sys +>>> StreamHandler(sys.stdout).push_application() >>> log = Logger('My Awesome Logger') >>> log.warn('This is too cool for stdlib') -[2010-07-23 16:34:42.687111] WARNING: My Awesome Logger: This is too cool for stdlib +[2015-10-05 19:02:03.575723] WARNING: My Awesome Logger: This is too cool for stdlib A logger is a so-called :class:`~logbook.base.RecordDispatcher`, which is commonly referred to as a "logging channel". The name you give such a channel @@ -109,8 +111,7 @@ error_handler = SyslogHandler('logbook example', level='ERROR', bubble=True) with error_handler.applicationbound(): # whatever is executed here and an error is logged to the - # error handler but it will also bubble up to the default - # stderr handler. + # error handler but it will also bubble up other handles. ... 
So what if you want to only log errors to the syslog and nothing to diff --git a/docs/sheet/layout.html b/docs/sheet/layout.html index 013f9ff..e777773 100644 --- a/docs/sheet/layout.html +++ b/docs/sheet/layout.html @@ -19,7 +19,7 @@ {% block footer %} {% if online %} - + Fork me on GitHub diff --git a/logbook/__init__.py b/logbook/__init__.py index acb94f2..86a1a2c 100644 --- a/logbook/__init__.py +++ b/logbook/__init__.py @@ -11,24 +11,26 @@ """ import os -from logbook.base import LogRecord, Logger, LoggerGroup, NestedSetup, \ - Processor, Flags, get_level_name, lookup_level, dispatch_record, \ - CRITICAL, ERROR, WARNING, NOTICE, INFO, DEBUG, NOTSET, \ - set_datetime_format -from logbook.handlers import Handler, StreamHandler, FileHandler, \ - MonitoringFileHandler, StderrHandler, RotatingFileHandler, \ - TimedRotatingFileHandler, TestHandler, MailHandler, GMailHandler, SyslogHandler, \ - NullHandler, NTEventLogHandler, create_syshandler, StringFormatter, \ - StringFormatterHandlerMixin, HashingHandlerMixin, \ - LimitingHandlerMixin, WrapperHandler, FingersCrossedHandler, \ - GroupHandler +from .base import ( + LogRecord, Logger, LoggerGroup, NestedSetup, Processor, Flags, + get_level_name, lookup_level, dispatch_record, CRITICAL, ERROR, WARNING, + NOTICE, INFO, DEBUG, TRACE, NOTSET, set_datetime_format) +from .handlers import ( + Handler, StreamHandler, FileHandler, MonitoringFileHandler, StderrHandler, + RotatingFileHandler, TimedRotatingFileHandler, TestHandler, MailHandler, + GMailHandler, SyslogHandler, NullHandler, NTEventLogHandler, + create_syshandler, StringFormatter, StringFormatterHandlerMixin, + HashingHandlerMixin, LimitingHandlerMixin, WrapperHandler, + FingersCrossedHandler, GroupHandler) +from . 
import compat -__version__ = '0.10.0' +__version__ = '0.11.4-dev' # create an anonymous default logger and provide all important # methods of that logger as global functions _default_logger = Logger('Generic') _default_logger.suppress_dispatcher = True +trace = _default_logger.trace debug = _default_logger.debug info = _default_logger.info warn = _default_logger.warn diff --git a/logbook/__version__.py b/logbook/__version__.py new file mode 100644 index 0000000..8e1395b --- /dev/null +++ b/logbook/__version__.py @@ -0,0 +1 @@ +__version__ = "0.12.3" diff --git a/logbook/_fallback.py b/logbook/_fallback.py index a5f1631..9f7540e 100644 --- a/logbook/_fallback.py +++ b/logbook/_fallback.py @@ -10,9 +10,9 @@ """ from itertools import count from logbook.helpers import get_iterator_next_method -from logbook.concurrency import (thread_get_ident, greenlet_get_ident, - thread_local, greenlet_local, - ThreadLock, GreenletRLock, is_gevent_enabled) +from logbook.concurrency import ( + thread_get_ident, greenlet_get_ident, thread_local, greenlet_local, + ThreadLock, GreenletRLock, is_gevent_enabled) _missing = object() _MAX_CONTEXT_OBJECT_CACHE = 256 @@ -30,8 +30,10 @@ if self.group is None: return default return getattr(self.group, name) + def _set(self, value): setattr(self, '_' + name, value) + def _del(self): delattr(self, '_' + name) return property(_get, _set, _del) @@ -149,7 +151,8 @@ def push_greenlet(self, obj): self._greenlet_context_lock.acquire() try: - self._cache.pop(greenlet_get_ident(), None) # remote chance to conflict with thread ids + # remote chance to conflict with thread ids + self._cache.pop(greenlet_get_ident(), None) item = (self._stackop(), obj) stack = getattr(self._greenlet_context, 'stack', None) if stack is None: @@ -162,7 +165,8 @@ def pop_greenlet(self): self._greenlet_context_lock.acquire() try: - self._cache.pop(greenlet_get_ident(), None) # remote chance to conflict with thread ids + # remote chance to conflict with thread ids + 
self._cache.pop(greenlet_get_ident(), None) stack = getattr(self._greenlet_context, 'stack', None) assert stack, 'no objects on stack' return stack.pop()[1] diff --git a/logbook/_termcolors.py b/logbook/_termcolors.py index d17489a..0554197 100644 --- a/logbook/_termcolors.py +++ b/logbook/_termcolors.py @@ -12,11 +12,11 @@ esc = "\x1b[" codes = {} -codes[""] = "" -codes["reset"] = esc + "39;49;00m" +codes[""] = "" +codes["reset"] = esc + "39;49;00m" -dark_colors = ["black", "darkred", "darkgreen", "brown", "darkblue", - "purple", "teal", "lightgray"] +dark_colors = ["black", "darkred", "darkgreen", "brown", "darkblue", + "purple", "teal", "lightgray"] light_colors = ["darkgray", "red", "green", "yellow", "blue", "fuchsia", "turquoise", "white"] @@ -28,9 +28,9 @@ del d, l, x -codes["darkteal"] = codes["turquoise"] +codes["darkteal"] = codes["turquoise"] codes["darkyellow"] = codes["brown"] -codes["fuscia"] = codes["fuchsia"] +codes["fuscia"] = codes["fuchsia"] def _str_to_type(obj, strtype): @@ -42,5 +42,5 @@ def colorize(color_key, text): """Returns an ANSI formatted text with the given color.""" - return _str_to_type(codes[color_key], text) + text + \ - _str_to_type(codes["reset"], text) + return (_str_to_type(codes[color_key], text) + text + + _str_to_type(codes["reset"], text)) diff --git a/logbook/base.py b/logbook/base.py index 474473a..419c559 100644 --- a/logbook/base.py +++ b/logbook/base.py @@ -14,31 +14,37 @@ from itertools import chain from weakref import ref as weakref from datetime import datetime -from logbook import helpers -from logbook.concurrency import thread_get_name, thread_get_ident, greenlet_get_ident - -from logbook.helpers import to_safe_json, parse_iso8601, cached_property, \ - PY2, u, string_types, iteritems, integer_types +from logbook.concurrency import ( + thread_get_name, thread_get_ident, greenlet_get_ident) + +from logbook.helpers import ( + to_safe_json, parse_iso8601, cached_property, PY2, u, string_types, + iteritems, 
integer_types, xrange) try: - from logbook._speedups import group_reflected_property, \ - ContextStackManager, StackedObject + from logbook._speedups import ( + group_reflected_property, ContextStackManager, StackedObject) except ImportError: - from logbook._fallback import group_reflected_property, \ - ContextStackManager, StackedObject + from logbook._fallback import ( + group_reflected_property, ContextStackManager, StackedObject) _datetime_factory = datetime.utcnow + + def set_datetime_format(datetime_format): """ Set the format for the datetime objects created, which are then made available as the :py:attr:`LogRecord.time` attribute of :py:class:`LogRecord` instances. - :param datetime_format: Indicates how to generate datetime objects. Possible values are: + :param datetime_format: Indicates how to generate datetime objects. + Possible values are: "utc" - :py:attr:`LogRecord.time` will be a datetime in UTC time zone (but not time zone aware) + :py:attr:`LogRecord.time` will be a datetime in UTC time zone + (but not time zone aware) "local" - :py:attr:`LogRecord.time` will be a datetime in local time zone (but not time zone aware) + :py:attr:`LogRecord.time` will be a datetime in local time zone + (but not time zone aware) This function defaults to creating datetime objects in UTC time, using `datetime.utcnow() @@ -66,15 +72,17 @@ elif datetime_format == "local": _datetime_factory = datetime.now else: - raise ValueError("Invalid value %r. Valid values are 'utc' and 'local'." % (datetime_format,)) + raise ValueError("Invalid value %r. Valid values are 'utc' and " + "'local'." 
% (datetime_format,)) # make sure to sync these up with _speedups.pyx -CRITICAL = 6 -ERROR = 5 -WARNING = 4 -NOTICE = 3 -INFO = 2 -DEBUG = 1 +CRITICAL = 15 +ERROR = 14 +WARNING = 13 +NOTICE = 12 +INFO = 11 +DEBUG = 10 +TRACE = 9 NOTSET = 0 _level_names = { @@ -84,6 +92,7 @@ NOTICE: 'NOTICE', INFO: 'INFO', DEBUG: 'DEBUG', + TRACE: 'TRACE', NOTSET: 'NOTSET' } _reverse_level_names = dict((v, k) for (k, v) in iteritems(_level_names)) @@ -343,8 +352,8 @@ """ _pullable_information = frozenset(( 'func_name', 'module', 'filename', 'lineno', 'process_name', 'thread', - 'thread_name', 'greenlet', 'formatted_exception', 'message', 'exception_name', - 'exception_message' + 'thread_name', 'greenlet', 'formatted_exception', 'message', + 'exception_name', 'exception_message' )) _noned_on_close = frozenset(('exc_info', 'frame', 'calling_frame')) @@ -369,7 +378,8 @@ information_pulled = False def __init__(self, channel, level, msg, args=None, kwargs=None, - exc_info=None, extra=None, frame=None, dispatcher=None, frame_correction=0): + exc_info=None, extra=None, frame=None, dispatcher=None, + frame_correction=0): #: the name of the logger that created it or any other textual #: channel description. This is a descriptive name and can be #: used for filtering. @@ -385,8 +395,12 @@ #: optional exception information. If set, this is a tuple in the #: form ``(exc_type, exc_value, tb)`` as returned by #: :func:`sys.exc_info`. - #: This parameter can also be ``True``, which would cause the exception info tuple - #: to be fetched for you. + #: This parameter can also be ``True``, which would cause the exception + #: info tuple to be fetched for you. + if not exc_info: + # this is a special case where exc_info=False can be passed in + # theory, and it should be the same as exc_info=None + exc_info = None self.exc_info = exc_info #: optional extra information as dictionary. 
This is the place #: where custom log processors can attach custom context sensitive @@ -508,14 +522,17 @@ return self.msg try: try: - return self._format_message(self.msg, *self.args, **self.kwargs) + return self._format_message(self.msg, *self.args, + **self.kwargs) except UnicodeDecodeError: # Assume an unicode message but mixed-up args msg = self.msg.encode('utf-8', 'replace') return self._format_message(msg, *self.args, **self.kwargs) except (UnicodeEncodeError, AttributeError): - # we catch AttributeError since if msg is bytes, it won't have the 'format' method - if sys.exc_info()[0] is AttributeError and (PY2 or not isinstance(self.msg, bytes)): + # we catch AttributeError since if msg is bytes, + # it won't have the 'format' method + if (sys.exc_info()[0] is AttributeError + and (PY2 or not isinstance(self.msg, bytes))): # this is not the case we thought it is... raise # Assume encoded message with unicode args. @@ -532,9 +549,9 @@ # that. e = sys.exc_info()[1] errormsg = ('Could not format message with provided ' - 'arguments: {err}\n msg={msg!r}\n ' - 'args={args!r} \n kwargs={kwargs!r}.\n' - 'Happened in file {file}, line {lineno}').format( + 'arguments: {err}\n msg={msg!r}\n ' + 'args={args!r} \n kwargs={kwargs!r}.\n' + 'Happened in file {file}, line {lineno}').format( err=e, msg=self.msg, args=self.args, kwargs=self.kwargs, file=self.filename, lineno=self.lineno @@ -555,7 +572,7 @@ while frm is not None and frm.f_globals is globs: frm = frm.f_back - for _ in helpers.xrange(self.frame_correction): + for _ in xrange(self.frame_correction): frm = frm.f_back return frm @@ -604,8 +621,8 @@ @cached_property def greenlet(self): """The ident of the greenlet. This is evaluated late and means that - if the log record is passed to another greenlet, :meth:`pull_information` - was called in the old greenlet. + if the log record is passed to another greenlet, + :meth:`pull_information` was called in the old greenlet. 
""" return greenlet_get_ident() @@ -698,6 +715,13 @@ #: The name of the minimium logging level required for records to be #: created. level_name = level_name_property() + + def trace(self, *args, **kwargs): + """Logs a :class:`~logbook.LogRecord` with the level set + to :data:`~logbook.TRACE`. + """ + if not self.disabled and TRACE >= self.level: + self._log(TRACE, args, kwargs) def debug(self, *args, **kwargs): """Logs a :class:`~logbook.LogRecord` with the level set @@ -884,7 +908,8 @@ continue # first case of blackhole (without filter). - # this should discard all further processing and we don't have to heavy_init to know that... + # this should discard all further processing and + # we don't have to heavy_init to know that... if handler.filter is None and handler.blackhole: break @@ -897,21 +922,19 @@ self.process_record(record) record_initialized = True - # a filter can still veto the handling of the record. This # however is already operating on an initialized and processed # record. The impact is that filters are slower than the # handler's should_handle function in case there is no default # handler that would handle the record (delayed init). - if handler.filter is not None \ - and not handler.filter(record, handler): + if (handler.filter is not None + and not handler.filter(record, handler)): continue # We might have a filter, so now that we know we *should* handle # this record, we should consider the case of us being a black hole... if handler.blackhole: break - # handle the record. If the record was handled and # the record is not bubbling we can abort now. diff --git a/logbook/compat.py b/logbook/compat.py index 0b42caf..c3896db 100644 --- a/logbook/compat.py +++ b/logbook/compat.py @@ -25,8 +25,9 @@ removes all otherwise registered handlers on root logger of the logging system but leaves the other loggers untouched. 
- :param set_root_logger_level: controls of the default level of the legacy root logger is changed - so that all legacy log messages get redirected to Logbook + :param set_root_logger_level: controls of the default level of the legacy + root logger is changed so that all legacy log messages get redirected + to Logbook """ del logging.root.handlers[:] logging.root.addHandler(RedirectLoggingHandler()) @@ -108,9 +109,9 @@ """Tries to find the caller that issued the call.""" frm = sys._getframe(2) while frm is not None: - if frm.f_globals is globals() or \ - frm.f_globals is logbook.base.__dict__ or \ - frm.f_globals is logging.__dict__: + if (frm.f_globals is globals() or + frm.f_globals is logbook.base.__dict__ or + frm.f_globals is logging.__dict__): frm = frm.f_back else: return frm @@ -124,11 +125,11 @@ def convert_record(self, old_record): """Converts an old logging record into a logbook log record.""" record = LoggingCompatRecord(old_record.name, - self.convert_level(old_record.levelno), - old_record.msg, old_record.args, - None, old_record.exc_info, - self.find_extra(old_record), - self.find_caller(old_record)) + self.convert_level(old_record.levelno), + old_record.msg, old_record.args, + None, old_record.exc_info, + self.find_extra(old_record), + self.find_caller(old_record)) record.time = self.convert_time(old_record.created) return record diff --git a/logbook/concurrency.py b/logbook/concurrency.py index 80af112..ccf80ad 100644 --- a/logbook/concurrency.py +++ b/logbook/concurrency.py @@ -49,20 +49,24 @@ def __repr__(self): owner = self._owner - return "<%s owner=%r count=%d>" % (self.__class__.__name__, owner, self._count) + return "<%s owner=%r count=%d>" % (self.__class__.__name__, owner, + self._count) def acquire(self, blocking=1): tid = thread_get_ident() gid = greenlet_get_ident() tid_gid = (tid, gid) - if tid_gid == self._owner: # We trust the GIL here so we can do this comparison w/o locking. 
+ + # We trust the GIL here so we can do this comparison w/o locking. + if tid_gid == self._owner: self._count = self._count + 1 return True greenlet_lock = self._get_greenlet_lock() self._wait_queue.append(gid) - # this is a safety in case an exception is raised somewhere and we must make sure we're not in the queue + # this is a safety in case an exception is raised somewhere + # and we must make sure we're not in the queue # otherwise it'll get stuck forever. remove_from_queue_on_return = True try: @@ -74,13 +78,17 @@ # Hurray, we can have the lock. self._owner = tid_gid self._count = 1 - remove_from_queue_on_return = False # don't remove us from the queue + + # don't remove us from the queue + remove_from_queue_on_return = False return True else: - # we already hold the greenlet lock so obviously the owner is not in our thread. + # we already hold the greenlet lock so obviously + # the owner is not in our thread. greenlet_lock.release() if blocking: - gevent.sleep(0.0005) # 500 us -> initial delay of 1 ms + # 500 us -> initial delay of 1 ms + gevent.sleep(0.0005) else: return False finally: @@ -114,11 +122,14 @@ def _is_owned(self): return self._owner == (thread_get_ident(), greenlet_get_ident()) else: - from threading import Lock as ThreadLock, RLock as ThreadRLock, currentThread + from threading import ( + Lock as ThreadLock, RLock as ThreadRLock, currentThread) try: - from thread import get_ident as thread_get_ident, _local as thread_local + from thread import ( + get_ident as thread_get_ident, _local as thread_local) except ImportError: - from _thread import get_ident as thread_get_ident, _local as thread_local + from _thread import ( + get_ident as thread_get_ident, _local as thread_local) def thread_get_name(): return currentThread().getName() @@ -140,6 +151,7 @@ def __exit__(self, t, v, tb): pass + def new_fine_grained_lock(): global use_gevent if use_gevent: diff --git a/logbook/handlers.py b/logbook/handlers.py index 76a9e6c..821e85f 100644 --- 
a/logbook/handlers.py +++ b/logbook/handlers.py @@ -8,6 +8,7 @@ :copyright: (c) 2010 by Armin Ronacher, Georg Brandl. :license: BSD, see LICENSE for more details. """ +import io import os import re import sys @@ -21,58 +22,60 @@ import traceback from datetime import datetime, timedelta from collections import deque - -from six import add_metaclass - -from logbook.base import CRITICAL, ERROR, WARNING, NOTICE, INFO, DEBUG, \ - NOTSET, level_name_property, _missing, lookup_level, \ - Flags, ContextObject, ContextStackManager -from logbook.helpers import rename, b, _is_text_stream, is_unicode, PY2, \ - zip, xrange, string_types, integer_types, reraise, u +from textwrap import dedent + +from logbook.base import ( + CRITICAL, ERROR, WARNING, NOTICE, INFO, DEBUG, NOTSET, level_name_property, + _missing, lookup_level, Flags, ContextObject, ContextStackManager) +from logbook.helpers import ( + rename, b, _is_text_stream, is_unicode, PY2, zip, xrange, string_types, + integer_types, reraise, u, with_metaclass) from logbook.concurrency import new_fine_grained_lock -DEFAULT_FORMAT_STRING = ( - u('[{record.time:%Y-%m-%d %H:%M:%S.%f}] ') + - u('{record.level_name}: {record.channel}: {record.message}') -) +DEFAULT_FORMAT_STRING = u( + '[{record.time:%Y-%m-%d %H:%M:%S.%f}] ' + '{record.level_name}: {record.channel}: {record.message}') + SYSLOG_FORMAT_STRING = u('{record.channel}: {record.message}') -NTLOG_FORMAT_STRING = u('''\ -Message Level: {record.level_name} -Location: {record.filename}:{record.lineno} -Module: {record.module} -Function: {record.func_name} -Exact Time: {record.time:%Y-%m-%d %H:%M:%S} - -Event provided Message: - -{record.message} -''') -TEST_FORMAT_STRING = \ -u('[{record.level_name}] {record.channel}: {record.message}') -MAIL_FORMAT_STRING = u('''\ -Subject: {handler.subject} - -Message type: {record.level_name} -Location: {record.filename}:{record.lineno} -Module: {record.module} -Function: {record.func_name} -Time: {record.time:%Y-%m-%d %H:%M:%S} - 
-Message: - -{record.message} -''') -MAIL_RELATED_FORMAT_STRING = u('''\ -Message type: {record.level_name} -Location: {record.filename}:{record.lineno} -Module: {record.module} -Function: {record.func_name} -{record.message} -''') +NTLOG_FORMAT_STRING = dedent(u(''' + Message Level: {record.level_name} + Location: {record.filename}:{record.lineno} + Module: {record.module} + Function: {record.func_name} + Exact Time: {record.time:%Y-%m-%d %H:%M:%S} + + Event provided Message: + + {record.message} + ''')).lstrip() + +TEST_FORMAT_STRING = u('[{record.level_name}] {record.channel}: {record.message}') +MAIL_FORMAT_STRING = dedent(u(''' + Subject: {handler.subject} + + Message type: {record.level_name} + Location: {record.filename}:{record.lineno} + Module: {record.module} + Function: {record.func_name} + Time: {record.time:%Y-%m-%d %H:%M:%S} + + Message: + + {record.message} + ''')).lstrip() + +MAIL_RELATED_FORMAT_STRING = dedent(u(''' + Message type: {record.level_name} + Location: {record.filename}:{record.lineno} + Module: {record.module} + Function: {record.func_name} + {record.message} + ''')).lstrip() SYSLOG_PORT = 514 REGTYPE = type(re.compile("I'm a regular expression!")) + def create_syshandler(application_name, level=NOTSET): """Creates the handler the operating system provides. On Unix systems @@ -94,8 +97,8 @@ def __new__(cls, name, bases, d): # aha, that thing has a custom close method. We will need a magic # __del__ for it to be called on cleanup. 
- if bases != (ContextObject,) and 'close' in d and '__del__' not in d \ - and not any(hasattr(x, '__del__') for x in bases): + if (bases != (ContextObject,) and 'close' in d and '__del__' not in d + and not any(hasattr(x, '__del__') for x in bases)): def _magic_del(self): try: self.close() @@ -107,8 +110,7 @@ return type.__new__(cls, name, bases, d) -@add_metaclass(_HandlerType) -class Handler(ContextObject): +class Handler(with_metaclass(_HandlerType), ContextObject): """Handler instances dispatch logging events to specific destinations. The base handler class. Acts as a placeholder which defines the Handler @@ -117,8 +119,9 @@ the 'raw' message as determined by record.message is logged. To bind a handler you can use the :meth:`push_application`, - :meth:`push_thread` or :meth:`push_greenlet` methods. This will push the handler on a stack of - handlers. To undo this, use the :meth:`pop_application`, + :meth:`push_thread` or :meth:`push_greenlet` methods. + This will push the handler on a stack of handlers. + To undo this, use the :meth:`pop_application`, :meth:`pop_thread` methods and :meth:`pop_greenlet`:: handler = MyHandler() @@ -307,6 +310,10 @@ """ blackhole = True + def __init__(self, level=NOTSET, filter=None): + super(NullHandler, self).__init__(level=level, filter=filter, + bubble=False) + class WrapperHandler(Handler): """A class that can wrap another handler and redirect all calls to the @@ -438,6 +445,7 @@ _NUMBER_TYPES = integer_types + (float,) + class LimitingHandlerMixin(HashingHandlerMixin): """Mixin class for handlers that want to limit emitting records. 
@@ -484,12 +492,12 @@ first_count = last_count old_count = suppression_count - if not suppression_count and \ - len(self._record_limits) >= self.max_record_cache: + if (not suppression_count and + len(self._record_limits) >= self.max_record_cache): cache_items = self._record_limits.items() cache_items.sort() - del cache_items[:int(self._record_limits) \ - * self.record_cache_prune] + del cache_items[:int(self._record_limits) + * self.record_cache_prune] self._record_limits = dict(cache_items) self._record_limits[hash] = (first_count, old_count + 1) @@ -555,8 +563,8 @@ """Encodes the message to the stream encoding.""" stream = self.stream rv = msg + '\n' - if (PY2 and is_unicode(rv)) or \ - not (PY2 or is_unicode(rv) or _is_text_stream(stream)): + if ((PY2 and is_unicode(rv)) or + not (PY2 or is_unicode(rv) or _is_text_stream(stream))): enc = self.encoding if enc is None: enc = getattr(stream, 'encoding', None) or 'utf-8' @@ -603,11 +611,11 @@ def _open(self, mode=None): if mode is None: mode = self._mode - self.stream = open(self._filename, mode) + self.stream = io.open(self._filename, mode, encoding=self.encoding) def write(self, item): self.ensure_stream_is_open() - if not PY2 and isinstance(item, bytes): + if isinstance(item, bytes): self.stream.buffer.write(item) else: self.stream.write(item) @@ -723,8 +731,8 @@ format_string, delay, filter, bubble) self.max_size = max_size self.backup_count = backup_count - assert backup_count > 0, 'at least one backup file has to be ' \ - 'specified' + assert backup_count > 0, ('at least one backup file has to be ' + 'specified') def should_rollover(self, record, bytes): self.stream.seek(0, 2) @@ -787,8 +795,8 @@ self._filename = None def _get_timed_filename(self, datetime): - return datetime.strftime('-' + self.date_format) \ - .join(self._fn_parts) + return (datetime.strftime('-' + self.date_format) + .join(self._fn_parts)) def should_rollover(self, record): fn = self._get_timed_filename(record.time) @@ -806,12 +814,14 @@ 
files = [] for filename in os.listdir(directory): filename = os.path.join(directory, filename) - if filename.startswith(self._fn_parts[0] + '-') and \ - filename.endswith(self._fn_parts[1]): + if (filename.startswith(self._fn_parts[0] + '-') and + filename.endswith(self._fn_parts[1])): files.append((os.path.getmtime(filename), filename)) files.sort() - return files[:-self.backup_count + 1] if self.backup_count > 1\ - else files[:] + if self.backup_count > 1: + return files[:-self.backup_count + 1] + else: + return files[:] def perform_rollover(self): self.stream.close() @@ -847,13 +857,14 @@ default_format_string = TEST_FORMAT_STRING def __init__(self, level=NOTSET, format_string=None, filter=None, - bubble=False): + bubble=False, force_heavy_init=False): Handler.__init__(self, level, filter, bubble) StringFormatterHandlerMixin.__init__(self, format_string) #: captures the :class:`LogRecord`\s as instances self.records = [] self._formatted_records = [] self._formatted_record_cache = [] + self._force_heavy_init = force_heavy_init def close(self): """Close all records down when the handler is closed.""" @@ -865,14 +876,16 @@ # call to the emit function. If we don't do that, the traceback # attribute and other things will already be removed. 
record.keep_open = True + if self._force_heavy_init: + record.heavy_init() self.records.append(record) @property def formatted_records(self): """Captures the formatted log records as unicode strings.""" - if len(self._formatted_record_cache) != len(self.records) or \ - any(r1 != r2 for r1, r2 in - zip(self.records, self._formatted_record_cache)): + if (len(self._formatted_record_cache) != len(self.records) or + any(r1 != r2 for r1, r2 in + zip(self.records, self._formatted_record_cache))): self._formatted_records = [self.format(r) for r in self.records] self._formatted_record_cache = list(self.records) return self._formatted_records @@ -1045,13 +1058,14 @@ def _get_related_format_string(self): if isinstance(self.related_formatter, StringFormatter): return self.related_formatter.format_string + def _set_related_format_string(self, value): if value is None: self.related_formatter = None else: self.related_formatter = self.formatter_class(value) related_format_string = property(_get_related_format_string, - _set_related_format_string) + _set_related_format_string) del _get_related_format_string, _set_related_format_string def get_recipients(self, record): @@ -1087,8 +1101,8 @@ body = '\r\n'.join(lineiter) if suppressed: - body += '\r\n\r\nThis message occurred additional %d ' \ - 'time(s) and was suppressed' % suppressed + body += ('\r\n\r\nThis message occurred additional %d ' + 'time(s) and was suppressed' % suppressed) # inconsistency in Python 2.5 # other versions correctly return msg.get_payload() as str @@ -1201,16 +1215,20 @@ class GMailHandler(MailHandler): """ - A customized mail handler class for sending emails via GMail (or Google Apps mail):: - - handler = GMailHandler("my_user@gmail.com", "mypassword", ["to_user@some_mail.com"], ...) # other arguments same as MailHandler + A customized mail handler class for sending emails via GMail (or Google + Apps mail):: + + handler = GMailHandler( + "my_user@gmail.com", "mypassword", ["to_user@some_mail.com"], + ...) 
# other arguments same as MailHandler .. versionadded:: 0.6.0 """ def __init__(self, account_id, password, recipients, **kw): super(GMailHandler, self).__init__( - account_id, recipients, secure=(), server_addr=("smtp.gmail.com", 587), + account_id, recipients, secure=(), + server_addr=("smtp.gmail.com", 587), credentials=(account_id, password), **kw) @@ -1221,38 +1239,38 @@ default_format_string = SYSLOG_FORMAT_STRING # priorities - LOG_EMERG = 0 # system is unusable - LOG_ALERT = 1 # action must be taken immediately - LOG_CRIT = 2 # critical conditions - LOG_ERR = 3 # error conditions - LOG_WARNING = 4 # warning conditions - LOG_NOTICE = 5 # normal but significant condition - LOG_INFO = 6 # informational - LOG_DEBUG = 7 # debug-level messages + LOG_EMERG = 0 # system is unusable + LOG_ALERT = 1 # action must be taken immediately + LOG_CRIT = 2 # critical conditions + LOG_ERR = 3 # error conditions + LOG_WARNING = 4 # warning conditions + LOG_NOTICE = 5 # normal but significant condition + LOG_INFO = 6 # informational + LOG_DEBUG = 7 # debug-level messages # facility codes - LOG_KERN = 0 # kernel messages - LOG_USER = 1 # random user-level messages - LOG_MAIL = 2 # mail system - LOG_DAEMON = 3 # system daemons - LOG_AUTH = 4 # security/authorization messages - LOG_SYSLOG = 5 # messages generated internally by syslogd - LOG_LPR = 6 # line printer subsystem - LOG_NEWS = 7 # network news subsystem - LOG_UUCP = 8 # UUCP subsystem - LOG_CRON = 9 # clock daemon - LOG_AUTHPRIV = 10 # security/authorization messages (private) - LOG_FTP = 11 # FTP daemon + LOG_KERN = 0 # kernel messages + LOG_USER = 1 # random user-level messages + LOG_MAIL = 2 # mail system + LOG_DAEMON = 3 # system daemons + LOG_AUTH = 4 # security/authorization messages + LOG_SYSLOG = 5 # messages generated internally by syslogd + LOG_LPR = 6 # line printer subsystem + LOG_NEWS = 7 # network news subsystem + LOG_UUCP = 8 # UUCP subsystem + LOG_CRON = 9 # clock daemon + LOG_AUTHPRIV = 10 # 
security/authorization messages (private) + LOG_FTP = 11 # FTP daemon # other codes through 15 reserved for system use - LOG_LOCAL0 = 16 # reserved for local use - LOG_LOCAL1 = 17 # reserved for local use - LOG_LOCAL2 = 18 # reserved for local use - LOG_LOCAL3 = 19 # reserved for local use - LOG_LOCAL4 = 20 # reserved for local use - LOG_LOCAL5 = 21 # reserved for local use - LOG_LOCAL6 = 22 # reserved for local use - LOG_LOCAL7 = 23 # reserved for local use + LOG_LOCAL0 = 16 # reserved for local use + LOG_LOCAL1 = 17 # reserved for local use + LOG_LOCAL2 = 18 # reserved for local use + LOG_LOCAL3 = 19 # reserved for local use + LOG_LOCAL4 = 20 # reserved for local use + LOG_LOCAL5 = 21 # reserved for local use + LOG_LOCAL6 = 22 # reserved for local use + LOG_LOCAL7 = 23 # reserved for local use facility_names = { 'auth': LOG_AUTH, @@ -1534,8 +1552,8 @@ self.buffered_records.append(record) if self._buffer_full: self.buffered_records.popleft() - elif self.buffer_size and \ - len(self.buffered_records) >= self.buffer_size: + elif (self.buffer_size and + len(self.buffered_records) >= self.buffer_size): self._buffer_full = True return record.level >= self._level return False diff --git a/logbook/helpers.py b/logbook/helpers.py index 939d8e6..5c228f0 100644 --- a/logbook/helpers.py +++ b/logbook/helpers.py @@ -60,7 +60,7 @@ from http import client as http_client if PY2: - #Yucky, but apparently that's the only way to do this + # Yucky, but apparently that's the only way to do this exec(""" def reraise(tp, value, tb=None): raise tp, value, tb @@ -82,19 +82,23 @@ ) _missing = object() if PY2: - def b(x): return x - def _is_text_stream(x): return True + def b(x): + return x + + def _is_text_stream(x): + return True else: import io - def b(x): return x.encode('ascii') - def _is_text_stream(stream): return isinstance(stream, io.TextIOBase) + + def b(x): + return x.encode('ascii') + + def _is_text_stream(stream): + return isinstance(stream, io.TextIOBase) can_rename_open_file 
= False -if os.name == 'nt': # pragma: no cover - _rename = lambda src, dst: False - _rename_atomic = lambda src, dst: False - +if os.name == 'nt': try: import ctypes @@ -114,7 +118,7 @@ rv = False while not rv and retry < 100: rv = _MoveFileEx(src, dst, _MOVEFILE_REPLACE_EXISTING | - _MOVEFILE_WRITE_THROUGH) + _MOVEFILE_WRITE_THROUGH) if not rv: time.sleep(0.001) retry += 1 @@ -148,7 +152,11 @@ finally: _CloseHandle(ta) except Exception: - pass + def _rename(src, dst): + return False + + def _rename_atomic(src, dst): + return False def rename(src, dst): # Try atomic or pseudo-atomic rename @@ -173,6 +181,7 @@ can_rename_open_file = True _JSON_SIMPLE_TYPES = (bool, float) + integer_types + string_types + def to_safe_json(data): """Makes a data structure safe for JSON silently discarding invalid @@ -273,11 +282,26 @@ obj.__dict__[self.__name__] = value return value + def get_iterator_next_method(it): return lambda: next(it) + # python 2 support functions and aliases def is_unicode(x): if PY2: return isinstance(x, unicode) return isinstance(x, str) + +if PY2: + exec("""def with_metaclass(meta): + class _WithMetaclassBase(object): + __metaclass__ = meta + return _WithMetaclassBase +""") +else: + exec("""def with_metaclass(meta): + class _WithMetaclassBase(object, metaclass=meta): + pass + return _WithMetaclassBase +""") diff --git a/logbook/more.py b/logbook/more.py index c43634a..a61f736 100644 --- a/logbook/more.py +++ b/logbook/more.py @@ -14,9 +14,10 @@ from cgi import parse_qsl from functools import partial -from logbook.base import RecordDispatcher, dispatch_record, NOTSET, ERROR, NOTICE -from logbook.handlers import Handler, StringFormatter, \ - StringFormatterHandlerMixin, StderrHandler +from logbook.base import ( + RecordDispatcher, dispatch_record, NOTSET, ERROR, NOTICE) +from logbook.handlers import ( + Handler, StringFormatter, StringFormatterHandlerMixin, StderrHandler) from logbook._termcolors import colorize from logbook.helpers import PY2, 
string_types, iteritems, u @@ -29,8 +30,8 @@ from urllib.parse import urlencode _ws_re = re.compile(r'(\s+)(?u)') -TWITTER_FORMAT_STRING = \ -u('[{record.channel}] {record.level_name}: {record.message}') +TWITTER_FORMAT_STRING = u( + '[{record.channel}] {record.level_name}: {record.message}') TWITTER_ACCESS_TOKEN_URL = 'https://twitter.com/oauth/access_token' NEW_TWEET_URL = 'https://api.twitter.com/1/statuses/update.json' @@ -209,7 +210,8 @@ def tweet(self, status): """Tweets a given status. Status must not exceed 140 chars.""" client = self.make_client() - resp, content = client.request(NEW_TWEET_URL, 'POST', + resp, content = client.request( + NEW_TWEET_URL, 'POST', body=urlencode({'status': status.encode('utf-8')}), headers={'Content-Type': 'application/x-www-form-urlencoded'}) return resp['status'] == '200' @@ -268,11 +270,11 @@ self._subprocess = subprocess def emit(self, record): - args = [arg.format(record=record).encode(self.encoding) + args = [arg.format(record=record) for arg in self._arguments] if self._stdin_format is not None: - stdin_data = self._stdin_format.format(record=record) \ - .encode(self.encoding) + stdin_data = (self._stdin_format.format(record=record) + .encode(self.encoding)) stdin = self._subprocess.PIPE else: stdin = None @@ -325,14 +327,16 @@ # backwards compat. Should go away in some future releases -from logbook.handlers import FingersCrossedHandler as \ - FingersCrossedHandlerBase +from logbook.handlers import ( + FingersCrossedHandler as FingersCrossedHandlerBase) + + class FingersCrossedHandler(FingersCrossedHandlerBase): def __init__(self, *args, **kwargs): FingersCrossedHandlerBase.__init__(self, *args, **kwargs) from warnings import warn warn(PendingDeprecationWarning('fingers crossed handler changed ' - 'location. It\'s now a core component of Logbook.')) + 'location. 
It\'s now a core component of Logbook.')) class ExceptionHandler(Handler, StringFormatterHandlerMixin): @@ -360,10 +364,12 @@ raise self.exc_type(self.format(record)) return False + class DedupHandler(Handler): """A handler that deduplicates log messages. - It emits each unique log record once, along with the number of times it was emitted. + It emits each unique log record once, along with the number of times it was + emitted. Example::: with logbook.more.DedupHandler(): @@ -376,7 +382,9 @@ message repeated 2 times: foo message repeated 1 times: bar """ - def __init__(self, format_string='message repeated {count} times: {message}', *args, **kwargs): + def __init__(self, + format_string='message repeated {count} times: {message}', + *args, **kwargs): Handler.__init__(self, bubble=False, *args, **kwargs) self._format_string = format_string self.clear() @@ -398,16 +406,21 @@ self.flush() def handle(self, record): - if not record.message in self._message_to_count: + if record.message not in self._message_to_count: self._unique_ordered_records.append(record) self._message_to_count[record.message] += 1 return True def flush(self): for record in self._unique_ordered_records: - record.message = self._format_string.format(message=record.message, count=self._message_to_count[record.message]) - # record.dispatcher is the logger who created the message, it's sometimes supressed (by logbook.info for example) - dispatch = record.dispatcher.call_handlers if record.dispatcher is not None else dispatch_record + record.message = self._format_string.format( + message=record.message, + count=self._message_to_count[record.message]) + # record.dispatcher is the logger who created the message, + # it's sometimes supressed (by logbook.info for example) + if record.dispatcher is not None: + dispatch = record.dispatcher.call_handlers + else: + dispatch = dispatch_record dispatch(record) self.clear() - diff --git a/logbook/notifiers.py b/logbook/notifiers.py index cf94023..a83ed67 100644 
--- a/logbook/notifiers.py +++ b/logbook/notifiers.py @@ -15,14 +15,16 @@ from logbook.base import NOTSET, ERROR, WARNING from logbook.handlers import Handler, LimitingHandlerMixin -from logbook.helpers import get_application_name, PY2, http_client +from logbook.helpers import get_application_name, PY2, http_client, u if PY2: from urllib import urlencode else: from urllib.parse import urlencode -def create_notification_handler(application_name=None, level=NOTSET, icon=None): + +def create_notification_handler(application_name=None, level=NOTSET, + icon=None): """Creates a handler perfectly fit the current platform. On Linux systems this creates a :class:`LibNotifyHandler`, on OS X systems it will create a :class:`GrowlHandler`. @@ -151,7 +153,7 @@ try: from gtk import gdk except ImportError: - #TODO: raise a warning? + # TODO: raise a warning? raise RuntimeError('The gtk.gdk module is required to set an icon.') if icon is not None: @@ -197,8 +199,8 @@ def __init__(self, email, password, record_limit=None, record_delta=None, level=NOTSET, filter=None, bubble=False): - NotificationBaseHandler.__init__(self, None, record_limit, record_delta, - level, filter, bubble) + NotificationBaseHandler.__init__(self, None, record_limit, + record_delta, level, filter, bubble) self.email = email self.password = password @@ -219,8 +221,8 @@ con = http_client.HTTPSConnection('boxcar.io') con.request('POST', '/notifications/', headers={ 'Authorization': 'Basic ' + - base64.b64encode((u('%s:%s') % - (self.email, self.password)).encode('utf-8')).strip(), + base64.b64encode((u('%s:%s') % (self.email, self.password)) + .encode('utf-8')).strip(), }, body=body) con.close() @@ -231,8 +233,8 @@ """ def __init__(self, application_name=None, username=None, secret=None, - record_limit=None, record_delta=None, level=NOTSET, filter=None, - bubble=False, hide_level=False): + record_limit=None, record_delta=None, level=NOTSET, + filter=None, bubble=False, hide_level=False): try: import notifo 
except ImportError: @@ -240,14 +242,13 @@ 'The notifo module is not available. You have ' 'to install notifo to use the NotifoHandler.' ) - NotificationBaseHandler.__init__(self, None, record_limit, record_delta, - level, filter, bubble) + NotificationBaseHandler.__init__(self, None, record_limit, + record_delta, level, filter, bubble) self._notifo = notifo self.application_name = application_name self.username = username self.secret = secret self.hide_level = hide_level - def emit(self, record): diff --git a/logbook/queues.py b/logbook/queues.py index 8c6cb52..cdb58ed 100644 --- a/logbook/queues.py +++ b/logbook/queues.py @@ -33,8 +33,8 @@ handler = RedisHandler('http://127.0.0.1', port='9200', key='redis') - If your Redis instance is password protected, you can securely connect passing - your password when creating a RedisHandler object. + If your Redis instance is password protected, you can securely connect + passing your password when creating a RedisHandler object. Example:: @@ -42,9 +42,10 @@ More info about the default buffer size: wp.me/p3tYJu-3b """ - def __init__(self, host='127.0.0.1', port=6379, key='redis', extra_fields={}, - flush_threshold=128, flush_time=1, level=NOTSET, filter=None, - password=False, bubble=True, context=None, push_method='rpush'): + def __init__(self, host='127.0.0.1', port=6379, key='redis', + extra_fields={}, flush_threshold=128, flush_time=1, + level=NOTSET, filter=None, password=False, bubble=True, + context=None, push_method='rpush'): Handler.__init__(self, level, filter, bubble) try: import redis @@ -53,11 +54,13 @@ raise RuntimeError('The redis library is required for ' 'the RedisHandler') - self.redis = redis.Redis(host=host, port=port, password=password, decode_responses=True) + self.redis = redis.Redis(host=host, port=port, password=password, + decode_responses=True) try: self.redis.ping() except ResponseError: - raise ResponseError('The password provided is apparently incorrect') + raise ResponseError( + 'The password 
provided is apparently incorrect') self.key = key self.extra_fields = extra_fields self.flush_threshold = flush_threshold @@ -65,13 +68,13 @@ self.lock = Lock() self.push_method = push_method - #Set up a thread that flushes the queue every specified seconds + # Set up a thread that flushes the queue every specified seconds self._stop_event = threading.Event() self._flushing_t = threading.Thread(target=self._flush_task, - args=(flush_time, self._stop_event)) + args=(flush_time, + self._stop_event)) self._flushing_t.daemon = True self._flushing_t.start() - def _flush_task(self, time, stop_event): """Calls the method _flush_buffer every certain time. @@ -81,7 +84,6 @@ self._flush_buffer() self._stop_event.wait(time) - def _flush_buffer(self): """Flushes the messaging queue into Redis. @@ -93,7 +95,6 @@ getattr(self.redis, self.push_method)(self.key, *self.queue) self.queue = [] - def disable_buffering(self): """Disables buffering. @@ -102,13 +103,12 @@ self._stop_event.set() self.flush_threshold = 1 - def emit(self, record): """Emits a pair (key, value) to redis. - The key is the one provided when creating the handler, or redis if none was - provided. The value contains both the message and the hostname. Extra values - are also appended to the message. + The key is the one provided when creating the handler, or redis if none + was provided. The value contains both the message and the hostname. + Extra values are also appended to the message. """ with self.lock: r = {"message": record.msg, @@ -121,7 +121,6 @@ if len(self.queue) == self.flush_threshold: self._flush_buffer() - def close(self): self._flush_buffer() @@ -133,13 +132,28 @@ The queue will be filled with JSON exported log records. To receive such log records from a queue you can use the :class:`MessageQueueSubscriber`. - Example setup:: + For an AMQP backend such as RabbitMQ:: + + handler = MessageQueueHandler('amqp://guest:guest@localhost//') + + This requires the py-amqp or the librabbitmq client library. 
+ + For Redis (requires redis client library):: + + handler = MessageQueueHandler('redis://localhost:8889/0') + + For MongoDB (requires pymongo):: handler = MessageQueueHandler('mongodb://localhost:27017/logging') + + Several other backends are also supported. + Refer to the `kombu`_ documentation + + .. _kombu: http://kombu.readthedocs.org/en/latest/introduction.html """ def __init__(self, uri=None, queue='logging', level=NOTSET, - filter=None, bubble=False, context=None): + filter=None, bubble=False): Handler.__init__(self, level, filter, bubble) try: import kombu @@ -206,24 +220,24 @@ if uri is not None: self.socket.bind(uri) - def export_record(self, record): """Exports the record into a dictionary ready for JSON dumping.""" return record.to_dict(json_safe=True) def emit(self, record): - self.socket.send(json.dumps(self.export_record(record)).encode("utf-8")) + self.socket.send(json.dumps( + self.export_record(record)).encode("utf-8")) def close(self, linger=-1): self.socket.close(linger) def __del__(self): - # When the Handler is deleted we must close our socket in a non-blocking - # fashion (using linger). + # When the Handler is deleted we must close our socket in a + # non-blocking fashion (using linger). # Otherwise it can block indefinitely, for example if the Subscriber is # not reachable. - # If messages are pending on the socket, we wait 100ms for them to be sent - # then we discard them. + # If messages are pending on the socket, we wait 100ms for them to be + # sent then we discard them. self.close(linger=100) @@ -269,9 +283,9 @@ """Baseclass for all subscribers.""" def recv(self, timeout=None): - """Receives a single record from the socket. Timeout of 0 means nonblocking, - `None` means blocking and otherwise it's a timeout in seconds after which - the function just returns with `None`. + """Receives a single record from the socket. 
Timeout of 0 means + nonblocking, `None` means blocking and otherwise it's a timeout in + seconds after which the function just returns with `None`. Subclasses have to override this. """ @@ -355,9 +369,9 @@ self.queue.close() def recv(self, timeout=None): - """Receives a single record from the socket. Timeout of 0 means nonblocking, - `None` means blocking and otherwise it's a timeout in seconds after which - the function just returns with `None`. + """Receives a single record from the socket. Timeout of 0 means + nonblocking, `None` means blocking and otherwise it's a timeout in + seconds after which the function just returns with `None`. """ if timeout == 0: try: @@ -445,9 +459,9 @@ self.socket.close() def recv(self, timeout=None): - """Receives a single record from the socket. Timeout of 0 means nonblocking, - `None` means blocking and otherwise it's a timeout in seconds after which - the function just returns with `None`. + """Receives a single record from the socket. Timeout of 0 means + nonblocking, `None` means blocking and otherwise it's a timeout in + seconds after which the function just returns with `None`. 
""" if timeout is None: rv = self.socket.recv() @@ -577,7 +591,7 @@ try: rv = self.channel.receive(timeout=timeout) except self.channel.RemoteError: - #XXX: handle + # XXX: handle return None except (self.channel.TimeoutError, EOFError): return None @@ -670,7 +684,7 @@ if record: try: self.queue.put(record, timeout=0.05) - except Queue.Full: + except Full: pass finally: if self.setup is not None: diff --git a/logbook/ticketing.py b/logbook/ticketing.py index aaf7e0e..1d882c7 100644 --- a/logbook/ticketing.py +++ b/logbook/ticketing.py @@ -15,6 +15,7 @@ from logbook.handlers import Handler, HashingHandlerMixin from logbook.helpers import cached_property, b, PY2, u + class Ticket(object): """Represents a ticket from the database.""" @@ -89,7 +90,8 @@ """Returns the number of tickets.""" raise NotImplementedError() - def get_tickets(self, order_by='-last_occurrence_time', limit=50, offset=0): + def get_tickets(self, order_by='-last_occurrence_time', + limit=50, offset=0): """Selects tickets from the database.""" raise NotImplementedError() @@ -131,6 +133,7 @@ def setup_backend(self): from sqlalchemy import create_engine, MetaData + from sqlalchemy.orm import sessionmaker, scoped_session engine_or_uri = self.options.pop('uri', None) metadata = self.options.pop('metadata', None) table_prefix = self.options.pop('table_prefix', 'logbook_') @@ -138,7 +141,22 @@ if hasattr(engine_or_uri, 'execute'): self.engine = engine_or_uri else: - self.engine = create_engine(engine_or_uri, convert_unicode=True) + # Pool recycle keeps connections from going stale, + # which happens in MySQL Databases + # Pool size is more custom for out stack + self.engine = create_engine(engine_or_uri, convert_unicode=True, + pool_recycle=360, pool_size=1000) + + # Create session factory using session maker + session = sessionmaker() + + # Bind to the engined + session.configure(bind=self.engine) + + # Scoped session is a thread safe solution for + # interaction with the Database + self.session = 
scoped_session(session) + if metadata is None: metadata = MetaData() self.table_prefix = table_prefix @@ -152,29 +170,32 @@ metadata. """ import sqlalchemy as db + def table(name, *args, **kwargs): return db.Table(self.table_prefix + name, self.metadata, *args, **kwargs) self.tickets = table('tickets', - db.Column('ticket_id', db.Integer, primary_key=True), - db.Column('record_hash', db.String(40), unique=True), - db.Column('level', db.Integer), - db.Column('channel', db.String(120)), - db.Column('location', db.String(512)), - db.Column('module', db.String(256)), - db.Column('last_occurrence_time', db.DateTime), - db.Column('occurrence_count', db.Integer), - db.Column('solved', db.Boolean), - db.Column('app_id', db.String(80)) - ) + db.Column('ticket_id', db.Integer, + primary_key=True), + db.Column('record_hash', db.String(40), + unique=True), + db.Column('level', db.Integer), + db.Column('channel', db.String(120)), + db.Column('location', db.String(512)), + db.Column('module', db.String(256)), + db.Column('last_occurrence_time', db.DateTime), + db.Column('occurrence_count', db.Integer), + db.Column('solved', db.Boolean), + db.Column('app_id', db.String(80))) self.occurrences = table('occurrences', - db.Column('occurrence_id', db.Integer, primary_key=True), - db.Column('ticket_id', db.Integer, - db.ForeignKey(self.table_prefix + 'tickets.ticket_id')), - db.Column('time', db.DateTime), - db.Column('data', db.Text), - db.Column('app_id', db.String(80)) - ) + db.Column('occurrence_id', + db.Integer, primary_key=True), + db.Column('ticket_id', db.Integer, + db.ForeignKey(self.table_prefix + + 'tickets.ticket_id')), + db.Column('time', db.DateTime), + db.Column('data', db.Text), + db.Column('app_id', db.String(80))) def _order(self, q, table, order_by): if order_by[0] == '-': @@ -183,13 +204,13 @@ def record_ticket(self, record, data, hash, app_id): """Records a log record as ticket.""" - cnx = self.engine.connect() - trans = cnx.begin() + # Can use the session instead 
engine.connection and transaction + s = self.session try: q = self.tickets.select(self.tickets.c.record_hash == hash) - row = cnx.execute(q).fetchone() + row = s.execute(q).fetchone() if row is None: - row = cnx.execute(self.tickets.insert().values( + row = s.execute(self.tickets.insert().values( record_hash=hash, level=record.level, channel=record.channel or u(''), @@ -202,27 +223,30 @@ ticket_id = row.inserted_primary_key[0] else: ticket_id = row['ticket_id'] - cnx.execute(self.occurrences.insert() - .values(ticket_id=ticket_id, - time=record.time, - app_id=app_id, - data=json.dumps(data))) - cnx.execute(self.tickets.update() + s.execute(self.occurrences.insert() + .values(ticket_id=ticket_id, + time=record.time, + app_id=app_id, + data=json.dumps(data))) + s.execute( + self.tickets.update() .where(self.tickets.c.ticket_id == ticket_id) .values(occurrence_count=self.tickets.c.occurrence_count + 1, last_occurrence_time=record.time, solved=False)) - trans.commit() + s.commit() except Exception: - trans.rollback() + s.rollback() raise - cnx.close() + # Closes the session and removes it from the pool + s.remove() def count_tickets(self): """Returns the number of tickets.""" return self.engine.execute(self.tickets.count()).fetchone()[0] - def get_tickets(self, order_by='-last_occurrence_time', limit=50, offset=0): + def get_tickets(self, order_by='-last_occurrence_time', limit=50, + offset=0): """Selects tickets from the database.""" return [Ticket(self, row) for row in self.engine.execute( self._order(self.tickets.select(), self.tickets, order_by) @@ -231,15 +255,15 @@ def solve_ticket(self, ticket_id): """Marks a ticket as solved.""" self.engine.execute(self.tickets.update() - .where(self.tickets.c.ticket_id == ticket_id) - .values(solved=True)) + .where(self.tickets.c.ticket_id == ticket_id) + .values(solved=True)) def delete_ticket(self, ticket_id): """Deletes a ticket from the database.""" self.engine.execute(self.occurrences.delete() - 
.where(self.occurrences.c.ticket_id == ticket_id)) + .where(self.occurrences.c.ticket_id == ticket_id)) self.engine.execute(self.tickets.delete() - .where(self.tickets.c.ticket_id == ticket_id)) + .where(self.tickets.c.ticket_id == ticket_id)) def get_ticket(self, ticket_id): """Return a single ticket with all occurrences.""" @@ -251,7 +275,8 @@ def get_occurrences(self, ticket, order_by='-time', limit=50, offset=0): """Selects occurrences from the database for a ticket.""" return [Occurrence(self, row) for row in - self.engine.execute(self._order(self.occurrences.select() + self.engine.execute(self._order( + self.occurrences.select() .where(self.occurrences.c.ticket_id == ticket), self.occurrences, order_by) .limit(limit).offset(offset)).fetchall()] @@ -273,9 +298,8 @@ self.ticket_id = row['ticket_id'] self.occurrence_id = row['_id'] - #TODO: Update connection setup once PYTHON-160 is solved. + # TODO: Update connection setup once PYTHON-160 is solved. def setup_backend(self): - import pymongo from pymongo import ASCENDING, DESCENDING from pymongo.connection import Connection @@ -313,8 +337,10 @@ self.database = database # setup correct indexes - database.tickets.ensure_index([('record_hash', ASCENDING)], unique=True) - database.tickets.ensure_index([('solved', ASCENDING), ('level', ASCENDING)]) + database.tickets.ensure_index([('record_hash', ASCENDING)], + unique=True) + database.tickets.ensure_index([('solved', ASCENDING), + ('level', ASCENDING)]) database.occurrences.ensure_index([('time', DESCENDING)]) def _order(self, q, order_by): @@ -337,7 +363,8 @@ 'record_hash': hash, 'level': record.level, 'channel': record.channel or u(''), - 'location': u('%s:%d') % (record.filename, record.lineno), + 'location': u('%s:%d') % (record.filename, + record.lineno), 'module': record.module or u(''), 'occurrence_count': 0, 'solved': False, @@ -349,7 +376,7 @@ db.tickets.update({'_id': ticket_id}, { '$inc': { - 'occurrence_count': 1 + 'occurrence_count': 1 }, '$set': { 
'last_occurrence_time': record.time, @@ -369,10 +396,11 @@ """Returns the number of tickets.""" return self.database.tickets.count() - def get_tickets(self, order_by='-last_occurrence_time', limit=50, offset=0): + def get_tickets(self, order_by='-last_occurrence_time', limit=50, + offset=0): """Selects tickets from the database.""" - query = self._order(self.database.tickets.find(), order_by) \ - .limit(limit).skip(offset) + query = (self._order(self.database.tickets.find(), order_by) + .limit(limit).skip(offset)) return [self._FixedTicketClass(self, obj) for obj in query] def solve_ticket(self, ticket_id): @@ -434,7 +462,8 @@ used to keep multiple application setups apart when logging into the same database. :param hash_salt: an optional salt (binary string) for the hashes. - :param backend: A backend class that implements the proper database handling. + :param backend: A backend class that implements the proper database + handling. Backends available are: :class:`SQLAlchemyBackend`, :class:`MongoDBBackend`. 
""" diff --git a/logbook/utils.py b/logbook/utils.py new file mode 100644 index 0000000..09bf1c5 --- /dev/null +++ b/logbook/utils.py @@ -0,0 +1,178 @@ +from contextlib import contextmanager +import functools +import sys +import threading + +from .base import Logger +from .helpers import string_types +from logbook import debug as logbook_debug + + +class _SlowContextNotifier(object): + + def __init__(self, threshold, logger_func, args, kwargs): + self.logger_func = logger_func + self.args = args + self.kwargs = kwargs or {} + self.evt = threading.Event() + self.threshold = threshold + self.thread = threading.Thread(target=self._notifier) + + def _notifier(self): + self.evt.wait(timeout=self.threshold) + if not self.evt.is_set(): + self.logger_func(*self.args, **self.kwargs) + + def __enter__(self): + self.thread.start() + return self + + def __exit__(self, *_): + self.evt.set() + self.thread.join() + + +def logged_if_slow(message, threshold=1, func=logbook_debug, args=None, + kwargs=None): + """Logs a message (by default using the global debug logger) if a certain + context containing a set of operations is too slow + + >>> with logged_if_slow('too slow!'): + ... ... + """ + full_args = (message, ) if args is None else (message, ) + tuple(args) + return _SlowContextNotifier(threshold, func, full_args, kwargs) + + +class _Local(threading.local): + enabled = True + +_local = _Local() + + +@contextmanager +def suppressed_deprecations(): + """Disables deprecation messages temporarily + + >>> with suppressed_deprecations(): + ... 
call_some_deprecated_logic() + """ + prev_enabled = _local.enabled + _local.enabled = False + try: + yield + finally: + _local.enabled = prev_enabled + + +_deprecation_logger = Logger("deprecation") +_deprecation_locations = set() + + +def forget_deprecation_locations(): + _deprecation_locations.clear() + + +def _write_deprecations_if_needed(message, frame_correction): + if not _local.enabled: + return + caller_location = _get_caller_location(frame_correction=frame_correction+1) + if caller_location not in _deprecation_locations: + _deprecation_logger.warning(message, frame_correction=frame_correction+1) + _deprecation_locations.add(caller_location) + + +def log_deprecation_message(message, frame_correction=0): + _write_deprecations_if_needed("Deprecation message: {0}".format(message), frame_correction=frame_correction+1) + + +class _DeprecatedFunction(object): + + def __init__(self, func, message, obj=None, objtype=None): + super(_DeprecatedFunction, self).__init__() + self._func = func + self._message = message + self._obj = obj + self._objtype = objtype + + def _get_underlying_func(self): + returned = self._func + if isinstance(returned, classmethod): + if hasattr(returned, '__func__'): + returned = returned.__func__ + else: + returned = returned.__get__(self._objtype).__func__ + return returned + + def __call__(self, *args, **kwargs): + func = self._get_underlying_func() + warning = "{0} is deprecated.".format(self._get_func_str()) + if self._message is not None: + warning += " {0}".format(self._message) + _write_deprecations_if_needed(warning, frame_correction=+1) + if self._obj is not None: + return func(self._obj, *args, **kwargs) + elif self._objtype is not None: + return func(self._objtype, *args, **kwargs) + return func(*args, **kwargs) + + def _get_func_str(self): + func = self._get_underlying_func() + if self._objtype is not None: + return '{0}.{1}'.format(self._objtype.__name__, func.__name__) + return '{0}.{1}'.format(func.__module__, func.__name__) + 
+ def __get__(self, obj, objtype): + return self.bound_to(obj, objtype) + + def bound_to(self, obj, objtype): + return _DeprecatedFunction(self._func, self._message, obj=obj, + objtype=objtype) + + @property + def __name__(self): + return self._get_underlying_func().__name__ + + @property + def __doc__(self): + returned = self._get_underlying_func().__doc__ + if returned: # pylint: disable=no-member + returned += "\n.. deprecated\n" # pylint: disable=no-member + if self._message: + returned += " {0}".format( + self._message) # pylint: disable=no-member + return returned + + @__doc__.setter + def __doc__(self, doc): + self._get_underlying_func().__doc__ = doc + + +def deprecated(func=None, message=None): + """Marks the specified function as deprecated, and emits a warning when + it's called. + + >>> @deprecated(message='No longer supported') + ... def deprecated_func(): + ... pass + + This will cause a warning log to be emitted when the function gets called, + with the correct filename/lineno + """ + if isinstance(func, string_types): + assert message is None + message = func + func = None + + if func is None: + return functools.partial(deprecated, message=message) + + return _DeprecatedFunction(func, message) + + +def _get_caller_location(frame_correction): + frame = sys._getframe(frame_correction + 1) # pylint: disable=protected-access + try: + return (frame.f_code.co_name, frame.f_lineno) + finally: + del frame diff --git a/scripts/make-release.py b/scripts/make-release.py index 8a793cd..23cbdc7 100644 --- a/scripts/make-release.py +++ b/scripts/make-release.py @@ -27,7 +27,6 @@ match = re.search('^Version\s+(.*)', line.strip()) if match is None: continue - length = len(match.group(1)) version = match.group(1).strip() if lineiter.next().count('-') != len(match.group(0)): continue @@ -61,6 +60,7 @@ def set_filename_version(filename, version_number, pattern): changed = [] + def inject_version(match): before, old, after = match.groups() changed.append(True) @@ 
-76,23 +76,10 @@ f.write(contents) -def set_init_version(version): - info('Setting __init__.py version to %s', version) - set_filename_version('logbook/__init__.py', version, '__version__') - - -def set_setup_version(version): - info('Setting setup.py version to %s', version) - set_filename_version('setup.py', version, 'version') - -def set_doc_version(version): - info('Setting docs/conf.py version to %s', version) - set_filename_version('docs/conf.py', version, 'version') - set_filename_version('docs/conf.py', version, 'release') - - -def build_and_upload(): - Popen([sys.executable, 'setup.py', 'release', 'sdist', 'upload']).wait() +def set_version(version): + info('Setting version to %s', version) + with open('logbook/__version__.py', 'w') as f: + f.write('__version__ = {!r}'.format(version)) def fail(message, *args): @@ -105,7 +92,8 @@ def get_git_tags(): - return set(Popen(['git', 'tag'], stdout=PIPE).communicate()[0].splitlines()) + return set(Popen(['git', 'tag'], + stdout=PIPE).communicate()[0].splitlines()) def git_is_clean(): @@ -123,7 +111,9 @@ parser = argparse.ArgumentParser("%prog [options]") -parser.add_argument("--no-upload", dest="upload", action="store_false", default=True) +parser.add_argument("--no-upload", dest="upload", + action="store_false", default=True) + def main(): args = parser.parse_args() @@ -144,21 +134,16 @@ if version in tags: fail('Version "%s" is already tagged', version) if release_date.date() != date.today(): - fail('Release date is not today (%s != %s)' % (release_date.date(), date.today())) + fail('Release date is not today (%s != %s)' % + (release_date.date(), date.today())) if not git_is_clean(): fail('You have uncommitted changes in git') - set_init_version(version) - set_setup_version(version) - set_doc_version(version) + set_version(version) make_git_commit('Bump version number to %s', version) make_git_tag(version) - if args.upload: - build_and_upload() - set_init_version(dev_version) - set_setup_version(dev_version) - 
set_doc_version(dev_version) + set_version(dev_version) make_git_commit('Bump version number to %s', dev_version) diff --git a/scripts/test_setup.py b/scripts/test_setup.py index c586d61..b380aa6 100644 --- a/scripts/test_setup.py +++ b/scripts/test_setup.py @@ -1,12 +1,6 @@ #! /usr/bin/python -import subprocess -import os +import pip import sys - -def _execute(*args, **kwargs): - result = subprocess.call(*args, **kwargs) - if result != 0: - sys.exit(result) if __name__ == '__main__': python_version = sys.version_info @@ -24,4 +18,5 @@ else: deps.append("Jinja2") print("Setting up dependencies...") - _execute([os.path.join(os.path.dirname(sys.executable), "pip"), "install"] + deps, shell=False) + result = pip.main(["install"] + deps) + sys.exit(result) diff --git a/scripts/travis_build.py b/scripts/travis_build.py index da4bafa..db3f794 100644 --- a/scripts/travis_build.py +++ b/scripts/travis_build.py @@ -13,5 +13,6 @@ print("PyPy+Cython configuration skipped") else: sys.exit( - subprocess.call("make cybuild test" if use_cython else "make test", shell=True) + subprocess.call( + "make cybuild test" if use_cython else "make test", shell=True) ) diff --git a/setup.cfg b/setup.cfg index fb57ac7..60070cd 100644 --- a/setup.cfg +++ b/setup.cfg @@ -5,9 +5,3 @@ [upload_docs] upload-dir = docs/_build/html - -[egg_info] -tag_date = true - -[aliases] -release = egg_info -RDb '' diff --git a/setup.py b/setup.py index 4162d1a..45da0e2 100644 --- a/setup.py +++ b/setup.py @@ -56,8 +56,8 @@ import sys from setuptools import setup, Extension, Feature from distutils.command.build_ext import build_ext -from distutils.errors import CCompilerError, DistutilsExecError, \ - DistutilsPlatformError +from distutils.errors import ( + CCompilerError, DistutilsExecError, DistutilsPlatformError) extra = {} @@ -92,12 +92,17 @@ cmdclass['build_ext'] = ve_build_ext # Don't try to compile the extension if we're running on PyPy -if os.path.isfile('logbook/_speedups.c') and not hasattr(sys, 
"pypy_translation_info"): +if (os.path.isfile('logbook/_speedups.c') and + not hasattr(sys, "pypy_translation_info")): speedups = Feature('optional C speed-enhancement module', standard=True, ext_modules=[Extension('logbook._speedups', ['logbook/_speedups.c'])]) else: speedups = None + + +with open(os.path.join(os.path.dirname(__file__), "logbook", "__version__.py")) as version_file: + exec(version_file.read()) # pylint: disable=W0122 def run_setup(with_binary): @@ -106,7 +111,7 @@ features['speedups'] = speedups setup( name='Logbook', - version='0.10.0', + version=__version__, license='BSD', url='http://logbook.pocoo.org/', author='Armin Ronacher, Georg Brandl', @@ -117,9 +122,16 @@ zip_safe=False, platforms='any', cmdclass=cmdclass, + classifiers=[ + "Programming Language :: Python :: 2.6", + "Programming Language :: Python :: 2.7", + "Programming Language :: Python :: 3.2", + "Programming Language :: Python :: 3.3", + "Programming Language :: Python :: 3.4", + "Programming Language :: Python :: 3.5", + ], features=features, install_requires=[ - 'six>=1.4.0', ], **extra ) diff --git a/tests/conftest.py b/tests/conftest.py index 71f24f7..1882116 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -2,6 +2,7 @@ import pytest logbook.StderrHandler().push_application() + @pytest.fixture def logger(): @@ -91,16 +92,17 @@ try: import gevent - +except ImportError: + pass +else: @pytest.fixture(scope="module", autouse=True, params=[False, True]) def gevent(request): module_name = getattr(request.module, '__name__', '') - if not any(s in module_name for s in ('queues', 'processors')) and request.param: + if (not any(s in module_name for s in ('queues', 'processors')) + and request.param): from logbook.concurrency import enable_gevent, _disable_gevent enable_gevent() @request.addfinalizer def fin(): _disable_gevent() -except ImportError: - pass diff --git a/tests/test_deadlock.py b/tests/test_deadlock.py index bd066f3..66e4ad1 100644 --- a/tests/test_deadlock.py +++ 
b/tests/test_deadlock.py @@ -28,7 +28,8 @@ def test_deadlock_in_emit(): logbook_logger = logbook.Logger("logbook") obj = MyObject(logbook_logger.info) - stream_handler = logbook.StreamHandler(stream=sys.stderr, level=logbook.DEBUG) + stream_handler = logbook.StreamHandler(stream=sys.stderr, + level=logbook.DEBUG) stream_handler.lock = FakeLock() with stream_handler.applicationbound(): logbook_logger.info("format this: {}", obj) diff --git a/tests/test_file_handler.py b/tests/test_file_handler.py index 6feed07..ba6feda 100644 --- a/tests/test_file_handler.py +++ b/tests/test_file_handler.py @@ -9,9 +9,9 @@ def test_file_handler(logfile, activation_strategy, logger): - handler = logbook.FileHandler(logfile, - format_string='{record.level_name}:{record.channel}:' - '{record.message}',) + handler = logbook.FileHandler( + logfile, + format_string='{record.level_name}:{record.channel}:{record.message}',) with activation_strategy(handler): logger.warn('warning message') handler.close() @@ -27,9 +27,10 @@ def test_file_handler_delay(logfile, activation_strategy, logger): - handler = logbook.FileHandler(logfile, - format_string='{record.level_name}:{record.channel}:' - '{record.message}', delay=True) + handler = logbook.FileHandler( + logfile, + format_string='{record.level_name}:{record.channel}:{record.message}', + delay=True) assert (not os.path.isfile(logfile)) with activation_strategy(handler): logger.warn('warning message') @@ -41,10 +42,12 @@ def test_monitoring_file_handler(logfile, activation_strategy, logger): if os.name == 'nt': - pytest.skip('unsupported on windows due to different IO (also unneeded)') - handler = logbook.MonitoringFileHandler(logfile, - format_string='{record.level_name}:{record.channel}:' - '{record.message}', delay=True) + pytest.skip( + 'unsupported on windows due to different IO (also unneeded)') + handler = logbook.MonitoringFileHandler( + logfile, + format_string='{record.level_name}:{record.channel}:{record.message}', + delay=True) with 
activation_strategy(handler): logger.warn('warning message') os.rename(logfile, logfile + '.old') @@ -80,7 +83,8 @@ if x.startswith(basename)] files.sort() - assert files == [basename, basename + '.1', basename + '.2', basename + '.3'] + assert files == [basename, basename + + '.1', basename + '.2', basename + '.3'] with open(logfile) as f: assert f.readline().rstrip() == ('C' * 256) assert f.readline().rstrip() == ('D' * 256) @@ -91,7 +95,8 @@ @pytest.mark.parametrize("backup_count", [1, 3]) def test_timed_rotating_file_handler(tmpdir, activation_strategy, backup_count): basename = str(tmpdir.join('trot.log')) - handler = logbook.TimedRotatingFileHandler(basename, backup_count=backup_count) + handler = logbook.TimedRotatingFileHandler( + basename, backup_count=backup_count) handler.format_string = '[{record.time:%H:%M}] {record.message}' def fake_record(message, year, month, day, hour=0, @@ -113,7 +118,8 @@ files = sorted(x for x in os.listdir(str(tmpdir)) if x.startswith('trot')) - assert files == ['trot-2010-01-0{0}.log'.format(i) for i in xrange(5, 9)][-backup_count:] + assert files == ['trot-2010-01-0{0}.log'.format(i) + for i in xrange(5, 9)][-backup_count:] with open(str(tmpdir.join('trot-2010-01-08.log'))) as f: assert f.readline().rstrip() == '[01:00] Last One' assert f.readline().rstrip() == '[02:00] Last One' diff --git a/tests/test_fingers_crossed_handler.py b/tests/test_fingers_crossed_handler.py index a2644cc..49f4882 100644 --- a/tests/test_fingers_crossed_handler.py +++ b/tests/test_fingers_crossed_handler.py @@ -1,5 +1,4 @@ import logbook -import pytest from .utils import capturing_stderr_context @@ -73,5 +72,6 @@ logger.warn('Moar!') logger.error('Pure hate!') - assert test_handler.formatted_records == ['[WARNING] Test: Aha!', '[WARNING] Test: Moar!', '[ERROR] Test: Pure hate!'] - + assert test_handler.formatted_records == ['[WARNING] Test: Aha!', + '[WARNING] Test: Moar!', + '[ERROR] Test: Pure hate!'] diff --git a/tests/test_flags.py 
b/tests/test_flags.py index 1e333b3..eb0bf59 100644 --- a/tests/test_flags.py +++ b/tests/test_flags.py @@ -20,7 +20,8 @@ with pytest.raises(Exception) as caught: with logbook.Flags(errors='raise'): logger.warn('Foo {42}', 'aha') - assert 'Could not format message with provided arguments' in str(caught.value) + assert 'Could not format message with provided arguments' in str( + caught.value) def test_disable_introspection(logger): diff --git a/tests/test_handler_errors.py b/tests/test_handler_errors.py index 56e055a..8095083 100644 --- a/tests/test_handler_errors.py +++ b/tests/test_handler_errors.py @@ -36,11 +36,13 @@ record.message errormsg = str(caught.value) - assert re.search('Could not format message with provided arguments: ' - 'Invalid (?:format specifier)|(?:conversion specification)|(?:format spec)', - errormsg, re.M | re.S) + assert re.search( + 'Could not format message with provided arguments: Invalid ' + '(?:format specifier)|(?:conversion specification)|(?:format spec)', + errormsg, re.M | re.S) assert "msg='Hello {foo:invalid}'" in errormsg assert 'args=()' in errormsg assert "kwargs={'foo': 42}" in errormsg - assert re.search(r'Happened in file .*%s, line \d+' % re.escape(__file_without_pyc__), - errormsg, re.M | re.S) + assert re.search( + r'Happened in file .*%s, line \d+' % re.escape(__file_without_pyc__), + errormsg, re.M | re.S) diff --git a/tests/test_handlers.py b/tests/test_handlers.py index 03fa7de..4de4c61 100644 --- a/tests/test_handlers.py +++ b/tests/test_handlers.py @@ -12,8 +12,8 @@ record.extra['ip'] = client_ip custom_log = CustomLogger('awesome logger') - fmt = '[{record.level_name}] {record.channel}: ' \ - '{record.message} [{record.extra[ip]}]' + fmt = ('[{record.level_name}] {record.channel}: ' + '{record.message} [{record.extra[ip]}]') handler = logbook.TestHandler(format_string=fmt) assert handler.format_string == fmt diff --git a/tests/test_log_record.py b/tests/test_log_record.py index 865d766..0fb6ad6 100644 --- 
a/tests/test_log_record.py +++ b/tests/test_log_record.py @@ -10,6 +10,14 @@ with logbook.StreamHandler(sys.stderr): logger.debug('message', exc_info=True) assert 'Traceback' not in captured.getvalue() + + +def test_exc_info_false(): + with logbook.handlers.TestHandler() as handler: + logbook.debug('message here', exc_info=False) + [record] = handler.records + assert not record.formatted_exception + def test_extradict(active_handler, logger): logger.warn('Test warning') @@ -24,6 +32,7 @@ logger.warn('test') assert active_handler.records[0].calling_frame == sys._getframe() + def test_frame_correction(active_handler, logger): def inner(): logger.warn('test', frame_correction=+1) @@ -31,6 +40,7 @@ inner() assert active_handler.records[0].calling_frame == sys._getframe() + def test_dispatcher(active_handler, logger): logger.warn('Logbook is too awesome for stdlib') assert active_handler.records[0].dispatcher == logger diff --git a/tests/test_logging_api.py b/tests/test_logging_api.py index 936c147..a9b2c64 100644 --- a/tests/test_logging_api.py +++ b/tests/test_logging_api.py @@ -2,7 +2,7 @@ import sys import logbook -from logbook.helpers import iteritems, xrange +from logbook.helpers import iteritems, xrange, u import pytest @@ -32,12 +32,13 @@ def test_exception_catching_with_unicode(): - """ See https://github.com/mitsuhiko/logbook/issues/104 + """ See https://github.com/getlogbook/logbook/issues/104 """ try: raise Exception(u('\u202a test \u202c')) except: - r = logbook.LogRecord('channel', 'DEBUG', 'test', exc_info=sys.exc_info()) + r = logbook.LogRecord('channel', 'DEBUG', 'test', + exc_info=sys.exc_info()) r.exception_message @@ -47,7 +48,8 @@ 1 / 0 except Exception: exc_info = sys.exc_info() - logger.info("Exception caught", exc_info=exc_info if as_tuple else True) + logger.info("Exception caught", + exc_info=exc_info if as_tuple else True) assert active_handler.records[0].exc_info is not None assert active_handler.records[0].exc_info == exc_info diff --git 
a/tests/test_logging_compat.py b/tests/test_logging_compat.py index f983078..48dfebe 100644 --- a/tests/test_logging_compat.py +++ b/tests/test_logging_compat.py @@ -21,7 +21,8 @@ from logbook.compat import redirected_logging # mimic the default logging setting - request.addfinalizer(functools.partial(logging.root.setLevel, logging.root.level)) + request.addfinalizer(functools.partial( + logging.root.setLevel, logging.root.level)) logging.root.setLevel(logging.WARNING) name = 'test_logbook-%d' % randrange(1 << 32) @@ -35,7 +36,8 @@ logger.warn('This is from the old %s', 'system') logger.error('This is from the old system') logger.critical('This is from the old system') - assert ('WARNING: %s: This is from the old system' % name) in captured.getvalue() + assert ('WARNING: %s: This is from the old system' % + name) in captured.getvalue() if set_root_logger_level: assert handler.records[0].level == logbook.DEBUG else: @@ -79,5 +81,6 @@ redirector.end() assert len(handler.records) == 1 - assert handler.formatted_records[0].startswith('[WARNING] RuntimeWarning: Testing') + assert handler.formatted_records[0].startswith( + '[WARNING] RuntimeWarning: Testing') assert __file_without_pyc__ in handler.records[0].filename diff --git a/tests/test_logging_times.py b/tests/test_logging_times.py index 758c2f2..f9c44e6 100644 --- a/tests/test_logging_times.py +++ b/tests/test_logging_times.py @@ -37,8 +37,9 @@ # get the difference between LogRecord local and utc times logbook_minutes_diff = get_total_delta_seconds(time_local - time_utc)/60.0 - assert abs(logbook_minutes_diff) > 1, ('Localtime does not differ from UTC by more than 1 ' - 'minute (Local: %s, UTC: %s)' % (time_local, time_utc)) + assert abs(logbook_minutes_diff) > 1, ( + 'Localtime does not differ from UTC by more than 1 ' + 'minute (Local: %s, UTC: %s)' % (time_local, time_utc)) ratio = logbook_minutes_diff / tz_minutes_diff diff --git a/tests/test_mail_handler.py b/tests/test_mail_handler.py index 18803a4..babc4e2 
100644 --- a/tests/test_mail_handler.py +++ b/tests/test_mail_handler.py @@ -36,7 +36,8 @@ if 'Content-Transfer-Encoding: base64' in header: data = base64.b64decode(data).decode('utf-8') assert re.search('Message type:\s+ERROR', data) - assert re.search('Location:.*%s' % re.escape(__file_without_pyc__), data) + assert re.search('Location:.*%s' % + re.escape(__file_without_pyc__), data) assert re.search('Module:\s+%s' % __name__, data) assert re.search('Function:\s+test_mail_handler', data) body = u('Viva la Espa\xf1a') diff --git a/tests/test_more.py b/tests/test_more.py index e762ab9..597b80f 100644 --- a/tests/test_more.py +++ b/tests/test_more.py @@ -81,7 +81,6 @@ def test_tagging_logger(default_handler): from logbook import StderrHandler from logbook.more import TaggingLogger - stream = StringIO() logger = TaggingLogger('tagged', ['a', 'b']) handler = StderrHandler(format_string="{record.msg}|{record.extra[tags]}") diff --git a/tests/test_processors.py b/tests/test_processors.py index a58b421..daaf92a 100644 --- a/tests/test_processors.py +++ b/tests/test_processors.py @@ -1,26 +1,29 @@ +from textwrap import dedent + import logbook from .utils import make_fake_mail_handler def test_handler_filter_after_processor(activation_strategy, logger): - handler = make_fake_mail_handler(format_string='''\ -Subject: Application Error for {record.extra[path]} [{record.extra[method]}] + handler = make_fake_mail_handler( + format_string=dedent(''' + Subject: Application Error for {record.extra[path]} [{record.extra[method]}] -Message type: {record.level_name} -Location: {record.filename}:{record.lineno} -Module: {record.module} -Function: {record.func_name} -Time: {record.time:%Y-%m-%d %H:%M:%S} -Remote IP: {record.extra[ip]} -Request: {record.extra[path]} [{record.extra[method]}] + Message type: {record.level_name} + Location: {record.filename}:{record.lineno} + Module: {record.module} + Function: {record.func_name} + Time: {record.time:%Y-%m-%d %H:%M:%S} + Remote IP: 
{record.extra[ip]} + Request: {record.extra[path]} [{record.extra[method]}] -Message: + Message: -{record.message} -''', - filter=lambda r, h: 'ip' in r.extra, - bubble=False) + {record.message} + ''').lstrip(), + filter=lambda r, h: 'ip' in r.extra, + bubble=False) class Request(object): remote_addr = '127.0.0.1' @@ -52,21 +55,22 @@ def test_handler_processors(activation_strategy, logger): - handler = make_fake_mail_handler(format_string='''\ -Subject: Application Error for {record.extra[path]} [{record.extra[method]}] + handler = make_fake_mail_handler( + format_string=dedent(''' + Subject: Application Error for {record.extra[path]} [{record.extra[method]}] -Message type: {record.level_name} -Location: {record.filename}:{record.lineno} -Module: {record.module} -Function: {record.func_name} -Time: {record.time:%Y-%m-%d %H:%M:%S} -Remote IP: {record.extra[ip]} -Request: {record.extra[path]} [{record.extra[method]}] + Message type: {record.level_name} + Location: {record.filename}:{record.lineno} + Module: {record.module} + Function: {record.func_name} + Time: {record.time:%Y-%m-%d %H:%M:%S} + Remote IP: {record.extra[ip]} + Request: {record.extra[path]} [{record.extra[method]}] -Message: + Message: -{record.message} -''') + {record.message} + ''').lstrip()) class Request(object): remote_addr = '127.0.0.1' diff --git a/tests/test_queues.py b/tests/test_queues.py index 7550c9c..80ce040 100644 --- a/tests/test_queues.py +++ b/tests/test_queues.py @@ -1,6 +1,7 @@ # -*- coding: utf-8 -*- +import os +import socket import time -import socket from .utils import require_module, missing, LETTERS @@ -71,6 +72,8 @@ @require_module('multiprocessing') def test_multi_processing_handler(): + if os.getenv('APPVEYOR') == 'True': + pytest.skip('Test hangs on AppVeyor CI') from multiprocessing import Process, Queue from logbook.queues import MultiProcessingSubscriber queue = Queue(-1) @@ -134,6 +137,8 @@ @require_module('multiprocessing') def test_subscriber_group(): + if 
os.getenv('APPVEYOR') == 'True': + pytest.skip('Test hangs on AppVeyor CI') from multiprocessing import Process, Queue from logbook.queues import MultiProcessingSubscriber, SubscriberGroup a_queue = Queue(-1) @@ -163,7 +168,8 @@ FIELDS = ['message', 'host'] r = redis.Redis(decode_responses=True) redis_handler = RedisHandler(level=logbook.INFO, bubble=True) - # We don't want output for the tests, so we can wrap everything in a NullHandler + # We don't want output for the tests, so we can wrap everything in a + # NullHandler null_handler = logbook.NullHandler() # Check default values diff --git a/tests/test_syslog_handler.py b/tests/test_syslog_handler.py index 8bd89ed..9772a2a 100644 --- a/tests/test_syslog_handler.py +++ b/tests/test_syslog_handler.py @@ -26,8 +26,9 @@ rv = inc.recvfrom(1024)[0] except socket.error: assert False, 'got timeout on socket' - assert rv == (u('<12>%stestlogger: Syslog is weird\x00') % - ((app_name and (app_name + u(':'))) or u(''))).encode('utf-8') + assert rv == ( + u('<12>%stestlogger: Syslog is weird\x00') % + ((app_name and (app_name + u(':'))) or u(''))).encode('utf-8') @pytest.fixture diff --git a/tests/test_test_handler.py b/tests/test_test_handler.py index 781b35a..5c92854 100644 --- a/tests/test_test_handler.py +++ b/tests/test_test_handler.py @@ -11,7 +11,8 @@ def test_test_handler_cache(active_handler, logger): logger.warn('First line') assert len(active_handler.formatted_records) == 1 - cache = active_handler.formatted_records # store cache, to make sure it is identifiable + # store cache, to make sure it is identifiable + cache = active_handler.formatted_records assert len(active_handler.formatted_records) == 1 assert cache is active_handler.formatted_records logger.warn('Second line invalidates cache') diff --git a/tests/test_ticketing.py b/tests/test_ticketing.py index efdb4e5..eaa241b 100644 --- a/tests/test_ticketing.py +++ b/tests/test_ticketing.py @@ -17,10 +17,13 @@ @require_module('sqlalchemy') def 
test_basic_ticketing(logger): from logbook.ticketing import TicketingHandler + from time import sleep with TicketingHandler('sqlite:///') as handler: for x in xrange(5): logger.warn('A warning') + sleep(0.1) logger.info('An error') + sleep(0.1) if x < 2: try: 1 / 0 diff --git a/tests/test_unicode.py b/tests/test_unicode.py index 2b74eba..96ff00c 100644 --- a/tests/test_unicode.py +++ b/tests/test_unicode.py @@ -25,7 +25,8 @@ # it's a string, is wrong, but just dump it in the logger, # don't try to decode/encode it logger.warn('Русский'.encode('koi8-r')) - assert "WARNING: testlogger: b'\\xf2\\xd5\\xd3\\xd3\\xcb\\xc9\\xca'" in stream.getvalue() + expected = "WARNING: testlogger: b'\\xf2\\xd5\\xd3\\xd3\\xcb\\xc9\\xca'" + assert expected in stream.getvalue() @require_py3 diff --git a/tests/test_utils.py b/tests/test_utils.py new file mode 100644 index 0000000..3d1443f --- /dev/null +++ b/tests/test_utils.py @@ -0,0 +1,202 @@ +import pytest +import logbook + +from logbook.utils import ( + logged_if_slow, deprecated, forget_deprecation_locations, + suppressed_deprecations, log_deprecation_message) +from time import sleep + +_THRESHOLD = 0.1 + + +def test_logged_if_slow_reached(logger, test_handler): + with test_handler.applicationbound(): + with logged_if_slow('checking...', threshold=_THRESHOLD): + sleep(2*_THRESHOLD) + assert len(test_handler.records) == 1 + [record] = test_handler.records + assert record.message == 'checking...' 
+ + +def test_logged_if_slow_did_not_reached(logger, test_handler): + with test_handler.applicationbound(): + with logged_if_slow('checking...', threshold=_THRESHOLD): + sleep(_THRESHOLD/2) + assert len(test_handler.records) == 0 + + +def test_deprecated_func_called(capture): + assert deprecated_func(1, 2) == 3 + + +def test_deprecation_message(capture): + deprecated_func(1, 2) + + [record] = capture.records + assert "deprecated" in record.message + assert 'deprecated_func' in record.message + + +def test_deprecation_with_message(capture): + + @deprecated("use something else instead") + def func(a, b): + return a + b + + func(1, 2) + + [record] = capture.records + assert "use something else instead" in record.message + assert "func is deprecated" in record.message + + +def test_no_deprecations(capture): + + @deprecated('msg') + def func(a, b): + return a + b + + with suppressed_deprecations(): + assert func(1, 2) == 3 + assert not capture.records + + +def _no_decorator(func): + return func + + +@pytest.mark.parametrize('decorator', [_no_decorator, classmethod]) +def test_class_deprecation(capture, decorator): + + class Bla(object): + + @deprecated('reason') + @classmethod + def func(self, a, b): + assert isinstance(self, Bla) + return a + b + + assert Bla().func(2, 4) == 6 + + [record] = capture.records + assert 'Bla.func is deprecated' in record.message + + +def test_deprecations_different_sources(capture): + + def f(): + deprecated_func(1, 2) + + def g(): + deprecated_func(1, 2) + + f() + g() + assert len(capture.records) == 2 + + +def test_deprecations_same_sources(capture): + + def f(): + deprecated_func(1, 2) + + f() + f() + assert len(capture.records) == 1 + + +def test_deprecation_message_different_sources(capture): + + def f(flag): + if flag: + log_deprecation_message('first message type') + else: + log_deprecation_message('second message type') + + f(True) + f(False) + assert len(capture.records) == 2 + + +def 
test_deprecation_message_same_sources(capture): + + def f(flag): + if flag: + log_deprecation_message('first message type') + else: + log_deprecation_message('second message type') + + f(True) + f(True) + assert len(capture.records) == 1 + + +def test_deprecation_message_full_warning(capture): + def f(): + log_deprecation_message('some_message') + f() + + [record] = capture.records + assert record.message == 'Deprecation message: some_message' + + +def test_name_doc(): + @deprecated + def some_func(): + """docstring here""" + pass + + assert some_func.__name__ == 'some_func' + assert 'docstring here' in some_func.__doc__ + + +def test_doc_update(): + @deprecated('some_message') + def some_func(): + """docstring here""" + pass + + some_func.__doc__ = 'new_docstring' + + assert 'docstring here' not in some_func.__doc__ + assert 'new_docstring' in some_func.__doc__ + assert 'some_message' in some_func.__doc__ + + +def test_deprecatd_docstring(): + + message = "Use something else instead" + + @deprecated() + def some_func(): + """This is a function + """ + + @deprecated(message) + def other_func(): + """This is another function + """ + + assert ".. deprecated" in some_func.__doc__ + assert ".. deprecated\n {0}".format(message) in other_func.__doc__ + + +@pytest.fixture +def capture(request): + handler = logbook.TestHandler(level=logbook.WARNING) + handler.push_application() + + @request.addfinalizer + def pop(): + handler.pop_application() + return handler + + +@deprecated +def deprecated_func(a, b): + return a + b + + +@pytest.fixture(autouse=True) +def forget_locations(): + forget_deprecation_locations() diff --git a/tests/utils.py b/tests/utils.py index d545806..281b299 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -6,7 +6,6 @@ :copyright: (c) 2010 by Armin Ronacher, Georg Brandl. :license: BSD, see LICENSE for more details. 
""" -import platform import functools import sys from contextlib import contextmanager @@ -20,14 +19,19 @@ LETTERS = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ" + def get_total_delta_seconds(delta): """ - Replacement for datetime.timedelta.total_seconds() for Python 2.5, 2.6 and 3.1 + Replacement for datetime.timedelta.total_seconds() for Python 2.5, 2.6 + and 3.1 """ return (delta.microseconds + (delta.seconds + delta.days * 24 * 3600) * 10**6) / 10**6 -require_py3 = pytest.mark.skipif(sys.version_info[0] < 3, reason="Requires Python 3") +require_py3 = pytest.mark.skipif( + sys.version_info[0] < 3, reason="Requires Python 3") + + def require_module(module_name): found = True try: @@ -35,7 +39,9 @@ except ImportError: found = False - return pytest.mark.skipif(not found, reason='Module {0} is required'.format(module_name)) + return pytest.mark.skipif( + not found, reason='Module {0} is required'.format(module_name)) + def make_fake_mail_handler(**kwargs): class FakeMailHandler(logbook.MailHandler): @@ -70,8 +76,10 @@ return wrapper return decorate + def activate_via_with_statement(handler): return handler + @contextmanager def activate_via_push_pop(handler): @@ -81,6 +89,7 @@ finally: handler.pop_thread() + @contextmanager def capturing_stderr_context(): original = sys.stderr diff --git a/tox.ini b/tox.ini index 2151aa8..b5183a1 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist=py26,py27,py32,py33,py34,pypy,docs +envlist=py26,py27,py32,py33,py34,py35,pypy,docs skipsdist=True [testenv]