New upstream version 20.0.0
Chris Lamb
4 years ago
5 | 5 | examples, |
6 | 6 | scripts, |
7 | 7 | _compat.py, |
8 | argparse_compat.py, | |
9 | six.py, | |
10 | selectors.py, | |
11 | 8 | _gaiohttp.py, |
12 | 9 | |
13 | 10 | [MESSAGES CONTROL] |
23 | 20 | eval-used, |
24 | 21 | fixme, |
25 | 22 | import-error, |
23 | import-outside-toplevel, | |
26 | 24 | import-self, |
27 | 25 | inconsistent-return-statements, |
28 | 26 | invalid-name, |
49 | 47 | wrong-import-order, |
50 | 48 | ungrouped-imports, |
51 | 49 | unused-argument, |
50 | useless-object-inheritance, | |
51 | useless-import-alias, | |
52 | comparison-with-callable, | |
53 | try-except-raise, |
1 | 1 | language: python |
2 | 2 | matrix: |
3 | 3 | include: |
4 | - python: 2.6 | |
5 | env: TOXENV=py26 | |
6 | - python: 2.7 | |
7 | env: TOXENV=py27 | |
4 | - python: 3.8 | |
5 | env: TOXENV=lint | |
6 | dist: xenial | |
7 | sudo: true | |
8 | 8 | - python: 3.4 |
9 | 9 | env: TOXENV=py34 |
10 | 10 | - python: 3.5 |
11 | 11 | env: TOXENV=py35 |
12 | 12 | - python: 3.6 |
13 | 13 | env: TOXENV=py36 |
14 | - python: 3.6-dev | |
15 | env: TOXENV=py36-dev | |
16 | - python: nightly | |
14 | - python: 3.7 | |
17 | 15 | env: TOXENV=py37 |
18 | - python: 3.6 | |
19 | env: TOXENV=lint | |
20 | allow_failures: | |
21 | - env: TOXENV=py36-dev | |
22 | - env: TOXENV=py37 | |
16 | dist: xenial | |
17 | sudo: true | |
18 | - python: pypy3 | |
19 | env: TOXENV=pypy3 | |
20 | dist: xenial | |
21 | - python: 3.8 | |
22 | env: TOXENV=py38 | |
23 | dist: xenial | |
24 | sudo: true | |
25 | - python: 3.8 | |
26 | env: TOXENV=docs-lint | |
27 | dist: xenial | |
28 | sudo: true | |
23 | 29 | install: pip install tox |
24 | 30 | # TODO: https://github.com/tox-dev/tox/issues/149 |
25 | 31 | script: tox --recreate |
140 | 140 | |
141 | 141 | * Step 2: Find the MAINTAINERS file which affects this directory. If the directory itself does not have a MAINTAINERS file, work your way up the repo hierarchy until you find one. |
142 | 142 | |
143 | * Step 3: The first maintainer listed is the primary maintainer. The pull request is assigned to him. He may assign it to other listed maintainers, at his discretion. | |
143 | * Step 3: The first maintainer listed is the primary maintainer who is assigned the Pull Request. The primary maintainer can reassign a Pull Request to other listed maintainers. | |
144 | 144 | |
145 | 145 | |
146 | 146 | ### I'm a maintainer, should I make pull requests too? |
0 | Core maintainers | |
1 | ================ | |
2 | ||
0 | 3 | Benoit Chesneau <benoitc@gunicorn.org> |
4 | Konstantin Kapustin <sirkonst@gmail.com> | |
5 | Randall Leeds <randall.leeds@gmail.com> | |
6 | Berker Peksağ <berker.peksag@gmail.com> | |
7 | Jason Madden <jason@nextthought.com> | |
8 | ||
9 | Alumni | |
10 | ====== | |
11 | ||
12 | This list contains maintainers that are no longer active on the project. | |
13 | It is thanks to these people that the project has become what it is today. | |
14 | Thank you! | |
15 | ||
16 | ||
1 | 17 | Paul J. Davis <paul.joseph.davis@gmail.com> |
2 | Randall Leeds <randall.leeds@gmail.com> | |
3 | Konstantin Kapustin <sirkonst@gmail.com> | |
4 | 18 | Kenneth Reitz <me@kennethreitz.com> |
5 | 19 | Nikolay Kim <fafhrd91@gmail.com> |
6 | 20 | Andrew Svetlov <andrew.svetlov@gmail.com> |
7 | Stéphane Wirtel <stephane@wirtel.be> | |
8 | Berker Peksağ <berker.peksag@gmail.com> | |
21 | Stéphane Wirtel <stephane@wirtel.be>⏎ |
18 | 18 | of the software without specific, written prior permission. |
19 | 19 | |
20 | 20 | VINAY SAJIP DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, |
21 | INCLUDINGALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL | |
21 | INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL | |
22 | 22 | VINAY SAJIP BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR |
23 | 23 | ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER |
24 | 24 | IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT |
121 | 121 | -------------- |
122 | 122 | |
123 | 123 | backport from python3 Lib/test/support.py
124 | ||
125 | ||
126 | gunicorn/selectors.py | |
127 | --------------------- | |
128 | Copyright (c) 2001-2016 Python Software Foundation; All Rights Reserved | |
129 | ||
130 | PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 | |
131 | -------------------------------------------- | |
132 | ||
133 | 1. This LICENSE AGREEMENT is between the Python Software Foundation | |
134 | ("PSF"), and the Individual or Organization ("Licensee") accessing and | |
135 | otherwise using this software ("Python") in source or binary form and | |
136 | its associated documentation. | |
137 | ||
138 | 2. Subject to the terms and conditions of this License Agreement, PSF hereby | |
139 | grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, | |
140 | analyze, test, perform and/or display publicly, prepare derivative works, | |
141 | distribute, and otherwise use Python alone or in any derivative version, | |
142 | provided, however, that PSF's License Agreement and PSF's notice of copyright, | |
143 | i.e., "Copyright (c) 2001-2016 Python Software Foundation; All Rights | |
144 | Reserved" are retained in Python alone or in any derivative version prepared | |
145 | by Licensee. | |
146 | ||
147 | 3. In the event Licensee prepares a derivative work that is based on | |
148 | or incorporates Python or any part thereof, and wants to make | |
149 | the derivative work available to others as provided herein, then | |
150 | Licensee hereby agrees to include in any such work a brief summary of | |
151 | the changes made to Python. | |
152 | ||
153 | 4. PSF is making Python available to Licensee on an "AS IS" | |
154 | basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR | |
155 | IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND | |
156 | DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS | |
157 | FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT | |
158 | INFRINGE ANY THIRD PARTY RIGHTS. | |
159 | ||
160 | 5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON | |
161 | FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS | |
162 | A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, | |
163 | OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. | |
164 | ||
165 | 6. This License Agreement will automatically terminate upon a material | |
166 | breach of its terms and conditions. | |
167 | ||
168 | 7. Nothing in this License Agreement shall be deemed to create any | |
169 | relationship of agency, partnership, or joint venture between PSF and | |
170 | Licensee. This License Agreement does not grant permission to use PSF | |
171 | trademarks or trade name in a trademark sense to endorse or promote | |
172 | products or services of Licensee, or any third party. | |
173 | ||
174 | 8. By copying, installing or otherwise using Python, Licensee | |
175 | agrees to be bound by the terms and conditions of this License | |
176 | Agreement. |
27 | 27 | Installation |
28 | 28 | ------------ |
29 | 29 | |
30 | Gunicorn requires **Python 2.x >= 2.6** or **Python 3.x >= 3.2**. | |
30 | Gunicorn requires **Python 3.x >= 3.4**. | |
31 | 31 | |
32 | 32 | Install from PyPI:: |
33 | 33 | |
51 | 51 | $ gunicorn --workers=2 test:app |
52 | 52 | |
53 | 53 | |
54 | Contributing | |
55 | ------------ | |
56 | ||
57 | See `our complete contributor's guide <CONTRIBUTING.md>`_ for more details. | |
58 | ||
59 | ||
54 | 60 | License |
55 | 61 | ------- |
56 | 62 |
30 | 30 | bninja <andrew@poundpay.com> |
31 | 31 | Bob Hagemann <bob+code@twilio.com> |
32 | 32 | Bobby Beckmann <bobby@macs-MacBook-Pro.local> |
33 | Brett Randall <javabrett@gmail.com> | |
33 | 34 | Brian Rosner <brosner@gmail.com> |
34 | 35 | Bruno Bigras <bigras.bruno@gmail.com> |
35 | 36 | Caleb Brown <git@calebbrown.id.au> |
45 | 46 | Dan Sully <daniel-github@electricrain.com> |
46 | 47 | Daniel Quinn <code@danielquinn.org> |
47 | 48 | Dariusz Suchojad <dsuch-github@m.zato.io> |
49 | David Black <github@dhb.is> | |
48 | 50 | David Vincelli <david@freshbooks.com> |
49 | 51 | David Wolever <david@wolever.net> |
50 | 52 | Denis Bilenko <denis.bilenko@gmail.com> |
52 | 54 | Dima Barsky <github@kappa.ac93.org> |
53 | 55 | Djoume Salvetti <djoume@freshbooks.com> |
54 | 56 | Dmitry Medvinsky <me@dmedvinsky.name> |
57 | Dustin Ingram <di@users.noreply.github.com> | |
55 | 58 | Ed Morley <edmorley@users.noreply.github.com> |
56 | 59 | Eric Florenzano <floguy@gmail.com> |
57 | 60 | Eric Shull <eric@elevenbasetwo.com> |
58 | 61 | Eugene Obukhov <irvind25@gmail.com> |
59 | 62 | Evan Mezeske <evan@meebo-inc.com> |
63 | Florian Apolloner <florian@apolloner.eu> | |
60 | 64 | Gaurav Kumar <gauravkumar37@gmail.com> |
61 | 65 | George Kollias <georgioskollias@gmail.com> |
62 | 66 | George Notaras <gnot@g-loaded.eu> |
68 | 72 | Hasan Ramezani <hasan.r67@gmail.com> |
69 | 73 | Hebert J <hebert@mail.ru> |
70 | 74 | Hobson Lane <shopper@totalgood.com> |
75 | Hugo van Kemenade <hugovk@users.noreply.github.com> | |
71 | 76 | Igor Petrov <igor.s.petrov@gmail.com> |
72 | 77 | INADA Naoki <methane@users.noreply.github.com> |
73 | 78 | Jakub Paweł Głazik <zytek@nuxi.pl> |
1 | 1 | environment: |
2 | 2 | matrix: |
3 | 3 | - TOXENV: lint |
4 | PYTHON: "C:\\Python35-x64" | |
5 | - TOXENV: py27 | |
6 | PYTHON: "C:\\Python27-x64" | |
4 | PYTHON: "C:\\Python37-x64" | |
7 | 5 | - TOXENV: py35 |
8 | 6 | PYTHON: "C:\\Python35-x64" |
9 | 7 | - TOXENV: py36 |
10 | 8 | PYTHON: "C:\\Python36-x64" |
9 | - TOXENV: py37 | |
10 | PYTHON: "C:\\Python37-x64" | |
11 | 11 | matrix: |
12 | 12 | allow_failures: |
13 | - TOXENV: py27 | |
14 | 13 | - TOXENV: py35 |
15 | 14 | - TOXENV: py36 |
15 | - TOXENV: py37 | |
16 | 16 | init: SET "PATH=%PYTHON%;%PYTHON%\\Scripts;%PATH%" |
17 | 17 | install: |
18 | 18 | - pip install tox |
5 | 5 | |
6 | 6 | To generate documentation you need to install: |
7 | 7 | |
8 | - Python >= 2.5 | |
8 | - Python >= 3.4 | |
9 | 9 | - Sphinx (http://sphinx-doc.org/) |
10 | 10 | |
11 | 11 |
15 | 15 | <div class="logo-div"> |
16 | 16 | <div class="latest"> |
17 | 17 | Latest version: <strong><a |
18 | href="http://docs.gunicorn.org/en/stable">19.9.0</a></strong> | |
18 | href="https://docs.gunicorn.org/en/stable/">19.9.0</a></strong> | |
19 | 19 | </div> |
20 | 20 | |
21 | 21 | <div class="logo"><img src="images/logo.jpg" ></div> |
178 | 178 | </div> |
179 | 179 | </div> |
180 | 180 | |
181 | <script src="http://ajax.googleapis.com/ajax/libs/jquery/1.7.2/jquery.min.js"></script> | |
181 | <script src="https://ajax.googleapis.com/ajax/libs/jquery/1.7.2/jquery.min.js"></script> | |
182 | 182 | <script src="js/main.js"></script> |
183 | 183 | </body> |
184 | 184 | </html> |
49 | 49 | --testing, specified when user is experimenting |
50 | 50 | """ |
51 | 51 | |
52 | # Please be careful that all syntax used in this file can be parsed on | |
53 | # Python 1.5 -- this version check is not evaluated until after the | |
54 | # entire file has been parsed. | |
55 | import sys | |
56 | if sys.hexversion < 0x02020000: | |
57 | print 'This script requires Python 2.2 or later.' | |
58 | print 'Currently run with version: %s' % sys.version | |
59 | sys.exit(1) | |
60 | ||
61 | 52 | import fnmatch |
62 | 53 | import glob |
63 | 54 | import gzip |
70 | 61 | import urllib |
71 | 62 | import urlparse |
72 | 63 | import xml.sax |
73 | ||
74 | # True and False were introduced in Python2.2.2 | |
75 | try: | |
76 | testTrue=True | |
77 | del testTrue | |
78 | except NameError: | |
79 | True=1 | |
80 | False=0 | |
81 | 64 | |
82 | 65 | # Text encodings |
83 | 66 | ENC_ASCII = 'ASCII' |
0 | ================ | |
1 | Changelog - 2018 | |
2 | ================ | |
3 | ||
4 | .. note:: | |
5 | ||
6 | Please see :doc:`news` for the latest changes | |
7 | ||
8 | 19.9.0 / 2018/07/03 | |
9 | =================== | |
10 | ||
11 | - fix: address a regression that prevented syslog support from working | |
12 | (:issue:`1668`, :pr:`1773`) | |
13 | - fix: correctly set `REMOTE_ADDR` on versions of Python 3 affected by | |
14 | `Python Issue 30205 <https://bugs.python.org/issue30205>`_ | |
15 | (:issue:`1755`, :pr:`1796`) | |
16 | - fix: show zero response length correctly in access log (:pr:`1787`) | |
17 | - fix: prevent raising :exc:`AttributeError` when ``--reload`` is not passed | |
18 | in case of a :exc:`SyntaxError` raised from the WSGI application. | |
19 | (:issue:`1805`, :pr:`1806`) | |
20 | - The internal module ``gunicorn.workers.async`` was renamed to ``gunicorn.workers.base_async`` | |
21 | since ``async`` is now a reserved word in Python 3.7. | |
22 | (:pr:`1527`) | |
23 | ||
24 | 19.8.1 / 2018/04/30 | |
25 | =================== | |
26 | ||
27 | - fix: secure scheme headers when bound to a unix socket | |
28 | (:issue:`1766`, :pr:`1767`) | |
29 | ||
30 | 19.8.0 / 2018/04/28 | |
31 | =================== | |
32 | ||
33 | - Eventlet 0.21.0 support (:issue:`1584`) | |
34 | - Tornado 5 support (:issue:`1728`, :pr:`1752`) | |
35 | - support watching additional files with ``--reload-extra-file`` | |
36 | (:pr:`1527`) | |
37 | - support configuring logging with a dictionary with ``--logging-config-dict`` | |
38 | (:issue:`1087`, :pr:`1110`, :pr:`1602`) | |
39 | - add support for the ``--config`` flag in the ``GUNICORN_CMD_ARGS`` environment | |
40 | variable (:issue:`1576`, :pr:`1581`) | |
41 | - disable ``SO_REUSEPORT`` by default and add the ``--reuse-port`` setting | |
42 | (:issue:`1553`, :issue:`1603`, :pr:`1669`) | |
43 | - fix: installing `inotify` on MacOS no longer breaks the reloader | |
44 | (:issue:`1540`, :pr:`1541`) | |
45 | - fix: do not throw ``TypeError`` when ``SO_REUSEPORT`` is not available | |
46 | (:issue:`1501`, :pr:`1491`) | |
47 | - fix: properly decode HTTP paths containing certain non-ASCII characters | |
48 | (:issue:`1577`, :pr:`1578`) | |
49 | - fix: remove whitespace when logging header values under gevent (:pr:`1607`) | |
50 | - fix: close unlinked temporary files (:issue:`1327`, :pr:`1428`) | |
51 | - fix: parse ``--umask=0`` correctly (:issue:`1622`, :pr:`1632`) | |
52 | - fix: allow loading applications using relative file paths | |
53 | (:issue:`1349`, :pr:`1481`) | |
54 | - fix: force blocking mode on the gevent sockets (:issue:`880`, :pr:`1616`) | |
55 | - fix: preserve leading `/` in request path (:issue:`1512`, :pr:`1511`) | |
56 | - fix: forbid contradictory secure scheme headers | |
57 | - fix: handle malformed basic authentication headers in access log | |
58 | (:issue:`1683`, :pr:`1684`) | |
59 | - fix: defer handling of ``USR1`` signal to a new greenlet under gevent | |
60 | (:issue:`1645`, :pr:`1651`) | |
61 | - fix: the threaded worker would sometimes close the wrong keep-alive | |
62 | connection under Python 2 (:issue:`1698`, :pr:`1699`) | |
63 | - fix: re-open log files on ``USR1`` signal using ``handler._open`` to | |
64 | support subclasses of ``FileHandler`` (:issue:`1739`, :pr:`1742`) | |
65 | - deprecation: the ``gaiohttp`` worker is deprecated, see the | |
66 | :ref:`worker-class` documentation for more information | |
67 | (:issue:`1338`, :pr:`1418`, :pr:`1569`)⏎ |
20 | 20 | master_doc = 'index' |
21 | 21 | |
22 | 22 | # General information about the project. |
23 | project = u'Gunicorn' | |
24 | copyright = u'2009-%s, Benoit Chesneau' % time.strftime('%Y') | |
23 | project = 'Gunicorn' | |
24 | copyright = '2009-%s, Benoit Chesneau' % time.strftime('%Y') | |
25 | 25 | # gunicorn version |
26 | 26 | import gunicorn |
27 | 27 | release = version = gunicorn.__version__ |
54 | 54 | } |
55 | 55 | |
56 | 56 | latex_documents = [ |
57 | ('index', 'Gunicorn.tex', u'Gunicorn Documentation', | |
58 | u'Benoit Chesneau', 'manual'), | |
57 | ('index', 'Gunicorn.tex', 'Gunicorn Documentation', | |
58 | 'Benoit Chesneau', 'manual'), | |
59 | 59 | ] |
60 | 60 | |
61 | 61 | |
62 | 62 | # -- Options for manual page output -------------------------------------------- |
63 | 63 | man_pages = [ |
64 | ('index', 'gunicorn', u'Gunicorn Documentation', | |
65 | [u'Benoit Chesneau'], 1) | |
64 | ('index', 'gunicorn', 'Gunicorn Documentation', | |
65 | ['Benoit Chesneau'], 1) | |
66 | 66 | ] |
67 | 67 | |
68 | 68 | texinfo_documents = [ |
69 | ('index', 'Gunicorn', u'Gunicorn Documentation', | |
70 | u'Benoit Chesneau', 'Gunicorn', 'One line description of project.', | |
69 | ('index', 'Gunicorn', 'Gunicorn Documentation', | |
70 | 'Benoit Chesneau', 'Gunicorn', 'One line description of project.', | |
71 | 71 | 'Miscellaneous'), |
72 | 72 | ] |
12 | 12 | a custom Application: |
13 | 13 | |
14 | 14 | .. literalinclude:: ../../examples/standalone_app.py |
15 | :lines: 11-60 | |
15 | :start-after: # See the NOTICE for more information | |
16 | :lines: 2- | |
17 | ||
18 | Direct Usage of Existing WSGI Apps | |
19 | ---------------------------------- | |
20 | ||
21 | If necessary, you can run Gunicorn straight from Python, allowing you to | |
22 | specify a WSGI-compatible application at runtime. This can be handy for | |
23 | rolling deploys or in the case of using PEX files to deploy your application, | |
24 | as the app and Gunicorn can be bundled in the same PEX file. Gunicorn has | |
25 | this functionality built-in as a first class citizen known as | |
26 | :class:`gunicorn.app.wsgiapp`. This can be used to run WSGI-compatible app | |
27 | instances such as those produced by Flask or Django. Assuming your WSGI API | |
28 | package is *exampleapi*, and your application instance is *app*, this is all | |
29 | you need to get going:: | |
30 | ||
31 | gunicorn.app.wsgiapp exampleapi:app | |
32 | ||
33 | This command will work with any Gunicorn CLI parameters or a config file - just | |
34 | pass them along as if you're directly giving them to Gunicorn: | |
35 | ||
36 | .. code-block:: bash | |
37 | ||
38 | # Custom parameters | |
39 | $ python gunicorn.app.wsgiapp exampleapi:app --bind=0.0.0.0:8081 --workers=4 | |
40 | # Using a config file | |
41 | $ python gunicorn.app.wsgiapp exampleapi:app -c config.py | |
42 | ||
43 | Note for those using PEX: use ``-c gunicorn`` as your entry at build | |
44 | time, and your compiled app should work with the entry point passed to it at | |
45 | run time. | |
46 | ||
47 | .. code-block:: bash | |
48 | ||
49 | # Generic pex build command via bash from root of exampleapi project | |
50 | $ pex . -v -c gunicorn -o compiledapp.pex | |
51 | # Running it | |
52 | ./compiledapp.pex exampleapi:app -c gunicorn_config.py |
66 | 66 | handled. Previous to Gunicorn 19 this was set to the value of |
67 | 67 | ``X-Forwarded-For`` if received from a trusted proxy. However, this was not in |
68 | 68 | compliance with :rfc:`3875` which is why the ``REMOTE_ADDR`` is now the IP |
69 | address of **the proxy** and **not the actual user**. You should instead | |
70 | configure Nginx to send the user's IP address through the ``X-Forwarded-For`` | |
71 | header like this:: | |
72 | ||
73 | ... | |
74 | proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; | |
75 | ... | |
69 | address of **the proxy** and **not the actual user**. | |
70 | ||
71 | To have access logs indicate **the actual user** IP when proxied, set | |
72 | :ref:`access-log-format` with a format which includes ``X-Forwarded-For``. For | |
73 | example, this format uses ``X-Forwarded-For`` in place of ``REMOTE_ADDR``:: | |
74 | ||
75 | %({x-forwarded-for}i)s %(l)s %(u)s %(t)s "%(r)s" %(s)s %(b)s "%(f)s" "%(a)s" | |
76 | 76 | |
77 | 77 | It is also worth noting that the ``REMOTE_ADDR`` will be completely empty if |
78 | 78 | you bind Gunicorn to a UNIX socket and not a TCP ``host:port`` tuple. |
211 | 211 | Systemd |
212 | 212 | ------- |
213 | 213 | |
214 | A tool that is starting to be common on linux systems is Systemd_. Below are | |
215 | configurations files and instructions for using systemd to create a unix socket | |
216 | for incoming Gunicorn requests. Systemd will listen on this socket and start | |
217 | gunicorn automatically in response to traffic. Later in this section are | |
218 | instructions for configuring Nginx to forward web traffic to the newly created | |
219 | unix socket: | |
214 | A tool that is starting to be common on linux systems is Systemd_. It is a | |
215 | system services manager that allows for strict process management, resources | |
216 | and permissions control. | |
217 | ||
218 | Below are configurations files and instructions for using systemd to create | |
219 | a unix socket for incoming Gunicorn requests. Systemd will listen on this | |
220 | socket and start gunicorn automatically in response to traffic. Later in | |
221 | this section are instructions for configuring Nginx to forward web traffic | |
222 | to the newly created unix socket: | |
220 | 223 | |
221 | 224 | **/etc/systemd/system/gunicorn.service**:: |
222 | 225 | |
226 | 229 | After=network.target |
227 | 230 | |
228 | 231 | [Service] |
229 | PIDFile=/run/gunicorn/pid | |
232 | Type=notify | |
233 | # the specific user that our service will run as | |
230 | 234 | User=someuser |
231 | 235 | Group=someuser |
236 | # another option for an even more restricted service is | |
237 | # DynamicUser=yes | |
238 | # see http://0pointer.net/blog/dynamic-users-with-systemd.html | |
232 | 239 | RuntimeDirectory=gunicorn |
233 | 240 | WorkingDirectory=/home/someuser/applicationroot |
234 | ExecStart=/usr/bin/gunicorn --pid /run/gunicorn/pid \ | |
235 | --bind unix:/run/gunicorn/socket applicationname.wsgi | |
241 | ExecStart=/usr/bin/gunicorn applicationname.wsgi | |
236 | 242 | ExecReload=/bin/kill -s HUP $MAINPID |
237 | ExecStop=/bin/kill -s TERM $MAINPID | |
243 | KillMode=mixed | |
244 | TimeoutStopSec=5 | |
238 | 245 | PrivateTmp=true |
239 | 246 | |
240 | 247 | [Install] |
246 | 253 | Description=gunicorn socket |
247 | 254 | |
248 | 255 | [Socket] |
249 | ListenStream=/run/gunicorn/socket | |
256 | ListenStream=/run/gunicorn.sock | |
257 | # Our service won't need permissions for the socket, since it | |
258 | # inherits the file descriptor by socket activation | |
259 | # only the nginx daemon will need access to the socket | |
260 | User=www-data | |
261 | # Optionally restrict the socket permissions even more. | |
262 | # Mode=600 | |
250 | 263 | |
251 | 264 | [Install] |
252 | 265 | WantedBy=sockets.target |
253 | 266 | |
254 | **/etc/tmpfiles.d/gunicorn.conf**:: | |
255 | ||
256 | d /run/gunicorn 0755 someuser somegroup - | |
257 | ||
258 | Next enable the socket so it autostarts at boot:: | |
259 | ||
260 | systemctl enable gunicorn.socket | |
261 | ||
262 | Either reboot, or start the services manually:: | |
263 | ||
264 | systemctl start gunicorn.socket | |
265 | ||
266 | ||
267 | After running ``curl --unix-socket /run/gunicorn/socket http``, Gunicorn | |
268 | should start and you should see some HTML from your server in the terminal. | |
267 | ||
268 | Next enable and start the socket (it will autostart at boot too):: | |
269 | ||
270 | systemctl enable --now gunicorn.socket | |
271 | ||
272 | ||
273 | Now let's see if the nginx daemon will be able to connect to the socket. | |
274 | Running ``sudo -u www-data curl --unix-socket /run/gunicorn.sock http``, | |
275 | our Gunicorn service will be automatically started and you should see some | |
276 | HTML from your server in the terminal. | |
277 | ||
278 | .. note:: | |
279 | ||
280 | systemd employs cgroups to track the processes of a service, so it doesn't | |
281 | need pid files. In the rare case that you need to find out the service main | |
282 | pid, you can use ``systemctl show --value -p MainPID gunicorn.service``, but | |
283 | if you only want to send a signal an even better option is | |
284 | ``systemctl kill -s HUP gunicorn.service``. | |
285 | ||
286 | .. note:: | |
287 | ||
288 | ``www-data`` is the default nginx user in debian, other distributions use | |
289 | different users (for example: ``http`` or ``nginx``). Check your distro to | |
290 | know what to put for the socket user, and for the sudo command. | |
269 | 291 | |
270 | 292 | You must now configure your web proxy to send traffic to the new Gunicorn |
271 | 293 | socket. Edit your ``nginx.conf`` to include the following: |
272 | 294 | |
273 | 295 | **/etc/nginx/nginx.conf**:: |
274 | 296 | |
297 | user www-data; | |
275 | 298 | ... |
276 | 299 | http { |
277 | 300 | server { |
278 | 301 | listen 8000; |
279 | 302 | server_name 127.0.0.1; |
280 | 303 | location / { |
281 | proxy_pass http://unix:/run/gunicorn/socket; | |
304 | proxy_pass http://unix:/run/gunicorn.sock; | |
282 | 305 | } |
283 | 306 | } |
284 | 307 | } |
289 | 312 | The listen and server_name used here are configured for a local machine. |
290 | 313 | In a production server you will most likely listen on port 80, |
291 | 314 | and use your URL as the server_name. |
292 | ||
315 | ||
293 | 316 | Now make sure you enable the nginx service so it automatically starts at boot:: |
294 | 317 | |
295 | 318 | systemctl enable nginx.service |
296 | ||
319 | ||
297 | 320 | Either reboot, or start Nginx with the following command:: |
298 | 321 | |
299 | 322 | systemctl start nginx |
300 | ||
323 | ||
301 | 324 | Now you should be able to test Nginx with Gunicorn by visiting |
302 | 325 | http://127.0.0.1:8000/ in any web browser. Systemd is now set up. |
303 | 326 |
45 | 45 | Python. In general, an application should be able to make use of these worker |
46 | 46 | classes with no changes. |
47 | 47 | |
48 | For full greenlet support applications might need to be adapted. | |
49 | When using, e.g., Gevent_ and Psycopg_ it makes sense to ensure psycogreen_ is | |
50 | installed and `setup <http://www.gevent.org/api/gevent.monkey.html#plugins>`_. | |
51 | ||
52 | Other applications might not be compatible at all as they, e.g., rely on | |
53 | the original unpatched behavior. | |
54 | ||
48 | 55 | Tornado Workers |
49 | 56 | --------------- |
50 | 57 | |
58 | 65 | AsyncIO Workers |
59 | 66 | --------------- |
60 | 67 | |
61 | These workers are compatible with python3. You have two kind of workers. | |
68 | These workers are compatible with Python 3. | |
62 | 69 | |
63 | 70 | The worker `gthread` is a threaded worker. It accepts connections in the |
64 | 71 | main loop, accepted connections are added to the thread pool as a |
66 | 73 | waiting for an event. If no event happen after the keep alive timeout, |
67 | 74 | the connection is closed. |
68 | 75 | |
69 | The worker `gaiohttp` is a full asyncio worker using aiohttp_. | |
70 | ||
71 | .. note:: | |
72 | The ``gaiohttp`` worker requires the aiohttp_ module to be installed. | |
73 | aiohttp_ has removed its native WSGI application support in version 2. | |
74 | If you want to continue to use the ``gaiohttp`` worker with your WSGI | |
75 | application (e.g. an application that uses Flask or Django), there are | |
76 | three options available: | |
77 | ||
78 | #. Install aiohttp_ version 1.3.5 instead of version 2:: | |
79 | ||
80 | $ pip install aiohttp==1.3.5 | |
81 | ||
82 | #. Use aiohttp_wsgi_ to wrap your WSGI application. You can take a look | |
83 | at the `example`_ in the Gunicorn repository. | |
84 | #. Port your application to use aiohttp_'s ``web.Application`` API. | |
85 | #. Use the ``aiohttp.worker.GunicornWebWorker`` worker instead of the | |
86 | deprecated ``gaiohttp`` worker. | |
76 | You can also port your application to use aiohttp_'s ``web.Application`` API and use the | |
77 | ``aiohttp.worker.GunicornWebWorker`` worker. | |
87 | 78 | |
88 | 79 | Choosing a Worker Type |
89 | 80 | ====================== |
149 | 140 | code in the master process). |
150 | 141 | |
151 | 142 | .. note:: |
152 | Under Python 2.x, you need to install the 'futures' package to use this | |
143 | Under Python 2.x, you need to install the 'futures' package to use this | |
153 | 144 | feature. |
154 | 145 | |
155 | 146 | .. _Greenlets: https://github.com/python-greenlet/greenlet |
156 | 147 | .. _Eventlet: http://eventlet.net/ |
157 | 148 | .. _Gevent: http://www.gevent.org/ |
158 | 149 | .. _Hey: https://github.com/rakyll/hey |
159 | .. _aiohttp: https://aiohttp.readthedocs.io/en/stable/ | |
160 | .. _aiohttp_wsgi: https://aiohttp-wsgi.readthedocs.io/en/stable/index.html | |
150 | .. _aiohttp: https://docs.aiohttp.org/en/stable/deployment.html#nginx-gunicorn | |
161 | 151 | .. _`example`: https://github.com/benoitc/gunicorn/blob/master/examples/frameworks/flaskapp_aiohttp_wsgi.py |
152 | .. _Psycopg: http://initd.org/psycopg/ | |
153 | .. _psycogreen: https://bitbucket.org/dvarrazzo/psycogreen |
171 | 171 | ``tmpfs`` in Ubuntu; in AWS an EBS root instance volume may sometimes hang for |
172 | 172 | half a minute and during this time Gunicorn workers may completely block in |
173 | 173 | ``os.fchmod``. ``os.fchmod`` may introduce extra delays if the disk gets full. |
174 | Also Gunicon may refuse to start if it can't create the files when the disk is | |
174 | Also Gunicorn may refuse to start if it can't create the files when the disk is | |
175 | 175 | full. |
176 | 176 | |
177 | Currently to avoid these problems you can create a ``tmpfs`` mount (for a new | |
177 | Currently to avoid these problems you can use a ``tmpfs`` mount (for a new | |
178 | 178 | directory or for ``/tmp``) and pass its path to ``--worker-tmp-dir``. First, |
179 | 179 | check whether your ``/tmp`` is disk-backed or RAM-backed:: |
180 | 180 | |
182 | 182 | Filesystem 1K-blocks Used Available Use% Mounted on |
183 | 183 | /dev/xvda1 ... ... ... ... / |
184 | 184 | |
185 | No luck. Let's create a new ``tmpfs`` mount:: | |
185 | No luck. If you are using Fedora or Ubuntu, you should already have a ``tmpfs`` | |
186 | mount at ``/dev/shm``:: | |
187 | ||
188 | $ df /dev/shm | |
189 | Filesystem 1K-blocks Used Available Use% Mounted on | |
190 | tmpfs ... ... ... ... /dev/shm | |
191 | ||
192 | In this case you can set ``--worker-tmp-dir /dev/shm``, otherwise you can | |
193 | create a new ``tmpfs`` mount:: | |
186 | 194 | |
187 | 195 | sudo cp /etc/fstab /etc/fstab.orig |
188 | 196 | sudo mkdir /mem |
22 | 22 | * Simple Python configuration |
23 | 23 | * Multiple worker configurations |
24 | 24 | * Various server hooks for extensibility |
25 | * Compatible with Python 2.x >= 2.6 or 3.x >= 3.2 | |
25 | * Compatible with Python 3.x >= 3.4 | |
26 | 26 | |
27 | 27 | |
28 | 28 | Contents |
3 | 3 | |
4 | 4 | .. highlight:: bash |
5 | 5 | |
6 | :Requirements: **Python 2.x >= 2.6** or **Python 3.x >= 3.2** | |
6 | :Requirements: **Python 3.x >= 3.4** | |
7 | 7 | |
8 | 8 | To install the latest released version of Gunicorn:: |
9 | 9 | |
39 | 39 | $ pip install gunicorn[gevent] # Or, using extra |
40 | 40 | |
41 | 41 | .. note:: |
42 | Both require ``greenlet``, which should get installed automatically, | |
42 | Both require ``greenlet``, which should get installed automatically. | |
43 | 43 | If its installation fails, you probably need to install |
44 | 44 | the Python headers. These headers are available in most package |
45 | 45 | managers. On Ubuntu the package name for ``apt-get`` is |
51 | 51 | installed, this is the most likely reason. |
52 | 52 | |
53 | 53 | |
54 | Extra Packages | |
55 | ============== | |
56 | Some Gunicorn options require additional packages. You can use the ``[extra]`` | |
57 | syntax to install these at the same time as Gunicorn. | |
58 | ||
59 | Most extra packages are needed for alternate worker types. See the | |
60 | `design docs`_ for more information on when you'll want to consider an | |
61 | alternate worker type. | |
62 | ||
63 | * ``gunicorn[eventlet]`` - Eventlet-based greenlets workers | |
64 | * ``gunicorn[gevent]`` - Gevent-based greenlets workers | |
65 | * ``gunicorn[gthread]`` - Threaded workers | |
66 | * ``gunicorn[tornado]`` - Tornado-based workers, not recommended | |
67 | ||
68 | If you are running more than one instance of Gunicorn, the :ref:`proc-name` | |
69 | setting will help distinguish between them in tools like ``ps`` and ``top``. | |
70 | ||
71 | * ``gunicorn[setproctitle]`` - Enables setting the process name | |
72 | ||
73 | Multiple extras can be combined, like | |
74 | ``pip install gunicorn[gevent,setproctitle]``. | |
75 | ||
54 | 76 | Debian GNU/Linux |
55 | 77 | ================ |
56 | 78 | |
57 | If you are using Debian GNU/Linux and it is recommended that you use | |
79 | If you are using Debian GNU/Linux it is recommended that you use | |
58 | 80 | system packages to install Gunicorn except maybe when you want to use |
59 | 81 | different versions of Gunicorn with virtualenv. This has a number of |
60 | 82 | advantages: |
98 | 120 | -------------------- |
99 | 121 | |
100 | 122 | The version of Gunicorn in the Debian_ "oldstable" distribution is 19.0 (June |
101 | 2014). you can install it using:: | |
123 | 2014). You can install it using:: | |
102 | 124 | |
103 | 125 | $ sudo apt-get install gunicorn |
104 | 126 | |
127 | 149 | Ubuntu |
128 | 150 | ====== |
129 | 151 | |
130 | Ubuntu_ 12.04 (trusty) or later contains Gunicorn package by default so that | |
152 | Ubuntu_ 12.04 (trusty) or later contains the Gunicorn package by default so that | |
131 | 153 | you can install it in the usual way:: |
132 | 154 | |
133 | 155 | $ sudo apt-get update |
1 | 1 | Changelog |
2 | 2 | ========= |
3 | 3 | |
4 | 19.9.0 / 2018/07/03 | |
5 | =================== | |
4 | 20.0 / 2019/10/30 | |
5 | ================= | |
6 | 6 | |
7 | - fix: address a regression that prevented syslog support from working | |
8 | (:issue:`1668`, :pr:`1773`) | |
9 | - fix: correctly set `REMOTE_ADDR` on versions of Python 3 affected by | |
10 | `Python Issue 30205 <https://bugs.python.org/issue30205>`_ | |
11 | (:issue:`1755`, :pr:`1796`) | |
12 | - fix: show zero response length correctly in access log (:pr:`1787`) | |
13 | - fix: prevent raising :exc:`AttributeError` when ``--reload`` is not passed | |
14 | in case of a :exc:`SyntaxError` raised from the WSGI application. | |
15 | (:issue:`1805`, :pr:`1806`) | |
16 | - The internal module ``gunicorn.workers.async`` was renamed to ``gunicorn.workers.base_async`` | |
17 | since ``async`` is now a reserved word in Python 3.7. | |
18 | (:pr:`1527`) | |
7 | - Fixed `fdopen` `RuntimeWarning` in Python 3.8 | |
8 | - Added check and exception for str type on value in Response process_headers method. | |
9 | - Ensure WSGI header value is string before conducting regex search on it. | |
10 | - Added pypy3 to list of tested environments | |
11 | - Grouped `StopIteration` and `KeyboardInterrupt` exceptions with same body together in Arbiter.run() | |
12 | - Added `setproctitle` module to `extras_require` in setup.py | |
13 | - Avoid unnecessary chown of temporary files | |
14 | - Logging: Handle auth type case insensitively | |
15 | - Removed `util.import_module` | |
16 | - Removed fallback for `types.SimpleNamespace` in tests utils | |
17 | - Use `SourceFileLoader` instead of `execfile_` |
18 | - Use `importlib` instead of `__import__` and `eval` |
19 | - Fixed eventlet patching | |
20 | - Added optional `datadog <https://www.datadoghq.com>`_ tags for statsd metrics | |
21 | - Header values now are encoded using latin-1, not ascii. | |
22 | - Rewritten `parse_address` util added test | |
23 | - Removed redundant super() arguments | |
24 | - Simplify `futures` import in gthread module | |
25 | - Fixed `worker_connections` setting to also affect the Gthread worker type |
26 | - Fixed setting max_requests | |
27 | - Bump minimum Eventlet and Gevent versions to 0.24 and 1.4 | |
28 | - Use Python default SSL cipher list by default | |
29 | - handle `wsgi.input_terminated` extension | |
30 | - Simplify Paste Deployment documentation | |
31 | - Fix root logging: root and logger are same level. | |
32 | - Fixed typo in ssl_version documentation | |
33 | - Documented systemd deployment unit examples |
34 | - Added systemd sd_notify support | |
35 | - Fixed typo in gthread.py | |
36 | - Added `tornado <https://www.tornadoweb.org/>`_ 5 and 6 support | |
37 | - Declare our setuptools dependency | |
38 | - Added support to `--bind` to open file descriptors | |
39 | - Document how to serve WSGI app modules from Gunicorn | |
40 | - Provide guidance on X-Forwarded-For access log in documentation | |
41 | - Add support for named constants in the `--ssl-version` flag | |
42 | - Clarify log format usage of header & environment in documentation | |
43 | - Fixed systemd documentation to properly setup gunicorn unix socket | |
44 | - Prevent removal of unix socket for reuse_port |
45 | - Fix `ResourceWarning` when reading a Python config module | |
46 | - Remove unnecessary call to dict keys method | |
47 | - Support str and bytes for UNIX socket addresses | |
48 | - fixed `InotifyReloader`: handle `module.__file__` is None |
49 | - `/dev/shm` as a convenient alternative to making your own tmpfs mount in fchmod FAQ | |
50 | - fix examples to work on python3 | |
51 | - Fix typo in `--max-requests` documentation | |
52 | - Clear tornado ioloop before os.fork | |
53 | - Miscellaneous fixes and improvement for linting using Pylint | |
19 | 54 | |
20 | 19.8.1 / 2018/04/30 | |
21 | =================== | |
55 | Breaking Change | |
56 | +++++++++++++++ | |
22 | 57 | |
23 | - fix: secure scheme headers when bound to a unix socket | |
24 | (:issue:`1766`, :pr:`1767`) | |
25 | ||
26 | 19.8.0 / 2018/04/28 | |
27 | =================== | |
28 | ||
29 | - Eventlet 0.21.0 support (:issue:`1584`) | |
30 | - Tornado 5 support (:issue:`1728`, :pr:`1752`) | |
31 | - support watching additional files with ``--reload-extra-file`` | |
32 | (:pr:`1527`) | |
33 | - support configuring logging with a dictionary with ``--logging-config-dict`` | |
34 | (:issue:`1087`, :pr:`1110`, :pr:`1602`) | |
35 | - add support for the ``--config`` flag in the ``GUNICORN_CMD_ARGS`` environment | |
36 | variable (:issue:`1576`, :pr:`1581`) | |
37 | - disable ``SO_REUSEPORT`` by default and add the ``--reuse-port`` setting | |
38 | (:issue:`1553`, :issue:`1603`, :pr:`1669`) | |
39 | - fix: installing `inotify` on MacOS no longer breaks the reloader | |
40 | (:issue:`1540`, :pr:`1541`) | |
41 | - fix: do not throw ``TypeError`` when ``SO_REUSEPORT`` is not available | |
42 | (:issue:`1501`, :pr:`1491`) | |
43 | - fix: properly decode HTTP paths containing certain non-ASCII characters | |
44 | (:issue:`1577`, :pr:`1578`) | |
45 | - fix: remove whitespace when logging header values under gevent (:pr:`1607`) | |
46 | - fix: close unlinked temporary files (:issue:`1327`, :pr:`1428`) | |
47 | - fix: parse ``--umask=0`` correctly (:issue:`1622`, :pr:`1632`) | |
48 | - fix: allow loading applications using relative file paths | |
49 | (:issue:`1349`, :pr:`1481`) | |
50 | - fix: force blocking mode on the gevent sockets (:issue:`880`, :pr:`1616`) | |
51 | - fix: preserve leading `/` in request path (:issue:`1512`, :pr:`1511`) | |
52 | - fix: forbid contradictory secure scheme headers | |
53 | - fix: handle malformed basic authentication headers in access log | |
54 | (:issue:`1683`, :pr:`1684`) | |
55 | - fix: defer handling of ``USR1`` signal to a new greenlet under gevent | |
56 | (:issue:`1645`, :pr:`1651`) | |
57 | - fix: the threaded worker would sometimes close the wrong keep-alive | |
58 | connection under Python 2 (:issue:`1698`, :pr:`1699`) | |
59 | - fix: re-open log files on ``USR1`` signal using ``handler._open`` to | |
60 | support subclasses of ``FileHandler`` (:issue:`1739`, :pr:`1742`) | |
61 | - deprecation: the ``gaiohttp`` worker is deprecated, see the | |
62 | :ref:`worker-class` documentation for more information | |
63 | (:issue:`1338`, :pr:`1418`, :pr:`1569`) | |
58 | - Removed gaiohttp worker | |
59 | - Drop support for Python 2.x | |
60 | - Drop support for EOL Python 3.2 and 3.3 | |
64 | 61 | |
65 | 62 | |
66 | 63 | History |
69 | 66 | .. toctree:: |
70 | 67 | :titlesonly: |
71 | 68 | |
69 | 2018-news | |
72 | 70 | 2017-news |
73 | 71 | 2016-news |
74 | 72 | 2015-news |
3 | 3 | |
4 | 4 | .. highlight:: bash |
5 | 5 | |
6 | You can run Gunicorn by using commands or integrate with Django or Paster. For | |
7 | deploying Gunicorn in production see :doc:`deploy`. | |
6 | You can run Gunicorn by using commands or integrate with popular frameworks | |
7 | like Django, Pyramid, or TurboGears. For deploying Gunicorn in production see | |
8 | :doc:`deploy`. | |
8 | 9 | |
9 | 10 | Commands |
10 | 11 | ======== |
51 | 52 | * ``-c CONFIG, --config=CONFIG`` - Specify a config file in the form |
52 | 53 | ``$(PATH)``, ``file:$(PATH)``, or ``python:$(MODULE_NAME)``. |
53 | 54 | * ``-b BIND, --bind=BIND`` - Specify a server socket to bind. Server sockets |
54 | can be any of ``$(HOST)``, ``$(HOST):$(PORT)``, or ``unix:$(PATH)``. | |
55 | An IP is a valid ``$(HOST)``. | |
55 | can be any of ``$(HOST)``, ``$(HOST):$(PORT)``, ``fd://$(FD)``, or | |
56 | ``unix:$(PATH)``. An IP is a valid ``$(HOST)``. | |
56 | 57 | * ``-w WORKERS, --workers=WORKERS`` - The number of worker processes. This |
57 | 58 | number should generally be between 2-4 workers per core in the server. |
58 | 59 | Check the :ref:`faq` for ideas on tuning this parameter. |
60 | 61 | to run. You'll definitely want to read the production page for the |
61 | 62 | implications of this parameter. You can set this to ``$(NAME)`` |
62 | 63 | where ``$(NAME)`` is one of ``sync``, ``eventlet``, ``gevent``, |
63 | ``tornado``, ``gthread``, ``gaiohttp`` (deprecated). | |
64 | ``tornado``, ``gthread``. | |
64 | 65 | ``sync`` is the default. See the :ref:`worker-class` documentation for more |
65 | 66 | information. |
66 | 67 | * ``-n APP_NAME, --name=APP_NAME`` - If setproctitle_ is installed you can |
77 | 78 | Integration |
78 | 79 | =========== |
79 | 80 | |
80 | We also provide integration for both Django and Paster applications. | |
81 | Gunicorn also provides integration for Django and Paste Deploy applications. | |
81 | 82 | |
82 | 83 | Django |
83 | 84 | ------ |
103 | 104 | |
104 | 105 | $ gunicorn --env DJANGO_SETTINGS_MODULE=myproject.settings myproject.wsgi |
105 | 106 | |
106 | Paste | |
107 | ----- | |
107 | Paste Deployment | |
108 | ---------------- | |
108 | 109 | |
109 | If you are a user/developer of a paste-compatible framework/app (as | |
110 | Pyramid, Pylons and Turbogears) you can use the | |
111 | `--paste <http://docs.gunicorn.org/en/latest/settings.html#paste>`_ option | |
112 | to run your application. | |
110 | Frameworks such as Pyramid and Turbogears are typically configured using Paste | |
111 | Deployment configuration files. If you would like to use these files with | |
112 | Gunicorn, there are two approaches. | |
113 | ||
114 | As a server runner, Gunicorn can serve your application using the commands from | |
115 | your framework, such as ``pserve`` or ``gearbox``. To use Gunicorn with these | |
116 | commands, specify it as a server in your configuration file: | |
117 | ||
118 | .. code-block:: ini | |
119 | ||
120 | [server:main] | |
121 | use = egg:gunicorn#main | |
122 | host = 127.0.0.1 | |
123 | port = 8080 | |
124 | workers = 3 | |
125 | ||
126 | This approach is the quickest way to get started with Gunicorn, but there are | |
127 | some limitations. Gunicorn will have no control over how the application is | |
128 | loaded, so settings such as reload_ will have no effect and Gunicorn will be | |
129 | unable to hot upgrade a running application. Using the daemon_ option may | |
130 | confuse your command line tool. Instead, use the built-in support for these | |
131 | features provided by that tool. For example, run ``pserve --reload`` instead of | |
132 | specifying ``reload = True`` in the server configuration block. For advanced | |
133 | configuration of Gunicorn, such as `Server Hooks`_, specifying a Gunicorn |
134 | configuration file using the ``config`` key is supported. | |
135 | ||
136 | To use the full power of Gunicorn's reloading and hot code upgrades, use the | |
137 | `paste option`_ to run your application instead. When used this way, Gunicorn | |
138 | will use the application defined by the PasteDeploy configuration file, but | |
139 | Gunicorn will not use any server configuration defined in the file. Instead, | |
140 | `configure gunicorn`_. | |
113 | 141 | |
114 | 142 | For example:: |
115 | 143 | |
119 | 147 | |
120 | 148 | $ gunicorn --paste development.ini#admin -b :8080 --chdir /path/to/project |
121 | 149 | |
122 | It is all here. No configuration files nor additional Python modules to write! | |
150 | With both approaches, Gunicorn will use any loggers section found in the Paste |
151 | Deployment configuration file, unless instructed otherwise by specifying | |
152 | additional `logging settings`_. | |
153 | ||
154 | .. _reload: http://docs.gunicorn.org/en/latest/settings.html#reload | |
155 | .. _daemon: http://docs.gunicorn.org/en/latest/settings.html#daemon | |
156 | .. _Server Hooks: http://docs.gunicorn.org/en/latest/settings.html#server-hooks | |
157 | .. _paste option: http://docs.gunicorn.org/en/latest/settings.html#paste | |
158 | .. _configure gunicorn: http://docs.gunicorn.org/en/latest/configure.html | |
159 | .. _logging settings: http://docs.gunicorn.org/en/latest/settings.html#logging |
180 | 180 | D request time in microseconds |
181 | 181 | L request time in decimal seconds |
182 | 182 | p process ID |
183 | {Header}i request header | |
184 | {Header}o response header | |
185 | {Variable}e environment variable | |
183 | {header}i request header | |
184 | {header}o response header | |
185 | {variable}e environment variable | |
186 | 186 | =========== =========== |
187 | ||
188 | Use lowercase for header and environment variable names, and put | |
189 | ``{...}x`` names inside ``%(...)s``. For example:: | |
190 | ||
191 | %({x-forwarded-for}i)s | |
187 | 192 | |
188 | 193 | .. _errorlog: |
189 | 194 | |
357 | 362 | |
358 | 363 | .. versionadded:: 19.1 |
359 | 364 | |
365 | .. _dogstatsd-tags: | |
366 | ||
367 | dogstatsd_tags | |
368 | ~~~~~~~~~~~~~~ | |
369 | ||
370 | * ``--dogstatsd-tags DOGSTATSD_TAGS`` | |
371 | * ``(empty string)`` | |
372 | ||
373 | A comma-delimited list of datadog statsd (dogstatsd) tags to append to statsd metrics. | |
374 | ||
375 | .. versionadded:: 20 | |
376 | ||
360 | 377 | .. _statsd-prefix: |
361 | 378 | |
362 | 379 | statsd_prefix |
430 | 447 | * ``--ssl-version`` |
431 | 448 | * ``_SSLMethod.PROTOCOL_TLS`` |
432 | 449 | |
433 | SSL version to use (see stdlib ssl module's) | |
450 | SSL version to use. | |
451 | ||
452 | ============= ============ | |
453 | --ssl-version Description | |
454 | ============= ============ | |
455 | SSLv3 SSLv3 is not-secure and is strongly discouraged. | |
456 | SSLv23 Alias for TLS. Deprecated in Python 3.6, use TLS. | |
457 | TLS Negotiate highest possible version between client/server. | |
458 | Can yield SSL. (Python 3.6+) | |
459 | TLSv1 TLS 1.0 | |
460 | TLSv1_1 TLS 1.1 (Python 3.4+) | |
461 | TLSv1_2 TLS 1.2 (Python 3.4+) | |
462 | TLS_SERVER Auto-negotiate the highest protocol version like TLS, | |
463 | but only support server-side SSLSocket connections. | |
464 | (Python 3.6+) | |
465 | ============= ============ | |
434 | 466 | |
435 | 467 | .. versionchanged:: 19.7 |
436 | 468 | The default value has been changed from ``ssl.PROTOCOL_TLSv1`` to |
437 | 469 | ``ssl.PROTOCOL_SSLv23``. |
470 | .. versionchanged:: 20.0 | |
471 | This setting now accepts string names based on ``ssl.PROTOCOL_`` | |
472 | constants. | |
438 | 473 | |
439 | 474 | .. _cert-reqs: |
440 | 475 | |
482 | 517 | ~~~~~~~ |
483 | 518 | |
484 | 519 | * ``--ciphers`` |
485 | * ``TLSv1`` | |
486 | ||
487 | Ciphers to use (see stdlib ssl module's) | |
520 | * ``None`` | |
521 | ||
522 | SSL Cipher suite to use, in the format of an OpenSSL cipher list. | |
523 | ||
524 | By default we use the default cipher list from Python's ``ssl`` module, | |
525 | which contains ciphers considered strong at the time of each Python | |
526 | release. | |
527 | ||
528 | As a recommended alternative, the Open Web App Security Project (OWASP) | |
529 | offers `a vetted set of strong cipher strings rated A+ to C- | |
530 | <https://www.owasp.org/index.php/TLS_Cipher_String_Cheat_Sheet>`_. | |
531 | OWASP provides details on user-agent compatibility at each security level. | |
532 | ||
533 | See the `OpenSSL Cipher List Format Documentation | |
534 | <https://www.openssl.org/docs/manmaster/man1/ciphers.html#CIPHER-LIST-FORMAT>`_ | |
535 | for details on the format of an OpenSSL cipher list. | |
488 | 536 | |
489 | 537 | Security |
490 | 538 | -------- |
1102 | 1150 | |
1103 | 1151 | The socket to bind. |
1104 | 1152 | |
1105 | A string of the form: ``HOST``, ``HOST:PORT``, ``unix:PATH``. An IP is | |
1106 | a valid ``HOST``. | |
1153 | A string of the form: ``HOST``, ``HOST:PORT``, ``unix:PATH``, | |
1154 | ``fd://FD``. An IP is a valid ``HOST``. | |
1155 | ||
1156 | .. versionchanged:: 20.0 | |
1157 | Support for ``fd://FD`` got added. | |
1107 | 1158 | |
1108 | 1159 | Multiple addresses can be bound. ex.:: |
1109 | 1160 | |
1162 | 1213 | The default class (``sync``) should handle most "normal" types of |
1163 | 1214 | workloads. You'll want to read :doc:`design` for information on when |
1164 | 1215 | you might want to choose one of the other worker classes. Required |
1165 | libraries may be installed using setuptools' ``extra_require`` feature. | |
1216 | libraries may be installed using setuptools' ``extras_require`` feature. | |
1166 | 1217 | |
1167 | 1218 | A string referring to one of the following bundled classes: |
1168 | 1219 | |
1169 | 1220 | * ``sync`` |
1170 | * ``eventlet`` - Requires eventlet >= 0.9.7 (or install it via | |
1221 | * ``eventlet`` - Requires eventlet >= 0.24.1 (or install it via | |
1171 | 1222 | ``pip install gunicorn[eventlet]``) |
1172 | * ``gevent`` - Requires gevent >= 0.13 (or install it via | |
1223 | * ``gevent`` - Requires gevent >= 1.4 (or install it via | |
1173 | 1224 | ``pip install gunicorn[gevent]``) |
1174 | * ``tornado`` - Requires tornado >= 0.2 (or install it via | |
1225 | * ``tornado`` - Requires tornado >= 0.2 (or install it via | |
1175 | 1226 | ``pip install gunicorn[tornado]``) |
1176 | 1227 | * ``gthread`` - Python 2 requires the futures package to be installed |
1177 | 1228 | (or install it via ``pip install gunicorn[gthread]``) |
1178 | * ``gaiohttp`` - Deprecated. | |
1179 | 1229 | |
1180 | 1230 | Optionally, you can provide your own worker by giving Gunicorn a |
1181 | 1231 | Python path to a subclass of ``gunicorn.workers.base.Worker``. |
1182 | 1232 | This alternative syntax will load the gevent class: |
1183 | 1233 | ``gunicorn.workers.ggevent.GeventWorker``. |
1184 | 1234 | |
1185 | .. deprecated:: 19.8 | |
1186 | The ``gaiohttp`` worker is deprecated. Please use | |
1187 | ``aiohttp.worker.GunicornWebWorker`` instead. See | |
1188 | :ref:`asyncio-workers` for more information on how to use it. | |
1189 | ||
1190 | 1235 | .. _threads: |
1191 | 1236 | |
1192 | 1237 | threads |
1234 | 1279 | |
1235 | 1280 | The maximum number of requests a worker will process before restarting. |
1236 | 1281 | |
1237 | Any value greater than zero will limit the number of requests a work | |
1282 | Any value greater than zero will limit the number of requests a worker | |
1238 | 1283 | will process before automatically restarting. This is a simple method |
1239 | 1284 | to help limit the damage of memory leaks. |
1240 | 1285 |
4 | 4 | # |
5 | 5 | # Example code from Eventlet sources |
6 | 6 | |
7 | from wsgiref.validate import validator | |
8 | ||
9 | 7 | from gunicorn import __version__ |
10 | 8 | |
11 | 9 | |
12 | @validator | |
13 | 10 | def app(environ, start_response): |
14 | 11 | """Simplest possible application object""" |
15 | 12 | |
23 | 20 | response_headers = [ |
24 | 21 | ('Content-type', 'text/plain'), |
25 | 22 | ('Content-Length', str(len(data))), |
26 | ('X-Gunicorn-Version', __version__), | |
27 | ("Test", "test тест"), | |
23 | ('X-Gunicorn-Version', __version__) | |
28 | 24 | ] |
29 | 25 | start_response(status, response_headers) |
30 | 26 | return iter([data]) |
199 | 199 | |
200 | 200 | ## get traceback info |
201 | 201 | import threading, sys, traceback |
202 | id2name = dict([(th.ident, th.name) for th in threading.enumerate()]) | |
202 | id2name = {th.ident: th.name for th in threading.enumerate()} | |
203 | 203 | code = [] |
204 | 204 | for threadId, stack in sys._current_frames().items(): |
205 | 205 | code.append("\n# Thread: %s(%d)" % (id2name.get(threadId,""), |
9 | 9 | |
10 | 10 | class GunicornSubProcessTestMiddleware(object): |
11 | 11 | def __init__(self): |
12 | super(GunicornSubProcessTestMiddleware, self).__init__() | |
12 | super().__init__() | |
13 | 13 | self.queue = Queue() |
14 | 14 | self.process = Process(target=child_process, args=(self.queue,)) |
15 | 15 | self.process.start() |
2 | 2 | import os |
3 | 3 | from django import forms |
4 | 4 | from django.http import HttpResponse |
5 | from django.shortcuts import render_to_response | |
5 | from django.shortcuts import render | |
6 | 6 | from django.template import RequestContext |
7 | 7 | |
8 | 8 | |
37 | 37 | else: |
38 | 38 | form = MsgForm() |
39 | 39 | |
40 | return render_to_response('home.html', { | |
40 | ||
41 | ||
42 | return render(request, 'home.html', { | |
41 | 43 | 'form': form, |
42 | 44 | 'subject': subject, |
43 | 45 | 'message': message, |
44 | 46 | 'size': size |
45 | }, RequestContext(request)) | |
47 | }) | |
46 | 48 | |
47 | 49 | |
48 | 50 | def acsv(request): |
80 | 80 | ) |
81 | 81 | |
82 | 82 | # Make this unique, and don't share it with anybody. |
83 | SECRET_KEY = '' | |
83 | SECRET_KEY = 'what' | |
84 | ||
84 | 85 | |
85 | 86 | # List of callables that know how to import templates from various sources. |
86 | 87 | TEMPLATE_LOADERS = ( |
105 | 106 | |
106 | 107 | # Python dotted path to the WSGI application used by Django's runserver. |
107 | 108 | WSGI_APPLICATION = 'testing.wsgi.application' |
109 | ||
110 | TEMPLATES = [ | |
111 | { | |
112 | 'BACKEND': 'django.template.backends.django.DjangoTemplates', | |
113 | 'DIRS': [], | |
114 | 'APP_DIRS': True, | |
115 | 'OPTIONS': { | |
116 | # ... some options here ... | |
117 | }, | |
118 | }, | |
119 | ] | |
108 | 120 | |
109 | 121 | TEMPLATE_DIRS = ( |
110 | 122 | # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates". |
12 | 12 | # url(r'^admin/doc/', include('django.contrib.admindocs.urls')), |
13 | 13 | |
14 | 14 | # Uncomment the next line to enable the admin: |
15 | url(r'^admin/', include(admin.site.urls)), | |
15 | url(r'^admin/', admin.site.urls), | |
16 | 16 | |
17 | 17 | url(r'^', include("testing.apps.someapp.urls")), |
18 | 18 | ] |
6 | 6 | @app.route('/') |
7 | 7 | def index(): |
8 | 8 | buf = io.BytesIO() |
9 | buf.write('hello world') | |
9 | buf.write(b'hello world') | |
10 | 10 | buf.seek(0) |
11 | 11 | return send_file(buf, |
12 | 12 | attachment_filename="testing.txt", |
12 | 12 | log.info("Hello Info!") |
13 | 13 | log.warn("Hello Warn!") |
14 | 14 | log.error("Hello Error!") |
15 | return ["Hello World!\n"] | |
15 | return [b"Hello World!\n"] |
9 | 9 | class TestIter(object): |
10 | 10 | |
11 | 11 | def __iter__(self): |
12 | lines = ['line 1\n', 'line 2\n'] | |
12 | lines = [b'line 1\n', b'line 2\n'] | |
13 | 13 | for line in lines: |
14 | 14 | yield line |
15 | 15 | time.sleep(20) |
1 | 1 | Use this config file in your script like this: |
2 | 2 | |
3 | 3 | $ gunicorn project_name.wsgi:application -c read_django_settings.py |
4 | ||
5 | You need to replace the exec() call if you want it to work on Python 2. | |
6 | 4 | """ |
7 | 5 | |
8 | 6 | settings_dict = {} |
0 | # -*- coding: utf-8 - | |
1 | # | |
2 | # This file is part of gunicorn released under the MIT license. | |
3 | # See the NOTICE for more information. | |
4 | # | |
5 | # Simple example of readline, reading from a stream then echoing the response | |
6 | # | |
7 | # Usage: | |
8 | # | |
9 | # Launch a server with the app in a terminal | |
10 | # | |
11 | # $ gunicorn -w3 readline:app | |
12 | # | |
13 | # Then in another terminal launch the following command: | |
14 | # | |
15 | # $ curl -XPOST -d'test\r\ntest2\r\n' -H"Transfer-Encoding: Chunked" http://localhost:8000 | |
16 | ||
17 | ||
18 | ||
19 | from gunicorn import __version__ | |
20 | ||
21 | ||
def app(environ, start_response):
    """Simplest possible application object"""
    status = '200 OK'

    headers = [
        ('Content-type', 'text/plain'),
        ('Transfer-Encoding', "chunked"),
        ('X-Gunicorn-Version', __version__),
        #("Test", "test тест"),
    ]
    start_response(status, headers)

    # Read the chunked request body line by line until EOF (b"" sentinel),
    # echoing each line back to the client.
    stream = environ['wsgi.input']
    echoed = []
    for raw_line in iter(stream.readline, b""):
        print(raw_line)  # mirror each received line to stdout, as before
        echoed.append(raw_line)

    return iter(echoed)
0 | # -*- coding: utf-8 - | |
1 | # | |
2 | # This file is part of gunicorn released under the MIT license. | |
3 | # See the NOTICE for more information. | |
4 | # | |
5 | # Simple example of readline, reading from a stream then echoing the response | |
6 | # | |
7 | # Usage: | |
8 | # | |
9 | # Launch a server with the app in a terminal | |
10 | # | |
11 | # $ gunicorn -w3 readline_app:app | |
12 | # | |
13 | # Then in another terminal launch the following command: | |
14 | # | |
15 | # $ curl -XPOST -d'test\r\ntest2\r\n' -H"Transfer-Encoding: Chunked" http://localhost:8000 | |
16 | ||
17 | ||
18 | ||
19 | from gunicorn import __version__ | |
20 | ||
21 | ||
22 | def app(environ, start_response): | |
23 | """Simplest possible application object""" | |
24 | status = '200 OK' | |
25 | ||
26 | response_headers = [ | |
27 | ('Content-type', 'text/plain'), | |
28 | ('Transfer-Encoding', "chunked"), | |
29 | ('X-Gunicorn-Version', __version__) | |
30 | ] | |
31 | start_response(status, response_headers) | |
32 | ||
33 | body = environ['wsgi.input'] | |
34 | ||
35 | lines = [] | |
36 | while True: | |
37 | line = body.readline() | |
38 | if line == b"": | |
39 | break | |
40 | print(line) | |
41 | lines.append(line) | |
42 | ||
43 | return iter(lines) |
7 | 7 | # This file is part of gunicorn released under the MIT license. |
8 | 8 | # See the NOTICE for more information. |
9 | 9 | |
10 | from __future__ import unicode_literals | |
11 | ||
12 | 10 | import multiprocessing |
13 | 11 | |
14 | 12 | import gunicorn.app.base |
15 | ||
16 | from gunicorn.six import iteritems | |
17 | 13 | |
18 | 14 | |
19 | 15 | def number_of_workers(): |
38 | 34 | def __init__(self, app, options=None): |
39 | 35 | self.options = options or {} |
40 | 36 | self.application = app |
41 | super(StandaloneApplication, self).__init__() | |
37 | super().__init__() | |
42 | 38 | |
43 | 39 | def load_config(self): |
44 | config = dict([(key, value) for key, value in iteritems(self.options) | |
45 | if key in self.cfg.settings and value is not None]) | |
46 | for key, value in iteritems(config): | |
40 | config = {key: value for key, value in self.options.items() | |
41 | if key in self.cfg.settings and value is not None} | |
42 | for key, value in config.items(): | |
47 | 43 | self.cfg.set(key.lower(), value) |
48 | 44 | |
49 | 45 | def load(self): |
20 | 20 | ('Content-type', 'text/plain'), |
21 | 21 | ('Content-Length', str(len(data))), |
22 | 22 | ('X-Gunicorn-Version', __version__), |
23 | #("Test", "test тест"), | |
23 | ('Foo', 'B\u00e5r'), # Foo: Bår | |
24 | 24 | ] |
25 | 25 | start_response(status, response_headers) |
26 | 26 | return iter([data]) |
1 | 1 | import collections |
2 | 2 | import errno |
3 | 3 | import re |
4 | from hashlib import sha1 | |
4 | import hashlib | |
5 | 5 | import base64 |
6 | 6 | from base64 import b64encode, b64decode |
7 | 7 | import socket |
10 | 10 | from socket import error as SocketError |
11 | 11 | |
12 | 12 | import gevent |
13 | from gunicorn.workers.async import ALREADY_HANDLED | |
13 | from gunicorn.workers.base_async import ALREADY_HANDLED | |
14 | 14 | |
15 | 15 | logger = logging.getLogger(__name__) |
16 | 16 | |
17 | WS_KEY = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11" | |
17 | WS_KEY = b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11" | |
18 | 18 | |
19 | 19 | class WebSocketWSGI(object): |
20 | 20 | def __init__(self, handler): |
79 | 79 | if ws_extensions: |
80 | 80 | handshake_reply += 'Sec-WebSocket-Extensions: %s\r\n' % ', '.join(ws_extensions) |
81 | 81 | |
82 | key_hash = hashlib.sha1() | |
83 | key_hash.update(key.encode()) | |
84 | key_hash.update(WS_KEY) | |
85 | ||
82 | 86 | handshake_reply += ( |
83 | 87 | "Sec-WebSocket-Origin: %s\r\n" |
84 | 88 | "Sec-WebSocket-Location: ws://%s%s\r\n" |
89 | 93 | environ.get('HTTP_HOST'), |
90 | 94 | ws.path, |
91 | 95 | version, |
92 | base64.b64encode(sha1(key + WS_KEY).digest()) | |
96 | base64.b64encode(key_hash.digest()).decode() | |
93 | 97 | )) |
94 | 98 | |
95 | 99 | else: |
101 | 105 | environ.get('HTTP_HOST'), |
102 | 106 | ws.path)) |
103 | 107 | |
104 | sock.sendall(handshake_reply) | |
108 | sock.sendall(handshake_reply.encode()) | |
105 | 109 | |
106 | 110 | try: |
107 | 111 | self.handler(ws) |
108 | except socket.error as e: | |
109 | if e[0] != errno.EPIPE: | |
110 | raise | |
112 | except BrokenPipeError: | |
113 | pass | |
114 | else: | |
115 | raise | |
111 | 116 | # use this undocumented feature of grainbows to ensure that it |
112 | 117 | # doesn't barf on the fact that we didn't call start_response |
113 | 118 | return ALREADY_HANDLED |
162 | 167 | """ |
163 | 168 | if base64: |
164 | 169 | buf = b64encode(buf) |
170 | else: | |
171 | buf = buf.encode() | |
165 | 172 | |
166 | 173 | b1 = 0x80 | (opcode & 0x0f) # FIN + opcode |
167 | 174 | payload_len = len(buf) |
374 | 381 | return None |
375 | 382 | # no parsed messages, must mean buf needs more data |
376 | 383 | delta = self.socket.recv(8096) |
377 | if delta == '': | |
384 | if delta == b'': | |
378 | 385 | return None |
379 | 386 | self._buf += delta |
380 | 387 | msgs = self._parse_messages() |
394 | 401 | |
395 | 402 | elif self.version == 76 and not self.websocket_closed: |
396 | 403 | try: |
397 | self.socket.sendall("\xff\x00") | |
404 | self.socket.sendall(b"\xff\x00") | |
398 | 405 | except SocketError: |
399 | 406 | # Sometimes, like when the remote side cuts off the connection, |
400 | 407 | # we don't care about this. |
424 | 431 | ws.send(m) |
425 | 432 | |
426 | 433 | elif ws.path == '/data': |
427 | for i in xrange(10000): | |
434 | for i in range(10000): | |
428 | 435 | ws.send("0 %s %s\n" % (i, random.random())) |
429 | 436 | gevent.sleep(0.1) |
430 | 437 | |
438 | 445 | 'websocket.html')).read() |
439 | 446 | data = data % environ |
440 | 447 | start_response('200 OK', [('Content-Type', 'text/html'), |
441 | ('Content-Length', len(data))]) | |
442 | return [data] | |
448 | ('Content-Length', str(len(data)))]) | |
449 | return [data.encode()] | |
443 | 450 | else: |
444 | 451 | return wsapp(environ, start_response) |
10 | 10 | from socket import error as SocketError |
11 | 11 | |
12 | 12 | import eventlet |
13 | from gunicorn.workers.async import ALREADY_HANDLED | |
13 | from gunicorn.workers.base_async import ALREADY_HANDLED | |
14 | 14 | from eventlet import pools |
15 | 15 | |
16 | 16 | logger = logging.getLogger(__name__) |
17 | 17 | |
18 | WS_KEY = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11" | |
18 | WS_KEY = b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11" | |
19 | 19 | |
20 | 20 | class WebSocketWSGI(object): |
21 | 21 | def __init__(self, handler): |
80 | 80 | if ws_extensions: |
81 | 81 | handshake_reply += 'Sec-WebSocket-Extensions: %s\r\n' % ', '.join(ws_extensions) |
82 | 82 | |
83 | key_hash = sha1() | |
84 | key_hash.update(key.encode()) | |
85 | key_hash.update(WS_KEY) | |
86 | ||
83 | 87 | handshake_reply += ( |
84 | 88 | "Sec-WebSocket-Origin: %s\r\n" |
85 | 89 | "Sec-WebSocket-Location: ws://%s%s\r\n" |
90 | 94 | environ.get('HTTP_HOST'), |
91 | 95 | ws.path, |
92 | 96 | version, |
93 | base64.b64encode(sha1(key + WS_KEY).digest()) | |
97 | base64.b64encode(key_hash.digest()).decode() | |
94 | 98 | )) |
95 | 99 | |
96 | 100 | else: |
102 | 106 | environ.get('HTTP_HOST'), |
103 | 107 | ws.path)) |
104 | 108 | |
105 | sock.sendall(handshake_reply) | |
109 | sock.sendall(handshake_reply.encode()) | |
106 | 110 | |
107 | 111 | try: |
108 | 112 | self.handler(ws) |
109 | except socket.error as e: | |
110 | if e[0] != errno.EPIPE: | |
111 | raise | |
113 | except BrokenPipeError: | |
114 | pass | |
115 | else: | |
116 | raise | |
112 | 117 | # use this undocumented feature of grainbows to ensure that it |
113 | 118 | # doesn't barf on the fact that we didn't call start_response |
114 | 119 | return ALREADY_HANDLED |
163 | 168 | """ |
164 | 169 | if base64: |
165 | 170 | buf = b64encode(buf) |
171 | else: | |
172 | buf = buf.encode() | |
166 | 173 | |
167 | 174 | b1 = 0x80 | (opcode & 0x0f) # FIN + opcode |
168 | 175 | payload_len = len(buf) |
375 | 382 | return None |
376 | 383 | # no parsed messages, must mean buf needs more data |
377 | 384 | delta = self.socket.recv(8096) |
378 | if delta == '': | |
385 | if delta == b'': | |
379 | 386 | return None |
380 | 387 | self._buf += delta |
381 | 388 | msgs = self._parse_messages() |
395 | 402 | |
396 | 403 | elif self.version == 76 and not self.websocket_closed: |
397 | 404 | try: |
398 | self.socket.sendall("\xff\x00") | |
405 | self.socket.sendall(b"\xff\x00") | |
399 | 406 | except SocketError: |
400 | 407 | # Sometimes, like when the remote side cuts off the connection, |
401 | 408 | # we don't care about this. |
424 | 431 | ws.send(m) |
425 | 432 | |
426 | 433 | elif ws.path == '/data': |
427 | for i in xrange(10000): | |
434 | for i in range(10000): | |
428 | 435 | ws.send("0 %s %s\n" % (i, random.random())) |
429 | 436 | eventlet.sleep(0.1) |
430 | 437 | |
438 | 445 | 'websocket.html')).read() |
439 | 446 | data = data % environ |
440 | 447 | start_response('200 OK', [('Content-Type', 'text/html'), |
441 | ('Content-Length', len(data))]) | |
442 | return [data] | |
448 | ('Content-Length', str(len(data)))]) | |
449 | return [data.encode()] | |
443 | 450 | else: |
444 | 451 | return wsapp(environ, start_response) |
7 | 7 | class MemoryWatch(threading.Thread): |
8 | 8 | |
9 | 9 | def __init__(self, server, max_mem): |
10 | super(MemoryWatch, self).__init__() | |
10 | super().__init__() | |
11 | 11 | self.daemon = True |
12 | 12 | self.server = server |
13 | 13 | self.max_mem = max_mem |
2 | 2 | # This file is part of gunicorn released under the MIT license. |
3 | 3 | # See the NOTICE for more information. |
4 | 4 | |
5 | version_info = (19, 9, 0) | |
5 | version_info = (20, 0, 0) | |
6 | 6 | __version__ = ".".join([str(v) for v in version_info]) |
7 | 7 | SERVER_SOFTWARE = "gunicorn/%s" % __version__ |
0 | import sys | |
1 | ||
2 | from gunicorn import six | |
3 | ||
4 | PY26 = (sys.version_info[:2] == (2, 6)) | |
5 | PY33 = (sys.version_info >= (3, 3)) | |
6 | ||
7 | ||
8 | def _check_if_pyc(fname): | |
9 | """Return True if the extension is .pyc, False if .py | |
10 | and None if otherwise""" | |
11 | from imp import find_module | |
12 | from os.path import realpath, dirname, basename, splitext | |
13 | ||
14 | # Normalize the file-path for the find_module() | |
15 | filepath = realpath(fname) | |
16 | dirpath = dirname(filepath) | |
17 | module_name = splitext(basename(filepath))[0] | |
18 | ||
19 | # Validate and fetch | |
20 | try: | |
21 | fileobj, fullpath, (_, _, pytype) = find_module(module_name, [dirpath]) | |
22 | except ImportError: | |
23 | raise IOError("Cannot find config file. " | |
24 | "Path maybe incorrect! : {0}".format(filepath)) | |
25 | return pytype, fileobj, fullpath | |
26 | ||
27 | ||
28 | def _get_codeobj(pyfile): | |
29 | """ Returns the code object, given a python file """ | |
30 | from imp import PY_COMPILED, PY_SOURCE | |
31 | ||
32 | result, fileobj, fullpath = _check_if_pyc(pyfile) | |
33 | ||
34 | # WARNING: | |
35 | # fp.read() can blowup if the module is extremely large file. | |
36 | # Lookout for overflow errors. | |
37 | try: | |
38 | data = fileobj.read() | |
39 | finally: | |
40 | fileobj.close() | |
41 | ||
42 | # This is a .pyc file. Treat accordingly. | |
43 | if result is PY_COMPILED: | |
44 | # .pyc format is as follows: | |
45 | # 0 - 4 bytes: Magic number, which changes with each create of .pyc file. | |
46 | # First 2 bytes change with each marshal of .pyc file. Last 2 bytes is "\r\n". | |
47 | # 4 - 8 bytes: Datetime value, when the .py was last changed. | |
48 | # 8 - EOF: Marshalled code object data. | |
49 | # So to get code object, just read the 8th byte onwards till EOF, and | |
50 | # UN-marshal it. | |
51 | import marshal | |
52 | code_obj = marshal.loads(data[8:]) | |
53 | ||
54 | elif result is PY_SOURCE: | |
55 | # This is a .py file. | |
56 | code_obj = compile(data, fullpath, 'exec') | |
57 | ||
58 | else: | |
59 | # Unsupported extension | |
60 | raise Exception("Input file is unknown format: {0}".format(fullpath)) | |
61 | ||
62 | # Return code object | |
63 | return code_obj | |
64 | ||
65 | if six.PY3: | |
66 | def execfile_(fname, *args): | |
67 | if fname.endswith(".pyc"): | |
68 | code = _get_codeobj(fname) | |
69 | else: | |
70 | code = compile(open(fname, 'rb').read(), fname, 'exec') | |
71 | return six.exec_(code, *args) | |
72 | ||
73 | def bytes_to_str(b): | |
74 | if isinstance(b, six.text_type): | |
75 | return b | |
76 | return str(b, 'latin1') | |
77 | ||
78 | import urllib.parse | |
79 | ||
80 | def unquote_to_wsgi_str(string): | |
81 | return _unquote_to_bytes(string).decode('latin-1') | |
82 | ||
83 | _unquote_to_bytes = urllib.parse.unquote_to_bytes | |
84 | ||
85 | else: | |
86 | def execfile_(fname, *args): | |
87 | """ Overriding PY2 execfile() implementation to support .pyc files """ | |
88 | if fname.endswith(".pyc"): | |
89 | return six.exec_(_get_codeobj(fname), *args) | |
90 | return execfile(fname, *args) | |
91 | ||
92 | def bytes_to_str(s): | |
93 | if isinstance(s, unicode): | |
94 | return s.encode('utf-8') | |
95 | return s | |
96 | ||
97 | import urllib | |
98 | unquote_to_wsgi_str = urllib.unquote | |
99 | ||
100 | ||
101 | # The following code adapted from trollius.py33_exceptions | |
102 | def _wrap_error(exc, mapping, key): | |
103 | if key not in mapping: | |
104 | return | |
105 | new_err_cls = mapping[key] | |
106 | new_err = new_err_cls(*exc.args) | |
107 | ||
108 | # raise a new exception with the original traceback | |
109 | six.reraise(new_err_cls, new_err, | |
110 | exc.__traceback__ if hasattr(exc, '__traceback__') else sys.exc_info()[2]) | |
111 | ||
112 | if PY33: | |
113 | import builtins | |
114 | ||
115 | BlockingIOError = builtins.BlockingIOError | |
116 | BrokenPipeError = builtins.BrokenPipeError | |
117 | ChildProcessError = builtins.ChildProcessError | |
118 | ConnectionRefusedError = builtins.ConnectionRefusedError | |
119 | ConnectionResetError = builtins.ConnectionResetError | |
120 | InterruptedError = builtins.InterruptedError | |
121 | ConnectionAbortedError = builtins.ConnectionAbortedError | |
122 | PermissionError = builtins.PermissionError | |
123 | FileNotFoundError = builtins.FileNotFoundError | |
124 | ProcessLookupError = builtins.ProcessLookupError | |
125 | ||
126 | def wrap_error(func, *args, **kw): | |
127 | return func(*args, **kw) | |
128 | else: | |
129 | import errno | |
130 | import select | |
131 | import socket | |
132 | ||
133 | class BlockingIOError(OSError): | |
134 | pass | |
135 | ||
136 | class BrokenPipeError(OSError): | |
137 | pass | |
138 | ||
139 | class ChildProcessError(OSError): | |
140 | pass | |
141 | ||
142 | class ConnectionRefusedError(OSError): | |
143 | pass | |
144 | ||
145 | class InterruptedError(OSError): | |
146 | pass | |
147 | ||
148 | class ConnectionResetError(OSError): | |
149 | pass | |
150 | ||
151 | class ConnectionAbortedError(OSError): | |
152 | pass | |
153 | ||
154 | class PermissionError(OSError): | |
155 | pass | |
156 | ||
157 | class FileNotFoundError(OSError): | |
158 | pass | |
159 | ||
160 | class ProcessLookupError(OSError): | |
161 | pass | |
162 | ||
163 | _MAP_ERRNO = { | |
164 | errno.EACCES: PermissionError, | |
165 | errno.EAGAIN: BlockingIOError, | |
166 | errno.EALREADY: BlockingIOError, | |
167 | errno.ECHILD: ChildProcessError, | |
168 | errno.ECONNABORTED: ConnectionAbortedError, | |
169 | errno.ECONNREFUSED: ConnectionRefusedError, | |
170 | errno.ECONNRESET: ConnectionResetError, | |
171 | errno.EINPROGRESS: BlockingIOError, | |
172 | errno.EINTR: InterruptedError, | |
173 | errno.ENOENT: FileNotFoundError, | |
174 | errno.EPERM: PermissionError, | |
175 | errno.EPIPE: BrokenPipeError, | |
176 | errno.ESHUTDOWN: BrokenPipeError, | |
177 | errno.EWOULDBLOCK: BlockingIOError, | |
178 | errno.ESRCH: ProcessLookupError, | |
179 | } | |
180 | ||
181 | def wrap_error(func, *args, **kw): | |
182 | """ | |
183 | Wrap socket.error, IOError, OSError, select.error to raise new specialized | |
184 | exceptions of Python 3.3 like InterruptedError (PEP 3151). | |
185 | """ | |
186 | try: | |
187 | return func(*args, **kw) | |
188 | except (socket.error, IOError, OSError) as exc: | |
189 | if hasattr(exc, 'winerror'): | |
190 | _wrap_error(exc, _MAP_ERRNO, exc.winerror) | |
191 | # _MAP_ERRNO does not contain all Windows errors. | |
192 | # For some errors like "file not found", exc.errno should | |
193 | # be used (ex: ENOENT). | |
194 | _wrap_error(exc, _MAP_ERRNO, exc.errno) | |
195 | raise | |
196 | except select.error as exc: | |
197 | if exc.args: | |
198 | _wrap_error(exc, _MAP_ERRNO, exc.args[0]) | |
199 | raise | |
200 | ||
201 | if PY26: | |
202 | from urlparse import ( | |
203 | _parse_cache, MAX_CACHE_SIZE, clear_cache, _splitnetloc, SplitResult, | |
204 | scheme_chars, | |
205 | ) | |
206 | ||
207 | def urlsplit(url, scheme='', allow_fragments=True): | |
208 | """Parse a URL into 5 components: | |
209 | <scheme>://<netloc>/<path>?<query>#<fragment> | |
210 | Return a 5-tuple: (scheme, netloc, path, query, fragment). | |
211 | Note that we don't break the components up in smaller bits | |
212 | (e.g. netloc is a single string) and we don't expand % escapes.""" | |
213 | allow_fragments = bool(allow_fragments) | |
214 | key = url, scheme, allow_fragments, type(url), type(scheme) | |
215 | cached = _parse_cache.get(key, None) | |
216 | if cached: | |
217 | return cached | |
218 | if len(_parse_cache) >= MAX_CACHE_SIZE: # avoid runaway growth | |
219 | clear_cache() | |
220 | netloc = query = fragment = '' | |
221 | i = url.find(':') | |
222 | if i > 0: | |
223 | if url[:i] == 'http': # optimize the common case | |
224 | scheme = url[:i].lower() | |
225 | url = url[i+1:] | |
226 | if url[:2] == '//': | |
227 | netloc, url = _splitnetloc(url, 2) | |
228 | if (('[' in netloc and ']' not in netloc) or | |
229 | (']' in netloc and '[' not in netloc)): | |
230 | raise ValueError("Invalid IPv6 URL") | |
231 | if allow_fragments and '#' in url: | |
232 | url, fragment = url.split('#', 1) | |
233 | if '?' in url: | |
234 | url, query = url.split('?', 1) | |
235 | v = SplitResult(scheme, netloc, url, query, fragment) | |
236 | _parse_cache[key] = v | |
237 | return v | |
238 | for c in url[:i]: | |
239 | if c not in scheme_chars: | |
240 | break | |
241 | else: | |
242 | # make sure "url" is not actually a port number (in which case | |
243 | # "scheme" is really part of the path) | |
244 | rest = url[i+1:] | |
245 | if not rest or any(c not in '0123456789' for c in rest): | |
246 | # not a port number | |
247 | scheme, url = url[:i].lower(), rest | |
248 | ||
249 | if url[:2] == '//': | |
250 | netloc, url = _splitnetloc(url, 2) | |
251 | if (('[' in netloc and ']' not in netloc) or | |
252 | (']' in netloc and '[' not in netloc)): | |
253 | raise ValueError("Invalid IPv6 URL") | |
254 | if allow_fragments and '#' in url: | |
255 | url, fragment = url.split('#', 1) | |
256 | if '?' in url: | |
257 | url, query = url.split('?', 1) | |
258 | v = SplitResult(scheme, netloc, url, query, fragment) | |
259 | _parse_cache[key] = v | |
260 | return v | |
261 | ||
262 | else: | |
263 | from gunicorn.six.moves.urllib.parse import urlsplit | |
264 | ||
265 | ||
266 | import inspect | |
267 | ||
268 | if hasattr(inspect, 'signature'): | |
269 | positionals = ( | |
270 | inspect.Parameter.POSITIONAL_ONLY, | |
271 | inspect.Parameter.POSITIONAL_OR_KEYWORD, | |
272 | ) | |
273 | ||
274 | def get_arity(f): | |
275 | sig = inspect.signature(f) | |
276 | arity = 0 | |
277 | ||
278 | for param in sig.parameters.values(): | |
279 | if param.kind in positionals: | |
280 | arity += 1 | |
281 | ||
282 | return arity | |
283 | else: | |
284 | def get_arity(f): | |
285 | return len(inspect.getargspec(f)[0]) | |
286 | ||
287 | ||
288 | try: | |
289 | import html | |
290 | ||
291 | def html_escape(s): | |
292 | return html.escape(s) | |
293 | except ImportError: | |
294 | import cgi | |
295 | ||
296 | def html_escape(s): | |
297 | return cgi.escape(s, quote=True) |
1 | 1 | # |
2 | 2 | # This file is part of gunicorn released under the MIT license. |
3 | 3 | # See the NOTICE for more information. |
4 | from __future__ import print_function | |
5 | ||
4 | import importlib.machinery | |
6 | 5 | import os |
7 | 6 | import sys |
8 | 7 | import traceback |
9 | ||
10 | from gunicorn._compat import execfile_ | |
8 | import types | |
9 | ||
11 | 10 | from gunicorn import util |
12 | 11 | from gunicorn.arbiter import Arbiter |
13 | 12 | from gunicorn.config import Config, get_default_config_file |
14 | 13 | from gunicorn import debug |
14 | ||
15 | 15 | |
16 | 16 | class BaseApplication(object): |
17 | 17 | """ |
94 | 94 | if not os.path.exists(filename): |
95 | 95 | raise RuntimeError("%r doesn't exist" % filename) |
96 | 96 | |
97 | cfg = { | |
98 | "__builtins__": __builtins__, | |
99 | "__name__": "__config__", | |
100 | "__file__": filename, | |
101 | "__doc__": None, | |
102 | "__package__": None | |
103 | } | |
104 | 97 | try: |
105 | execfile_(filename, cfg, cfg) | |
98 | module_name = '__config__' | |
99 | mod = types.ModuleType(module_name) | |
100 | loader = importlib.machinery.SourceFileLoader(module_name, filename) | |
101 | loader.exec_module(mod) | |
106 | 102 | except Exception: |
107 | 103 | print("Failed to read config file: %s" % filename, file=sys.stderr) |
108 | 104 | traceback.print_exc() |
109 | 105 | sys.stderr.flush() |
110 | 106 | sys.exit(1) |
111 | 107 | |
112 | return cfg | |
108 | return vars(mod) | |
113 | 109 | |
114 | 110 | def get_config_from_module_name(self, module_name): |
115 | return vars(util.import_module(module_name)) | |
111 | return vars(importlib.import_module(module_name)) | |
116 | 112 | |
117 | 113 | def load_config_from_module_name_or_filename(self, location): |
118 | 114 | """ |
219 | 215 | if pythonpath not in sys.path: |
220 | 216 | sys.path.insert(0, pythonpath) |
221 | 217 | |
222 | super(Application, self).run() | |
218 | super().run() |
1 | 1 | # |
2 | 2 | # This file is part of gunicorn released under the MIT license. |
3 | 3 | # See the NOTICE for more information. |
4 | from __future__ import print_function | |
5 | 4 | |
6 | # pylint: skip-file | |
5 | import configparser | |
6 | import os | |
7 | 7 | |
8 | import os | |
9 | import pkg_resources | |
10 | import sys | |
8 | from paste.deploy import loadapp | |
11 | 9 | |
12 | try: | |
13 | import configparser as ConfigParser | |
14 | except ImportError: | |
15 | import ConfigParser | |
16 | ||
17 | from paste.deploy import loadapp, loadwsgi | |
18 | SERVER = loadwsgi.SERVER | |
19 | ||
20 | from gunicorn.app.base import Application | |
21 | from gunicorn.config import Config, get_default_config_file | |
22 | from gunicorn import util | |
10 | from gunicorn.app.wsgiapp import WSGIApplication | |
11 | from gunicorn.config import get_default_config_file | |
23 | 12 | |
24 | 13 | |
25 | def _has_logging_config(paste_file): | |
26 | cfg_parser = ConfigParser.ConfigParser() | |
27 | cfg_parser.read([paste_file]) | |
28 | return cfg_parser.has_section('loggers') | |
14 | def get_wsgi_app(config_uri, name=None, defaults=None): | |
15 | if ':' not in config_uri: | |
16 | config_uri = "config:%s" % config_uri | |
17 | ||
18 | return loadapp( | |
19 | config_uri, | |
20 | name=name, | |
21 | relative_to=os.getcwd(), | |
22 | global_conf=defaults, | |
23 | ) | |
29 | 24 | |
30 | 25 | |
31 | def paste_config(gconfig, config_url, relative_to, global_conf=None): | |
32 | # add entry to pkg_resources | |
33 | sys.path.insert(0, relative_to) | |
34 | pkg_resources.working_set.add_entry(relative_to) | |
35 | ||
36 | config_url = config_url.split('#')[0] | |
37 | cx = loadwsgi.loadcontext(SERVER, config_url, relative_to=relative_to, | |
38 | global_conf=global_conf) | |
39 | gc, lc = cx.global_conf.copy(), cx.local_conf.copy() | |
40 | cfg = {} | |
41 | ||
42 | host, port = lc.pop('host', ''), lc.pop('port', '') | |
43 | if host and port: | |
44 | cfg['bind'] = '%s:%s' % (host, port) | |
45 | elif host: | |
46 | cfg['bind'] = host.split(',') | |
47 | ||
48 | cfg['default_proc_name'] = gc.get('__file__') | |
49 | ||
50 | # init logging configuration | |
51 | config_file = config_url.split(':')[1] | |
52 | if _has_logging_config(config_file): | |
53 | cfg.setdefault('logconfig', config_file) | |
54 | ||
55 | for k, v in gc.items(): | |
56 | if k not in gconfig.settings: | |
57 | continue | |
58 | cfg[k] = v | |
59 | ||
60 | for k, v in lc.items(): | |
61 | if k not in gconfig.settings: | |
62 | continue | |
63 | cfg[k] = v | |
64 | ||
65 | return cfg | |
26 | def has_logging_config(config_file): | |
27 | parser = configparser.ConfigParser() | |
28 | parser.read([config_file]) | |
29 | return parser.has_section('loggers') | |
66 | 30 | |
67 | 31 | |
68 | def load_pasteapp(config_url, relative_to, global_conf=None): | |
69 | return loadapp(config_url, relative_to=relative_to, | |
70 | global_conf=global_conf) | |
32 | def serve(app, global_conf, **local_conf): | |
33 | """\ | |
34 | A Paste Deployment server runner. | |
71 | 35 | |
72 | class PasterBaseApplication(Application): | |
73 | gcfg = None | |
36 | Example configuration: | |
74 | 37 | |
75 | def app_config(self): | |
76 | return paste_config(self.cfg, self.cfgurl, self.relpath, | |
77 | global_conf=self.gcfg) | |
38 | [server:main] | |
39 | use = egg:gunicorn#main | |
40 | host = 127.0.0.1 | |
41 | port = 5000 | |
42 | """ | |
43 | config_file = global_conf['__file__'] | |
44 | gunicorn_config_file = local_conf.pop('config', None) | |
78 | 45 | |
79 | def load_config(self): | |
80 | super(PasterBaseApplication, self).load_config() | |
46 | host = local_conf.pop('host', '') | |
47 | port = local_conf.pop('port', '') | |
48 | if host and port: | |
49 | local_conf['bind'] = '%s:%s' % (host, port) | |
50 | elif host: | |
51 | local_conf['bind'] = host.split(',') | |
81 | 52 | |
82 | # reload logging conf | |
83 | if hasattr(self, "cfgfname"): | |
84 | parser = ConfigParser.ConfigParser() | |
85 | parser.read([self.cfgfname]) | |
86 | if parser.has_section('loggers'): | |
87 | from logging.config import fileConfig | |
88 | config_file = os.path.abspath(self.cfgfname) | |
89 | fileConfig(config_file, dict(__file__=config_file, | |
90 | here=os.path.dirname(config_file))) | |
53 | class PasterServerApplication(WSGIApplication): | |
54 | def load_config(self): | |
55 | self.cfg.set("default_proc_name", config_file) | |
91 | 56 | |
57 | if has_logging_config(config_file): | |
58 | self.cfg.set("logconfig", config_file) | |
92 | 59 | |
93 | class PasterApplication(PasterBaseApplication): | |
60 | if gunicorn_config_file: | |
61 | self.load_config_from_file(gunicorn_config_file) | |
62 | else: | |
63 | default_gunicorn_config_file = get_default_config_file() | |
64 | if default_gunicorn_config_file is not None: | |
65 | self.load_config_from_file(default_gunicorn_config_file) | |
94 | 66 | |
95 | def init(self, parser, opts, args): | |
96 | if len(args) != 1: | |
97 | parser.error("No application name specified.") | |
67 | for k, v in local_conf.items(): | |
68 | if v is not None: | |
69 | self.cfg.set(k.lower(), v) | |
98 | 70 | |
99 | cwd = util.getcwd() | |
100 | cfgfname = os.path.normpath(os.path.join(cwd, args[0])) | |
101 | cfgfname = os.path.abspath(cfgfname) | |
102 | if not os.path.exists(cfgfname): | |
103 | parser.error("Config file not found: %s" % cfgfname) | |
71 | def load(self): | |
72 | return app | |
104 | 73 | |
105 | self.cfgurl = 'config:%s' % cfgfname | |
106 | self.relpath = os.path.dirname(cfgfname) | |
107 | self.cfgfname = cfgfname | |
108 | ||
109 | sys.path.insert(0, self.relpath) | |
110 | pkg_resources.working_set.add_entry(self.relpath) | |
111 | ||
112 | return self.app_config() | |
113 | ||
114 | def load(self): | |
115 | # chdir to the configured path before loading, | |
116 | # default is the current dir | |
117 | os.chdir(self.cfg.chdir) | |
118 | ||
119 | return load_pasteapp(self.cfgurl, self.relpath, global_conf=self.gcfg) | |
120 | ||
121 | ||
122 | class PasterServerApplication(PasterBaseApplication): | |
123 | ||
124 | def __init__(self, app, gcfg=None, host="127.0.0.1", port=None, **kwargs): | |
125 | # pylint: disable=super-init-not-called | |
126 | self.cfg = Config() | |
127 | self.gcfg = gcfg # need to hold this for app_config | |
128 | self.app = app | |
129 | self.callable = None | |
130 | ||
131 | gcfg = gcfg or {} | |
132 | cfgfname = gcfg.get("__file__") | |
133 | if cfgfname is not None: | |
134 | self.cfgurl = 'config:%s' % cfgfname | |
135 | self.relpath = os.path.dirname(cfgfname) | |
136 | self.cfgfname = cfgfname | |
137 | ||
138 | cfg = kwargs.copy() | |
139 | ||
140 | if port and not host.startswith("unix:"): | |
141 | bind = "%s:%s" % (host, port) | |
142 | else: | |
143 | bind = host | |
144 | cfg["bind"] = bind.split(',') | |
145 | ||
146 | if gcfg: | |
147 | for k, v in gcfg.items(): | |
148 | cfg[k] = v | |
149 | cfg["default_proc_name"] = cfg['__file__'] | |
150 | ||
151 | try: | |
152 | for k, v in cfg.items(): | |
153 | if k.lower() in self.cfg.settings and v is not None: | |
154 | self.cfg.set(k.lower(), v) | |
155 | except Exception as e: | |
156 | print("\nConfig error: %s" % str(e), file=sys.stderr) | |
157 | sys.stderr.flush() | |
158 | sys.exit(1) | |
159 | ||
160 | if cfg.get("config"): | |
161 | self.load_config_from_file(cfg["config"]) | |
162 | else: | |
163 | default_config = get_default_config_file() | |
164 | if default_config is not None: | |
165 | self.load_config_from_file(default_config) | |
166 | ||
167 | def load(self): | |
168 | return self.app | |
169 | ||
170 | ||
171 | def run(): | |
172 | """\ | |
173 | The ``gunicorn_paster`` command for launching Paster compatible | |
174 | applications like Pylons or Turbogears2 | |
175 | """ | |
176 | util.warn("""This command is deprecated. | |
177 | ||
178 | You should now use the `--paste` option. Ex.: | |
179 | ||
180 | gunicorn --paste development.ini | |
181 | """) | |
182 | ||
183 | from gunicorn.app.pasterapp import PasterApplication | |
184 | PasterApplication("%(prog)s [OPTIONS] pasteconfig.ini").run() | |
185 | ||
186 | ||
187 | def paste_server(app, gcfg=None, host="127.0.0.1", port=None, **kwargs): | |
188 | """\ | |
189 | A paster server. | |
190 | ||
191 | Then entry point in your paster ini file should looks like this: | |
192 | ||
193 | [server:main] | |
194 | use = egg:gunicorn#main | |
195 | host = 127.0.0.1 | |
196 | port = 5000 | |
197 | ||
198 | """ | |
199 | ||
200 | util.warn("""This command is deprecated. | |
201 | ||
202 | You should now use the `--paste` option. Ex.: | |
203 | ||
204 | gunicorn --paste development.ini | |
205 | """) | |
206 | ||
207 | from gunicorn.app.pasterapp import PasterServerApplication | |
208 | PasterServerApplication(app, gcfg=gcfg, host=host, port=port, **kwargs).run() | |
74 | PasterServerApplication().run() |
12 | 12 | class WSGIApplication(Application): |
13 | 13 | def init(self, parser, opts, args): |
14 | 14 | if opts.paste: |
15 | app_name = 'main' | |
16 | path = opts.paste | |
17 | if '#' in path: | |
18 | path, app_name = path.split('#') | |
19 | path = os.path.abspath(os.path.normpath( | |
20 | os.path.join(util.getcwd(), path))) | |
15 | from .pasterapp import has_logging_config | |
21 | 16 | |
22 | if not os.path.exists(path): | |
23 | raise ConfigError("%r not found" % path) | |
17 | config_uri = os.path.abspath(opts.paste) | |
18 | config_file = config_uri.split('#')[0] | |
24 | 19 | |
25 | # paste application, load the config | |
26 | self.cfgurl = 'config:%s#%s' % (path, app_name) | |
27 | self.relpath = os.path.dirname(path) | |
20 | if not os.path.exists(config_file): | |
21 | raise ConfigError("%r not found" % config_file) | |
28 | 22 | |
29 | from .pasterapp import paste_config | |
30 | return paste_config(self.cfg, self.cfgurl, self.relpath) | |
23 | self.cfg.set("default_proc_name", config_file) | |
24 | self.app_uri = config_uri | |
31 | 25 | |
32 | if len(args) < 1: | |
26 | if has_logging_config(config_file): | |
27 | self.cfg.set("logconfig", config_file) | |
28 | ||
29 | return | |
30 | ||
31 | if not args: | |
33 | 32 | parser.error("No application module specified.") |
34 | 33 | |
35 | 34 | self.cfg.set("default_proc_name", args[0]) |
36 | 35 | self.app_uri = args[0] |
37 | 36 | |
38 | 37 | def load_wsgiapp(self): |
39 | # load the app | |
40 | 38 | return util.import_app(self.app_uri) |
41 | 39 | |
42 | 40 | def load_pasteapp(self): |
43 | # load the paste app | |
44 | from .pasterapp import load_pasteapp | |
45 | return load_pasteapp(self.cfgurl, self.relpath, global_conf=self.cfg.paste_global_conf) | |
41 | from .pasterapp import get_wsgi_app | |
42 | return get_wsgi_app(self.app_uri, defaults=self.cfg.paste_global_conf) | |
46 | 43 | |
47 | 44 | def load(self): |
48 | 45 | if self.cfg.paste is not None: |
1 | 1 | # |
2 | 2 | # This file is part of gunicorn released under the MIT license. |
3 | 3 | # See the NOTICE for more information. |
4 | from __future__ import print_function | |
5 | ||
6 | 4 | import errno |
7 | 5 | import os |
8 | 6 | import random |
159 | 157 | self.log.debug("Arbiter booted") |
160 | 158 | self.log.info("Listening at: %s (%s)", listeners_str, self.pid) |
161 | 159 | self.log.info("Using worker: %s", self.cfg.worker_class_str) |
160 | systemd.sd_notify("READY=1\nSTATUS=Gunicorn arbiter booted", self.log) | |
162 | 161 | |
163 | 162 | # check worker class requirements |
164 | 163 | if hasattr(self.worker_class, "check_config"): |
223 | 222 | self.log.info("Handling signal: %s", signame) |
224 | 223 | handler() |
225 | 224 | self.wakeup() |
226 | except StopIteration: | |
227 | self.halt() | |
228 | except KeyboardInterrupt: | |
225 | except (StopIteration, KeyboardInterrupt): | |
229 | 226 | self.halt() |
230 | 227 | except HaltServer as inst: |
231 | 228 | self.halt(reason=inst.reason, exit_status=inst.exit_status) |
376 | 373 | :attr graceful: boolean, If True (the default) workers will be |
377 | 374 | killed gracefully (ie. trying to wait for the current connection) |
378 | 375 | """ |
379 | ||
380 | unlink = self.reexec_pid == self.master_pid == 0 and not self.systemd | |
376 | unlink = ( | |
377 | self.reexec_pid == self.master_pid == 0 | |
378 | and not self.systemd | |
379 | and not self.cfg.reuse_port | |
380 | ) | |
381 | 381 | sock.close_sockets(self.LISTENERS, unlink) |
382 | 382 | |
383 | 383 | self.LISTENERS = [] |
540 | 540 | Maintain the number of workers by spawning or killing |
541 | 541 | as required. |
542 | 542 | """ |
543 | if len(self.WORKERS.keys()) < self.num_workers: | |
543 | if len(self.WORKERS) < self.num_workers: | |
544 | 544 | self.spawn_workers() |
545 | 545 | |
546 | 546 | workers = self.WORKERS.items() |
611 | 611 | of the master process. |
612 | 612 | """ |
613 | 613 | |
614 | for _ in range(self.num_workers - len(self.WORKERS.keys())): | |
614 | for _ in range(self.num_workers - len(self.WORKERS)): | |
615 | 615 | self.spawn_worker() |
616 | 616 | time.sleep(0.1 * random.random()) |
617 | 617 |
0 | # Author: Steven J. Bethard <steven.bethard@gmail.com>. | |
1 | ||
2 | """Command-line parsing library | |
3 | ||
4 | This module is an optparse-inspired command-line parsing library that: | |
5 | ||
6 | - handles both optional and positional arguments | |
7 | - produces highly informative usage messages | |
8 | - supports parsers that dispatch to sub-parsers | |
9 | ||
10 | The following is a simple usage example that sums integers from the | |
11 | command-line and writes the result to a file:: | |
12 | ||
13 | parser = argparse.ArgumentParser( | |
14 | description='sum the integers at the command line') | |
15 | parser.add_argument( | |
16 | 'integers', metavar='int', nargs='+', type=int, | |
17 | help='an integer to be summed') | |
18 | parser.add_argument( | |
19 | '--log', default=sys.stdout, type=argparse.FileType('w'), | |
20 | help='the file where the sum should be written') | |
21 | args = parser.parse_args() | |
22 | args.log.write('%s' % sum(args.integers)) | |
23 | args.log.close() | |
24 | ||
25 | The module contains the following public classes: | |
26 | ||
27 | - ArgumentParser -- The main entry point for command-line parsing. As the | |
28 | example above shows, the add_argument() method is used to populate | |
29 | the parser with actions for optional and positional arguments. Then | |
30 | the parse_args() method is invoked to convert the args at the | |
31 | command-line into an object with attributes. | |
32 | ||
33 | - ArgumentError -- The exception raised by ArgumentParser objects when | |
34 | there are errors with the parser's actions. Errors raised while | |
35 | parsing the command-line are caught by ArgumentParser and emitted | |
36 | as command-line messages. | |
37 | ||
38 | - FileType -- A factory for defining types of files to be created. As the | |
39 | example above shows, instances of FileType are typically passed as | |
40 | the type= argument of add_argument() calls. | |
41 | ||
42 | - Action -- The base class for parser actions. Typically actions are | |
43 | selected by passing strings like 'store_true' or 'append_const' to | |
44 | the action= argument of add_argument(). However, for greater | |
45 | customization of ArgumentParser actions, subclasses of Action may | |
46 | be defined and passed as the action= argument. | |
47 | ||
48 | - HelpFormatter, RawDescriptionHelpFormatter, RawTextHelpFormatter, | |
49 | ArgumentDefaultsHelpFormatter -- Formatter classes which | |
50 | may be passed as the formatter_class= argument to the | |
51 | ArgumentParser constructor. HelpFormatter is the default, | |
52 | RawDescriptionHelpFormatter and RawTextHelpFormatter tell the parser | |
53 | not to change the formatting for help text, and | |
54 | ArgumentDefaultsHelpFormatter adds information about argument defaults | |
55 | to the help. | |
56 | ||
57 | All other classes in this module are considered implementation details. | |
58 | (Also note that HelpFormatter and RawDescriptionHelpFormatter are only | |
59 | considered public as object names -- the API of the formatter objects is | |
60 | still considered an implementation detail.) | |
61 | """ | |
62 | ||
63 | __version__ = '1.2.1' | |
64 | __all__ = [ | |
65 | 'ArgumentParser', | |
66 | 'ArgumentError', | |
67 | 'ArgumentTypeError', | |
68 | 'FileType', | |
69 | 'HelpFormatter', | |
70 | 'ArgumentDefaultsHelpFormatter', | |
71 | 'RawDescriptionHelpFormatter', | |
72 | 'RawTextHelpFormatter', | |
73 | 'Namespace', | |
74 | 'Action', | |
75 | 'ONE_OR_MORE', | |
76 | 'OPTIONAL', | |
77 | 'PARSER', | |
78 | 'REMAINDER', | |
79 | 'SUPPRESS', | |
80 | 'ZERO_OR_MORE', | |
81 | ] | |
82 | ||
83 | ||
84 | import copy as _copy | |
85 | import os as _os | |
86 | import re as _re | |
87 | import sys as _sys | |
88 | import textwrap as _textwrap | |
89 | ||
90 | from gettext import gettext as _ | |
91 | ||
# Compatibility shims for very old Pythons.  Each block probes for a builtin
# and, only when it is missing, installs a module-level stand-in under the
# same name (the shadowing is deliberate).
try:
    set
except NameError:
    # for python < 2.4 compatibility (sets module is there since 2.3):
    from sets import Set as set

try:
    basestring
except NameError:
    # Python 3 has no basestring; str is the only string type there.
    basestring = str

try:
    sorted
except NameError:
    # for python < 2.4 compatibility:
    # NOTE(review): unlike the builtin, this fallback supports only the
    # 'reverse' flag -- no 'key' or 'cmp' arguments.
    def sorted(iterable, reverse=False):
        result = list(iterable)
        result.sort()
        if reverse:
            result.reverse()
        return result
113 | ||
114 | ||
115 | def _callable(obj): | |
116 | return hasattr(obj, '__call__') or hasattr(obj, '__bases__') | |
117 | ||
118 | ||
# Sentinel string: arguments, defaults or help text set to SUPPRESS are
# omitted from output/namespaces entirely.
SUPPRESS = '==SUPPRESS=='

# Special nargs values (see HelpFormatter._format_args for how each renders).
OPTIONAL = '?'
ZERO_OR_MORE = '*'
ONE_OR_MORE = '+'
PARSER = 'A...'    # used by subparser actions
REMAINDER = '...'  # consume all remaining argument strings
# Namespace attribute used to stash unrecognized argument strings.
_UNRECOGNIZED_ARGS_ATTR = '_unrecognized_args'
127 | ||
128 | # ============================= | |
129 | # Utility functions and classes | |
130 | # ============================= | |
131 | ||
132 | class _AttributeHolder(object): | |
133 | """Abstract base class that provides __repr__. | |
134 | ||
135 | The __repr__ method returns a string in the format:: | |
136 | ClassName(attr=name, attr=name, ...) | |
137 | The attributes are determined either by a class-level attribute, | |
138 | '_kwarg_names', or by inspecting the instance __dict__. | |
139 | """ | |
140 | ||
141 | def __repr__(self): | |
142 | type_name = type(self).__name__ | |
143 | arg_strings = [] | |
144 | for arg in self._get_args(): | |
145 | arg_strings.append(repr(arg)) | |
146 | for name, value in self._get_kwargs(): | |
147 | arg_strings.append('%s=%r' % (name, value)) | |
148 | return '%s(%s)' % (type_name, ', '.join(arg_strings)) | |
149 | ||
150 | def _get_kwargs(self): | |
151 | return sorted(self.__dict__.items()) | |
152 | ||
153 | def _get_args(self): | |
154 | return [] | |
155 | ||
156 | ||
157 | def _ensure_value(namespace, name, value): | |
158 | if getattr(namespace, name, None) is None: | |
159 | setattr(namespace, name, value) | |
160 | return getattr(namespace, name) | |
161 | ||
162 | ||
163 | # =============== | |
164 | # Formatting Help | |
165 | # =============== | |
166 | ||
class HelpFormatter(object):
    """Formatter for generating usage messages and argument help strings.

    Only the name of this class is considered a public API. All the methods
    provided by the class are considered an implementation detail.
    """

    def __init__(self,
                 prog,
                 indent_increment=2,
                 max_help_position=24,
                 width=None):

        # default setting for width: $COLUMNS minus a 2-column safety
        # margin, falling back to 80 columns when COLUMNS is unset/invalid
        if width is None:
            try:
                width = int(_os.environ['COLUMNS'])
            except (KeyError, ValueError):
                width = 80
            width -= 2

        self._prog = prog
        self._indent_increment = indent_increment
        self._max_help_position = max_help_position
        self._width = width

        self._current_indent = 0
        self._level = 0
        # widest action invocation seen so far; used to align help columns
        self._action_max_length = 0

        self._root_section = self._Section(self, None)
        self._current_section = self._root_section

        self._whitespace_matcher = _re.compile(r'\s+')
        self._long_break_matcher = _re.compile(r'\n\n\n+')

    # ===============================
    # Section and indentation methods
    # ===============================
    def _indent(self):
        self._current_indent += self._indent_increment
        self._level += 1

    def _dedent(self):
        self._current_indent -= self._indent_increment
        assert self._current_indent >= 0, 'Indent decreased below 0.'
        self._level -= 1

    class _Section(object):
        """One (possibly nested) titled group of deferred format callbacks."""

        def __init__(self, formatter, parent, heading=None):
            self.formatter = formatter
            self.parent = parent
            self.heading = heading
            self.items = []

        def format_help(self):
            # format the indented section
            if self.parent is not None:
                self.formatter._indent()
            join = self.formatter._join_parts
            # BUGFIX: previously every (func, args) item was invoked once in
            # a bare loop (results discarded) and then again in this
            # comprehension, doubling the formatting work at each nesting
            # level.  The redundant pre-pass was removed, matching later
            # upstream argparse.
            item_help = join([func(*args) for func, args in self.items])
            if self.parent is not None:
                self.formatter._dedent()

            # return nothing if the section was empty
            if not item_help:
                return ''

            # add the heading if the section was non-empty
            if self.heading is not SUPPRESS and self.heading is not None:
                current_indent = self.formatter._current_indent
                heading = '%*s%s:\n' % (current_indent, '', self.heading)
            else:
                heading = ''

            # join the section-initial newline, the heading and the help
            return join(['\n', heading, item_help, '\n'])

    def _add_item(self, func, args):
        # defer the call: items are only invoked when format_help() runs
        self._current_section.items.append((func, args))

    # ========================
    # Message building methods
    # ========================
    def start_section(self, heading):
        """Open a new nested section with the given heading."""
        self._indent()
        section = self._Section(self, self._current_section, heading)
        self._add_item(section.format_help, [])
        self._current_section = section

    def end_section(self):
        """Close the current section and return to its parent."""
        self._current_section = self._current_section.parent
        self._dedent()

    def add_text(self, text):
        if text is not SUPPRESS and text is not None:
            self._add_item(self._format_text, [text])

    def add_usage(self, usage, actions, groups, prefix=None):
        if usage is not SUPPRESS:
            args = usage, actions, groups, prefix
            self._add_item(self._format_usage, args)

    def add_argument(self, action):
        if action.help is not SUPPRESS:

            # find all invocations
            get_invocation = self._format_action_invocation
            invocations = [get_invocation(action)]
            for subaction in self._iter_indented_subactions(action):
                invocations.append(get_invocation(subaction))

            # update the maximum item length
            invocation_length = max([len(s) for s in invocations])
            action_length = invocation_length + self._current_indent
            self._action_max_length = max(self._action_max_length,
                                          action_length)

            # add the item to the list
            self._add_item(self._format_action, [action])

    def add_arguments(self, actions):
        for action in actions:
            self.add_argument(action)

    # =======================
    # Help-formatting methods
    # =======================
    def format_help(self):
        """Render everything added so far into the final help string."""
        help_str = self._root_section.format_help()
        if help_str:
            # collapse runs of 3+ newlines and normalize the edges
            help_str = self._long_break_matcher.sub('\n\n', help_str)
            help_str = help_str.strip('\n') + '\n'
        return help_str

    def _join_parts(self, part_strings):
        return ''.join([part
                        for part in part_strings
                        if part and part is not SUPPRESS])

    def _format_usage(self, usage, actions, groups, prefix):
        if prefix is None:
            prefix = _('usage: ')

        # if usage is specified, use that
        if usage is not None:
            usage = usage % dict(prog=self._prog)

        # if no optionals or positionals are available, usage is just prog
        elif usage is None and not actions:
            usage = '%(prog)s' % dict(prog=self._prog)

        # if optionals and positionals are available, calculate usage
        elif usage is None:
            prog = '%(prog)s' % dict(prog=self._prog)

            # split optionals from positionals
            optionals = []
            positionals = []
            for action in actions:
                if action.option_strings:
                    optionals.append(action)
                else:
                    positionals.append(action)

            # build full usage string
            # (local renamed from 'format' to avoid shadowing the builtin)
            format_actions = self._format_actions_usage
            action_usage = format_actions(optionals + positionals, groups)
            usage = ' '.join([s for s in [prog, action_usage] if s])

            # wrap the usage parts if it's too long
            text_width = self._width - self._current_indent
            if len(prefix) + len(usage) > text_width:

                # break usage into wrappable parts
                part_regexp = r'\(.*?\)+|\[.*?\]+|\S+'
                opt_usage = format_actions(optionals, groups)
                pos_usage = format_actions(positionals, groups)
                opt_parts = _re.findall(part_regexp, opt_usage)
                pos_parts = _re.findall(part_regexp, pos_usage)
                assert ' '.join(opt_parts) == opt_usage
                assert ' '.join(pos_parts) == pos_usage

                # helper for wrapping lines
                def get_lines(parts, indent, prefix=None):
                    lines = []
                    line = []
                    if prefix is not None:
                        line_len = len(prefix) - 1
                    else:
                        line_len = len(indent) - 1
                    for part in parts:
                        if line_len + 1 + len(part) > text_width:
                            lines.append(indent + ' '.join(line))
                            line = []
                            line_len = len(indent) - 1
                        line.append(part)
                        line_len += len(part) + 1
                    if line:
                        lines.append(indent + ' '.join(line))
                    if prefix is not None:
                        lines[0] = lines[0][len(indent):]
                    return lines

                # if prog is short, follow it with optionals or positionals
                if len(prefix) + len(prog) <= 0.75 * text_width:
                    indent = ' ' * (len(prefix) + len(prog) + 1)
                    if opt_parts:
                        lines = get_lines([prog] + opt_parts, indent, prefix)
                        lines.extend(get_lines(pos_parts, indent))
                    elif pos_parts:
                        lines = get_lines([prog] + pos_parts, indent, prefix)
                    else:
                        lines = [prog]

                # if prog is long, put it on its own line
                else:
                    indent = ' ' * len(prefix)
                    parts = opt_parts + pos_parts
                    lines = get_lines(parts, indent)
                    if len(lines) > 1:
                        lines = []
                        lines.extend(get_lines(opt_parts, indent))
                        lines.extend(get_lines(pos_parts, indent))
                    lines = [prog] + lines

                # join lines into usage
                usage = '\n'.join(lines)

        # prefix with 'usage:'
        return '%s%s\n\n' % (prefix, usage)

    def _format_actions_usage(self, actions, groups):
        # find group indices and identify actions in groups
        group_actions = set()
        inserts = {}
        for group in groups:
            try:
                start = actions.index(group._group_actions[0])
            except ValueError:
                continue
            else:
                end = start + len(group._group_actions)
                if actions[start:end] == group._group_actions:
                    for action in group._group_actions:
                        group_actions.add(action)
                    if not group.required:
                        if start in inserts:
                            inserts[start] += ' ['
                        else:
                            inserts[start] = '['
                        inserts[end] = ']'
                    else:
                        if start in inserts:
                            inserts[start] += ' ('
                        else:
                            inserts[start] = '('
                        inserts[end] = ')'
                    for i in range(start + 1, end):
                        inserts[i] = '|'

        # collect all actions format strings
        parts = []
        for i, action in enumerate(actions):

            # suppressed arguments are marked with None
            # remove | separators for suppressed arguments
            if action.help is SUPPRESS:
                parts.append(None)
                if inserts.get(i) == '|':
                    inserts.pop(i)
                elif inserts.get(i + 1) == '|':
                    inserts.pop(i + 1)

            # produce all arg strings
            elif not action.option_strings:
                part = self._format_args(action, action.dest)

                # if it's in a group, strip the outer []
                if action in group_actions:
                    if part[0] == '[' and part[-1] == ']':
                        part = part[1:-1]

                # add the action string to the list
                parts.append(part)

            # produce the first way to invoke the option in brackets
            else:
                option_string = action.option_strings[0]

                # if the Optional doesn't take a value, format is:
                #    -s or --long
                if action.nargs == 0:
                    part = '%s' % option_string

                # if the Optional takes a value, format is:
                #    -s ARGS or --long ARGS
                else:
                    default = action.dest.upper()
                    args_string = self._format_args(action, default)
                    part = '%s %s' % (option_string, args_string)

                # make it look optional if it's not required or in a group
                if not action.required and action not in group_actions:
                    part = '[%s]' % part

                # add the action string to the list
                parts.append(part)

        # insert things at the necessary indices
        for i in sorted(inserts, reverse=True):
            parts[i:i] = [inserts[i]]

        # join all the action items with spaces
        text = ' '.join([item for item in parts if item is not None])

        # clean up separators for mutually exclusive groups
        # (locals renamed from open/close to avoid shadowing the builtins)
        open_pat = r'[\[(]'
        close_pat = r'[\])]'
        text = _re.sub(r'(%s) ' % open_pat, r'\1', text)
        text = _re.sub(r' (%s)' % close_pat, r'\1', text)
        text = _re.sub(r'%s *%s' % (open_pat, close_pat), r'', text)
        text = _re.sub(r'\(([^|]*)\)', r'\1', text)
        text = text.strip()

        # return the text
        return text

    def _format_text(self, text):
        if '%(prog)' in text:
            text = text % dict(prog=self._prog)
        text_width = self._width - self._current_indent
        indent = ' ' * self._current_indent
        return self._fill_text(text, text_width, indent) + '\n\n'

    def _format_action(self, action):
        # determine the required width and the entry label
        help_position = min(self._action_max_length + 2,
                            self._max_help_position)
        help_width = self._width - help_position
        action_width = help_position - self._current_indent - 2
        action_header = self._format_action_invocation(action)

        # no help; start on same line and add a final newline
        if not action.help:
            tup = self._current_indent, '', action_header
            action_header = '%*s%s\n' % tup

        # short action name; start on the same line and pad two spaces
        elif len(action_header) <= action_width:
            tup = self._current_indent, '', action_width, action_header
            action_header = '%*s%-*s  ' % tup
            indent_first = 0

        # long action name; start on the next line
        else:
            tup = self._current_indent, '', action_header
            action_header = '%*s%s\n' % tup
            indent_first = help_position

        # collect the pieces of the action help
        parts = [action_header]

        # if there was help for the action, add lines of help text
        if action.help:
            help_text = self._expand_help(action)
            help_lines = self._split_lines(help_text, help_width)
            parts.append('%*s%s\n' % (indent_first, '', help_lines[0]))
            for line in help_lines[1:]:
                parts.append('%*s%s\n' % (help_position, '', line))

        # or add a newline if the description doesn't end with one
        elif not action_header.endswith('\n'):
            parts.append('\n')

        # if there are any sub-actions, add their help as well
        for subaction in self._iter_indented_subactions(action):
            parts.append(self._format_action(subaction))

        # return a single string
        return self._join_parts(parts)

    def _format_action_invocation(self, action):
        if not action.option_strings:
            metavar, = self._metavar_formatter(action, action.dest)(1)
            return metavar

        else:
            parts = []

            # if the Optional doesn't take a value, format is:
            #    -s, --long
            if action.nargs == 0:
                parts.extend(action.option_strings)

            # if the Optional takes a value, format is:
            #    -s ARGS, --long ARGS
            else:
                default = action.dest.upper()
                args_string = self._format_args(action, default)
                for option_string in action.option_strings:
                    parts.append('%s %s' % (option_string, args_string))

            return ', '.join(parts)

    def _metavar_formatter(self, action, default_metavar):
        if action.metavar is not None:
            result = action.metavar
        elif action.choices is not None:
            choice_strs = [str(choice) for choice in action.choices]
            result = '{%s}' % ','.join(choice_strs)
        else:
            result = default_metavar

        # inner callable renamed from 'format' to avoid shadowing the builtin
        def format_tuple(tuple_size):
            if isinstance(result, tuple):
                return result
            else:
                return (result, ) * tuple_size
        return format_tuple

    def _format_args(self, action, default_metavar):
        get_metavar = self._metavar_formatter(action, default_metavar)
        if action.nargs is None:
            result = '%s' % get_metavar(1)
        elif action.nargs == OPTIONAL:
            result = '[%s]' % get_metavar(1)
        elif action.nargs == ZERO_OR_MORE:
            result = '[%s [%s ...]]' % get_metavar(2)
        elif action.nargs == ONE_OR_MORE:
            result = '%s [%s ...]' % get_metavar(2)
        elif action.nargs == REMAINDER:
            result = '...'
        elif action.nargs == PARSER:
            result = '%s ...' % get_metavar(1)
        else:
            # an integer nargs: repeat the metavar that many times
            # (loop variable renamed so it does not shadow gettext's _)
            formats = ['%s' for _unused in range(action.nargs)]
            result = ' '.join(formats) % get_metavar(action.nargs)
        return result

    def _expand_help(self, action):
        params = dict(vars(action), prog=self._prog)
        for name in list(params):
            if params[name] is SUPPRESS:
                del params[name]
        for name in list(params):
            if hasattr(params[name], '__name__'):
                params[name] = params[name].__name__
        if params.get('choices') is not None:
            choices_str = ', '.join([str(c) for c in params['choices']])
            params['choices'] = choices_str
        return self._get_help_string(action) % params

    def _iter_indented_subactions(self, action):
        try:
            get_subactions = action._get_subactions
        except AttributeError:
            pass
        else:
            self._indent()
            for subaction in get_subactions():
                yield subaction
            self._dedent()

    def _split_lines(self, text, width):
        text = self._whitespace_matcher.sub(' ', text).strip()
        return _textwrap.wrap(text, width)

    def _fill_text(self, text, width, indent):
        text = self._whitespace_matcher.sub(' ', text).strip()
        return _textwrap.fill(text, width, initial_indent=indent,
                              subsequent_indent=indent)

    def _get_help_string(self, action):
        return action.help
645 | ||
646 | ||
class RawDescriptionHelpFormatter(HelpFormatter):
    """Help message formatter which retains any formatting in descriptions.

    Only the name of this class is considered a public API. All the methods
    provided by the class are considered an implementation detail.
    """

    def _fill_text(self, text, width, indent):
        # Keep the author's line breaks; just prefix each line with the
        # indent instead of re-wrapping to the given width.
        lines = text.splitlines(True)
        return ''.join(indent + line for line in lines)
656 | ||
657 | ||
class RawTextHelpFormatter(RawDescriptionHelpFormatter):
    """Help message formatter which retains formatting of all help text.

    Only the name of this class is considered a public API. All the methods
    provided by the class are considered an implementation detail.
    """

    def _split_lines(self, text, width):
        # Return the help text's own lines verbatim; the wrap width is
        # deliberately ignored.
        return text.splitlines()
667 | ||
668 | ||
class ArgumentDefaultsHelpFormatter(HelpFormatter):
    """Help message formatter which adds default values to argument help.

    Only the name of this class is considered a public API. All the methods
    provided by the class are considered an implementation detail.
    """

    def _get_help_string(self, action):
        # Append ' (default: ...)' unless the help already mentions the
        # default, the default is suppressed, or the action is a positional
        # whose nargs cannot fall back to a default.
        help_text = action.help
        if '%(default)' in action.help:
            return help_text
        if action.default is SUPPRESS:
            return help_text
        if action.option_strings or action.nargs in (OPTIONAL, ZERO_OR_MORE):
            help_text += ' (default: %(default)s)'
        return help_text
684 | ||
685 | ||
686 | # ===================== | |
687 | # Options and Arguments | |
688 | # ===================== | |
689 | ||
690 | def _get_action_name(argument): | |
691 | if argument is None: | |
692 | return None | |
693 | elif argument.option_strings: | |
694 | return '/'.join(argument.option_strings) | |
695 | elif argument.metavar not in (None, SUPPRESS): | |
696 | return argument.metavar | |
697 | elif argument.dest not in (None, SUPPRESS): | |
698 | return argument.dest | |
699 | else: | |
700 | return None | |
701 | ||
702 | ||
class ArgumentError(Exception):
    """An error from creating or using an argument (optional or positional).

    The string value of this exception is the message, augmented with
    information about the argument that caused it.
    """

    def __init__(self, argument, message):
        self.argument_name = _get_action_name(argument)
        self.message = message

    def __str__(self):
        fields = dict(message=self.message,
                      argument_name=self.argument_name)
        if self.argument_name is None:
            return '%(message)s' % fields
        return 'argument %(argument_name)s: %(message)s' % fields
721 | ||
722 | ||
class ArgumentTypeError(Exception):
    """An error from trying to convert a command line string to a type."""
726 | ||
727 | ||
728 | # ============== | |
729 | # Action classes | |
730 | # ============== | |
731 | ||
class Action(_AttributeHolder):
    """Information about how to convert command line strings to Python objects.

    Action objects are used by an ArgumentParser to represent the information
    needed to parse a single argument from one or more strings from the
    command line. The keyword arguments to the Action constructor are also
    all attributes of Action instances.

    Keyword Arguments:

        - option_strings -- A list of command-line option strings which
            should be associated with this action.

        - dest -- The name of the attribute to hold the created object(s)

        - nargs -- The number of command-line arguments that should be
            consumed. By default, one argument will be consumed and a single
            value will be produced.  Other values include:
                - N (an integer) consumes N arguments (and produces a list)
                - '?' consumes zero or one arguments
                - '*' consumes zero or more arguments (and produces a list)
                - '+' consumes one or more arguments (and produces a list)
            Note that the difference between the default and nargs=1 is that
            with the default, a single value will be produced, while with
            nargs=1, a list containing a single value will be produced.

        - const -- The value to be produced if the option is specified and the
            option uses an action that takes no values.

        - default -- The value to be produced if the option is not specified.

        - type -- The type which the command-line arguments should be converted
            to, should be one of 'string', 'int', 'float', 'complex' or a
            callable object that accepts a single string argument. If None,
            'string' is assumed.

        - choices -- A container of values that should be allowed. If not None,
            after a command-line argument has been converted to the appropriate
            type, an exception will be raised if it is not a member of this
            collection.

        - required -- True if the action must always be specified at the
            command line. This is only meaningful for optional command-line
            arguments.

        - help -- The help string describing the argument.

        - metavar -- The name to be used for the option's argument with the
            help string. If None, the 'dest' value will be used as the name.
    """

    def __init__(self, option_strings, dest, nargs=None, const=None,
                 default=None, type=None, choices=None, required=False,
                 help=None, metavar=None):
        # Every constructor keyword becomes a same-named instance attribute.
        self.option_strings = option_strings
        self.dest = dest
        self.nargs = nargs
        self.const = const
        self.default = default
        self.type = type
        self.choices = choices
        self.required = required
        self.help = help
        self.metavar = metavar

    def _get_kwargs(self):
        # Attributes shown by __repr__, in a fixed order ('required' is not
        # included in the repr).
        names = ('option_strings', 'dest', 'nargs', 'const', 'default',
                 'type', 'choices', 'help', 'metavar')
        return [(name, getattr(self, name)) for name in names]

    def __call__(self, parser, namespace, values, option_string=None):
        # Subclasses must override; the base Action is abstract.
        raise NotImplementedError(_('.__call__() not defined'))
821 | ||
822 | ||
class _StoreAction(Action):
    """Action that stores the (converted) value(s) on the namespace."""

    def __init__(self, option_strings, dest, nargs=None, const=None,
                 default=None, type=None, choices=None, required=False,
                 help=None, metavar=None):
        # storing nothing makes no sense -- point users at the const actions
        if nargs == 0:
            raise ValueError('nargs for store actions must be > 0; if you '
                             'have nothing to store, actions such as store '
                             'true or store const may be more appropriate')
        # const is only consumed when nargs is OPTIONAL ('?')
        if const is not None and nargs != OPTIONAL:
            raise ValueError('nargs must be %r to supply const' % OPTIONAL)
        super(_StoreAction, self).__init__(
            option_strings=option_strings, dest=dest, nargs=nargs,
            const=const, default=default, type=type, choices=choices,
            required=required, help=help, metavar=metavar)

    def __call__(self, parser, namespace, values, option_string=None):
        setattr(namespace, self.dest, values)
856 | ||
857 | ||
class _StoreConstAction(Action):
    """Action that stores a fixed constant when its option is seen."""

    def __init__(self, option_strings, dest, const, default=None,
                 required=False, help=None, metavar=None):
        # consumes no argument strings (nargs=0)
        super(_StoreConstAction, self).__init__(
            option_strings=option_strings, dest=dest, nargs=0,
            const=const, default=default, required=required, help=help)

    def __call__(self, parser, namespace, values, option_string=None):
        setattr(namespace, self.dest, self.const)
879 | ||
880 | ||
class _StoreTrueAction(_StoreConstAction):
    """Action that stores True when seen (default is False)."""

    def __init__(self, option_strings, dest, default=False,
                 required=False, help=None):
        super(_StoreTrueAction, self).__init__(
            option_strings=option_strings, dest=dest, const=True,
            default=default, required=required, help=help)
896 | ||
897 | ||
class _StoreFalseAction(_StoreConstAction):
    """Action that stores False when seen (default is True)."""

    def __init__(self, option_strings, dest, default=True,
                 required=False, help=None):
        super(_StoreFalseAction, self).__init__(
            option_strings=option_strings, dest=dest, const=False,
            default=default, required=required, help=help)
913 | ||
914 | ||
class _AppendAction(Action):
    """Action that appends each (converted) value to a list on the namespace."""

    def __init__(self, option_strings, dest, nargs=None, const=None,
                 default=None, type=None, choices=None, required=False,
                 help=None, metavar=None):
        if nargs == 0:
            raise ValueError('nargs for append actions must be > 0; if arg '
                             'strings are not supplying the value to append, '
                             'the append const action may be more appropriate')
        # const is only consumed when nargs is OPTIONAL ('?')
        if const is not None and nargs != OPTIONAL:
            raise ValueError('nargs must be %r to supply const' % OPTIONAL)
        super(_AppendAction, self).__init__(
            option_strings=option_strings, dest=dest, nargs=nargs,
            const=const, default=default, type=type, choices=choices,
            required=required, help=help, metavar=metavar)

    def __call__(self, parser, namespace, values, option_string=None):
        # copy before appending so a shared default list is never mutated
        items = _copy.copy(_ensure_value(namespace, self.dest, []))
        items.append(values)
        setattr(namespace, self.dest, items)
950 | ||
951 | ||
class _AppendConstAction(Action):
    """Action that appends a fixed constant to a list on the namespace."""

    def __init__(self, option_strings, dest, const, default=None,
                 required=False, help=None, metavar=None):
        # consumes no argument strings (nargs=0)
        super(_AppendConstAction, self).__init__(
            option_strings=option_strings, dest=dest, nargs=0,
            const=const, default=default, required=required,
            help=help, metavar=metavar)

    def __call__(self, parser, namespace, values, option_string=None):
        # copy before appending so a shared default list is never mutated
        items = _copy.copy(_ensure_value(namespace, self.dest, []))
        items.append(self.const)
        setattr(namespace, self.dest, items)
976 | ||
977 | ||
class _CountAction(Action):
    """Action that counts how many times its option appears."""

    def __init__(self, option_strings, dest, default=None,
                 required=False, help=None):
        super(_CountAction, self).__init__(
            option_strings=option_strings, dest=dest, nargs=0,
            default=default, required=required, help=help)

    def __call__(self, parser, namespace, values, option_string=None):
        # missing/None counts start at 0
        setattr(namespace, self.dest,
                _ensure_value(namespace, self.dest, 0) + 1)
997 | ||
998 | ||
class _HelpAction(Action):
    """Action that prints the parser's help message and exits."""

    def __init__(self, option_strings, dest=SUPPRESS,
                 default=SUPPRESS, help=None):
        # dest/default are SUPPRESS so no attribute lands on the namespace
        super(_HelpAction, self).__init__(
            option_strings=option_strings, dest=dest,
            default=default, nargs=0, help=help)

    def __call__(self, parser, namespace, values, option_string=None):
        parser.print_help()
        parser.exit()
1016 | ||
1017 | ||
class _VersionAction(Action):
    """Print version information and exit (``action='version'``).

    The version string is taken from this action; if none was supplied it
    falls back to the (deprecated) parser-level ``version`` attribute.
    """

    def __init__(self, option_strings, version=None, dest=SUPPRESS,
                 default=SUPPRESS,
                 help="show program's version number and exit"):
        super(_VersionAction, self).__init__(
            option_strings=option_strings, dest=dest, default=default,
            nargs=0, help=help)
        self.version = version

    def __call__(self, parser, namespace, values, option_string=None):
        # Prefer the action-level version; fall back to parser.version.
        text = self.version
        if text is None:
            text = parser.version
        formatter = parser._get_formatter()
        formatter.add_text(text)
        # exit() raises SystemExit after printing the formatted message.
        parser.exit(message=formatter.format_help())
1041 | ||
1042 | ||
class _SubParsersAction(Action):
    """Action that dispatches to named sub-parsers (``add_subparsers``).

    The first positional value selects a sub-parser by name; all remaining
    argument strings are handed to that sub-parser for parsing.
    """

    class _ChoicesPseudoAction(Action):
        # Lightweight stand-in used only so the help formatter can list
        # each sub-command name with its help text.

        def __init__(self, name, help):
            sup = super(_SubParsersAction._ChoicesPseudoAction, self)
            sup.__init__(option_strings=[], dest=name, help=help)

    def __init__(self,
                 option_strings,
                 prog,
                 parser_class,
                 dest=SUPPRESS,
                 help=None,
                 metavar=None):

        self._prog_prefix = prog
        self._parser_class = parser_class
        # choices= aliases this dict, so parsers added later via
        # add_parser() automatically become valid choices.
        self._name_parser_map = {}
        self._choices_actions = []

        super(_SubParsersAction, self).__init__(
            option_strings=option_strings,
            dest=dest,
            nargs=PARSER,
            choices=self._name_parser_map,
            help=help,
            metavar=metavar)

    def add_parser(self, name, **kwargs):
        """Create, register and return a sub-parser called *name*."""
        # set prog from the existing prefix
        if kwargs.get('prog') is None:
            kwargs['prog'] = '%s %s' % (self._prog_prefix, name)

        # create a pseudo-action to hold the choice help
        if 'help' in kwargs:
            help = kwargs.pop('help')
            choice_action = self._ChoicesPseudoAction(name, help)
            self._choices_actions.append(choice_action)

        # create the parser and add it to the map
        parser = self._parser_class(**kwargs)
        self._name_parser_map[name] = parser
        return parser

    def _get_subactions(self):
        # Used by the help formatter to list sub-command choices.
        return self._choices_actions

    def __call__(self, parser, namespace, values, option_string=None):
        parser_name = values[0]
        arg_strings = values[1:]

        # set the parser name if requested
        if self.dest is not SUPPRESS:
            setattr(namespace, self.dest, parser_name)

        # select the parser
        try:
            parser = self._name_parser_map[parser_name]
        except KeyError:
            tup = parser_name, ', '.join(self._name_parser_map)
            # Bug fix: translate the message template FIRST, then apply the
            # %-formatting.  The old code did _('... %r ...' % tup), which
            # passed an already-formatted string to gettext, so it could
            # never match a catalog entry (upstream argparse translates
            # before formatting).
            msg = _('unknown parser %r (choices: %s)') % tup
            raise ArgumentError(self, msg)

        # parse all the remaining options into the namespace
        # store any unrecognized options on the object, so that the top
        # level parser can decide what to do with them
        namespace, arg_strings = parser.parse_known_args(arg_strings, namespace)
        if arg_strings:
            vars(namespace).setdefault(_UNRECOGNIZED_ARGS_ATTR, [])
            getattr(namespace, _UNRECOGNIZED_ARGS_ATTR).extend(arg_strings)
1114 | ||
1115 | ||
1116 | # ============== | |
1117 | # Type classes | |
1118 | # ============== | |
1119 | ||
class FileType(object):
    """Factory for creating file object types

    Instances of FileType are typically passed as type= arguments to the
    ArgumentParser add_argument() method.

    Keyword Arguments:
        - mode -- A string indicating how the file is to be opened. Accepts the
            same values as the builtin open() function.
        - bufsize -- The file's desired buffer size. Accepts the same values as
            the builtin open() function.
    """

    def __init__(self, mode='r', bufsize=None):
        self._mode = mode
        self._bufsize = bufsize

    def __call__(self, string):
        # the special argument "-" means sys.std{in,out}
        if string == '-':
            if 'r' in self._mode:
                return _sys.stdin
            elif 'w' in self._mode:
                return _sys.stdout
            else:
                # Bug fix: translate the message template FIRST, then apply
                # the %-formatting.  The old code did
                # _('argument "-" with mode %r' % mode), which handed an
                # already-formatted string to gettext and so could never
                # match a catalog entry.
                msg = _('argument "-" with mode %r') % self._mode
                raise ValueError(msg)

        # all other arguments are used as file names
        if self._bufsize:
            return open(string, self._mode, self._bufsize)
        else:
            return open(string, self._mode)

    def __repr__(self):
        # e.g. FileType('r') or FileType('w', 1024); None bufsize is omitted.
        args = [self._mode, self._bufsize]
        args_str = ', '.join([repr(arg) for arg in args if arg is not None])
        return '%s(%s)' % (type(self).__name__, args_str)
1158 | ||
1159 | # =========================== | |
1160 | # Optional and Positional Parsing | |
1161 | # =========================== | |
1162 | ||
class Namespace(_AttributeHolder):
    """Simple object for storing attributes.

    Implements equality by attribute names and values, and provides a simple
    string representation.
    """

    def __init__(self, **kwargs):
        for name in kwargs:
            setattr(self, name, kwargs[name])

    # Namespaces compare by value, so they are deliberately unhashable.
    __hash__ = None

    def __eq__(self, other):
        # Bug fix (matches later upstream argparse, bpo-21481): the old code
        # called vars(other) unconditionally, so comparing a Namespace with
        # any object lacking __dict__ (e.g. ``ns == 1``) raised TypeError.
        # Returning NotImplemented lets Python fall back to its default
        # comparison (which yields False) instead of crashing.
        if not isinstance(other, Namespace):
            return NotImplemented
        return vars(self) == vars(other)

    def __ne__(self, other):
        # Delegates to __eq__; ``==`` handles the NotImplemented fallback.
        return not (self == other)

    def __contains__(self, key):
        return key in self.__dict__
1184 | ||
1185 | ||
class _ActionsContainer(object):
    """Shared base for ArgumentParser and _ArgumentGroup.

    Holds the action registry, the list of actions, argument groups, default
    values, and the add_argument() machinery that both classes use.
    """

    def __init__(self,
                 description,
                 prefix_chars,
                 argument_default,
                 conflict_handler):
        super(_ActionsContainer, self).__init__()

        self.description = description
        self.argument_default = argument_default
        self.prefix_chars = prefix_chars
        self.conflict_handler = conflict_handler

        # set up registries
        self._registries = {}

        # register actions (None is the default when no action= is given)
        self.register('action', None, _StoreAction)
        self.register('action', 'store', _StoreAction)
        self.register('action', 'store_const', _StoreConstAction)
        self.register('action', 'store_true', _StoreTrueAction)
        self.register('action', 'store_false', _StoreFalseAction)
        self.register('action', 'append', _AppendAction)
        self.register('action', 'append_const', _AppendConstAction)
        self.register('action', 'count', _CountAction)
        self.register('action', 'help', _HelpAction)
        self.register('action', 'version', _VersionAction)
        self.register('action', 'parsers', _SubParsersAction)

        # raise an exception if the conflict handler is invalid
        self._get_handler()

        # action storage
        self._actions = []
        self._option_string_actions = {}

        # groups
        self._action_groups = []
        self._mutually_exclusive_groups = []

        # defaults storage
        self._defaults = {}

        # determines whether an "option" looks like a negative number
        self._negative_number_matcher = _re.compile(r'^-\d+$|^-\d*\.\d+$')

        # whether or not there are any optionals that look like negative
        # numbers -- uses a list so it can be shared and edited
        self._has_negative_number_optionals = []

    # ====================
    # Registration methods
    # ====================
    def register(self, registry_name, value, object):
        """Map *value* to *object* in the named registry (e.g. 'action')."""
        registry = self._registries.setdefault(registry_name, {})
        registry[value] = object

    def _registry_get(self, registry_name, value, default=None):
        # Look up *value* in a registry, falling back to *default*.
        return self._registries[registry_name].get(value, default)

    # ==================================
    # Namespace default accessor methods
    # ==================================
    def set_defaults(self, **kwargs):
        """Set parser-level defaults, overriding per-action defaults too."""
        self._defaults.update(kwargs)

        # if these defaults match any existing arguments, replace
        # the previous default on the object with the new one
        for action in self._actions:
            if action.dest in kwargs:
                action.default = kwargs[action.dest]

    def get_default(self, dest):
        """Return the default for *dest*: action default first, then
        parser-level default, then None."""
        for action in self._actions:
            if action.dest == dest and action.default is not None:
                return action.default
        return self._defaults.get(dest, None)


    # =======================
    # Adding argument actions
    # =======================
    def add_argument(self, *args, **kwargs):
        """
        add_argument(dest, ..., name=value, ...)
        add_argument(option_string, option_string, ..., name=value, ...)

        Create an Action from the given specification, resolve conflicts
        with existing options, and return the new action.
        """

        # if no positional args are supplied or only one is supplied and
        # it doesn't look like an option string, parse a positional
        # argument
        chars = self.prefix_chars
        if not args or len(args) == 1 and args[0][0] not in chars:
            if args and 'dest' in kwargs:
                raise ValueError('dest supplied twice for positional argument')
            kwargs = self._get_positional_kwargs(*args, **kwargs)

        # otherwise, we're adding an optional argument
        else:
            kwargs = self._get_optional_kwargs(*args, **kwargs)

        # if no default was supplied, use the parser-level default
        if 'default' not in kwargs:
            dest = kwargs['dest']
            if dest in self._defaults:
                kwargs['default'] = self._defaults[dest]
            elif self.argument_default is not None:
                kwargs['default'] = self.argument_default

        # create the action object, and add it to the parser
        action_class = self._pop_action_class(kwargs)
        if not _callable(action_class):
            raise ValueError('unknown action "%s"' % action_class)
        action = action_class(**kwargs)

        # raise an error if the action type is not callable
        type_func = self._registry_get('type', action.type, action.type)
        if not _callable(type_func):
            raise ValueError('%r is not callable' % type_func)

        return self._add_action(action)

    def add_argument_group(self, *args, **kwargs):
        """Create a new _ArgumentGroup tied to this container."""
        group = _ArgumentGroup(self, *args, **kwargs)
        self._action_groups.append(group)
        return group

    def add_mutually_exclusive_group(self, **kwargs):
        """Create a new _MutuallyExclusiveGroup tied to this container."""
        group = _MutuallyExclusiveGroup(self, **kwargs)
        self._mutually_exclusive_groups.append(group)
        return group

    def _add_action(self, action):
        # Register *action* with this container and index its option strings.
        # resolve any conflicts
        self._check_conflict(action)

        # add to actions list
        self._actions.append(action)
        action.container = self

        # index the action by any option strings it has
        for option_string in action.option_strings:
            self._option_string_actions[option_string] = action

        # set the flag if any option strings look like negative numbers
        for option_string in action.option_strings:
            if self._negative_number_matcher.match(option_string):
                if not self._has_negative_number_optionals:
                    self._has_negative_number_optionals.append(True)

        # return the created action
        return action

    def _remove_action(self, action):
        self._actions.remove(action)

    def _add_container_actions(self, container):
        # Merge another container's actions/groups into this one (used to
        # implement the parents= feature of ArgumentParser).
        # collect groups by titles
        title_group_map = {}
        for group in self._action_groups:
            if group.title in title_group_map:
                msg = _('cannot merge actions - two groups are named %r')
                raise ValueError(msg % (group.title))
            title_group_map[group.title] = group

        # map each action to its group
        group_map = {}
        for group in container._action_groups:

            # if a group with the title exists, use that, otherwise
            # create a new group matching the container's group
            if group.title not in title_group_map:
                title_group_map[group.title] = self.add_argument_group(
                    title=group.title,
                    description=group.description,
                    conflict_handler=group.conflict_handler)

            # map the actions to their new group
            for action in group._group_actions:
                group_map[action] = title_group_map[group.title]

        # add container's mutually exclusive groups
        # NOTE: if add_mutually_exclusive_group ever gains title= and
        # description= then this code will need to be expanded as above
        for group in container._mutually_exclusive_groups:
            mutex_group = self.add_mutually_exclusive_group(
                required=group.required)

            # map the actions to their new mutex group
            for action in group._group_actions:
                group_map[action] = mutex_group

        # add all actions to this container or their group
        for action in container._actions:
            group_map.get(action, self)._add_action(action)

    def _get_positional_kwargs(self, dest, **kwargs):
        # Build the kwargs for a positional argument's Action.
        # make sure required is not specified
        if 'required' in kwargs:
            msg = _("'required' is an invalid argument for positionals")
            raise TypeError(msg)

        # mark positional arguments as required if at least one is
        # always required
        if kwargs.get('nargs') not in [OPTIONAL, ZERO_OR_MORE]:
            kwargs['required'] = True
        if kwargs.get('nargs') == ZERO_OR_MORE and 'default' not in kwargs:
            kwargs['required'] = True

        # return the keyword arguments with no option strings
        return dict(kwargs, dest=dest, option_strings=[])

    def _get_optional_kwargs(self, *args, **kwargs):
        # Build the kwargs for an optional argument's Action, inferring
        # dest from the option strings when it is not given explicitly.
        # determine short and long option strings
        option_strings = []
        long_option_strings = []
        for option_string in args:
            # error on strings that don't start with an appropriate prefix
            if not option_string[0] in self.prefix_chars:
                msg = _('invalid option string %r: '
                        'must start with a character %r')
                tup = option_string, self.prefix_chars
                raise ValueError(msg % tup)

            # strings starting with two prefix characters are long options
            option_strings.append(option_string)
            if option_string[0] in self.prefix_chars:
                if len(option_string) > 1:
                    if option_string[1] in self.prefix_chars:
                        long_option_strings.append(option_string)

        # infer destination, '--foo-bar' -> 'foo_bar' and '-x' -> 'x'
        dest = kwargs.pop('dest', None)
        if dest is None:
            if long_option_strings:
                dest_option_string = long_option_strings[0]
            else:
                dest_option_string = option_strings[0]
            dest = dest_option_string.lstrip(self.prefix_chars)
            if not dest:
                msg = _('dest= is required for options like %r')
                raise ValueError(msg % option_string)
            dest = dest.replace('-', '_')

        # return the updated keyword arguments
        return dict(kwargs, dest=dest, option_strings=option_strings)

    def _pop_action_class(self, kwargs, default=None):
        # Resolve action= (string or class) to an Action class.
        action = kwargs.pop('action', default)
        return self._registry_get('action', action, action)

    def _get_handler(self):
        # determine function from conflict handler string
        handler_func_name = '_handle_conflict_%s' % self.conflict_handler
        try:
            return getattr(self, handler_func_name)
        except AttributeError:
            msg = _('invalid conflict_resolution value: %r')
            raise ValueError(msg % self.conflict_handler)

    def _check_conflict(self, action):
        # Detect option strings already claimed by another action and
        # delegate to the configured conflict handler ('error' or 'resolve').

        # find all options that conflict with this option
        confl_optionals = []
        for option_string in action.option_strings:
            if option_string in self._option_string_actions:
                confl_optional = self._option_string_actions[option_string]
                confl_optionals.append((option_string, confl_optional))

        # resolve any conflicts
        if confl_optionals:
            conflict_handler = self._get_handler()
            conflict_handler(action, confl_optionals)

    def _handle_conflict_error(self, action, conflicting_actions):
        # 'error' strategy: refuse to redefine an existing option string.
        message = _('conflicting option string(s): %s')
        conflict_string = ', '.join([option_string
                                     for option_string, action
                                     in conflicting_actions])
        raise ArgumentError(action, message % conflict_string)

    def _handle_conflict_resolve(self, action, conflicting_actions):
        # 'resolve' strategy: the newer action wins; strip the clashing
        # option strings from the older actions.

        # remove all conflicting options
        for option_string, action in conflicting_actions:

            # remove the conflicting option
            action.option_strings.remove(option_string)
            self._option_string_actions.pop(option_string, None)

            # if the option now has no option string, remove it from the
            # container holding it
            if not action.option_strings:
                action.container._remove_action(action)
1481 | ||
1482 | ||
class _ArgumentGroup(_ActionsContainer):
    """A titled subset of a container's actions (for help-text grouping).

    The group aliases — not copies — the owning container's registries,
    action lists and defaults, so anything added to the group is seen by
    the parser as well.
    """

    def __init__(self, container, title=None, description=None, **kwargs):
        # Inherit any settings the caller did not specify explicitly.
        kwargs.setdefault('conflict_handler', container.conflict_handler)
        kwargs.setdefault('prefix_chars', container.prefix_chars)
        kwargs.setdefault('argument_default', container.argument_default)
        super(_ArgumentGroup, self).__init__(description=description, **kwargs)

        # Attributes specific to the group itself.
        self.title = title
        self._group_actions = []

        # Alias the container's storage so additions are shared both ways.
        self._registries = container._registries
        self._actions = container._actions
        self._option_string_actions = container._option_string_actions
        self._defaults = container._defaults
        self._has_negative_number_optionals = \
            container._has_negative_number_optionals

    def _add_action(self, action):
        added = super(_ArgumentGroup, self)._add_action(action)
        self._group_actions.append(added)
        return added

    def _remove_action(self, action):
        super(_ArgumentGroup, self)._remove_action(action)
        self._group_actions.remove(action)
1514 | ||
1515 | ||
class _MutuallyExclusiveGroup(_ArgumentGroup):
    """Group whose member options may not appear together on one command
    line.  Actions are stored on the owning container; this group only
    tracks membership for the exclusivity check."""

    def __init__(self, container, required=False):
        super(_MutuallyExclusiveGroup, self).__init__(container)
        self.required = required
        self._container = container

    def _add_action(self, action):
        # A required option could never be mutually exclusive with others.
        if action.required:
            msg = _('mutually exclusive arguments must be optional')
            raise ValueError(msg)
        added = self._container._add_action(action)
        self._group_actions.append(added)
        return added

    def _remove_action(self, action):
        self._container._remove_action(action)
        self._group_actions.remove(action)
1534 | ||
1535 | ||
1536 | class ArgumentParser(_AttributeHolder, _ActionsContainer): | |
1537 | """Object for parsing command line strings into Python objects. | |
1538 | ||
1539 | Keyword Arguments: | |
1540 | - prog -- The name of the program (default: sys.argv[0]) | |
1541 | - usage -- A usage message (default: auto-generated from arguments) | |
1542 | - description -- A description of what the program does | |
1543 | - epilog -- Text following the argument descriptions | |
1544 | - parents -- Parsers whose arguments should be copied into this one | |
1545 | - formatter_class -- HelpFormatter class for printing help messages | |
1546 | - prefix_chars -- Characters that prefix optional arguments | |
1547 | - fromfile_prefix_chars -- Characters that prefix files containing | |
1548 | additional arguments | |
1549 | - argument_default -- The default value for all arguments | |
1550 | - conflict_handler -- String indicating how to handle conflicts | |
1551 | - add_help -- Add a -h/-help option | |
1552 | """ | |
1553 | ||
    def __init__(self,
                 prog=None,
                 usage=None,
                 description=None,
                 epilog=None,
                 version=None,
                 parents=[],
                 formatter_class=HelpFormatter,
                 prefix_chars='-',
                 fromfile_prefix_chars=None,
                 argument_default=None,
                 conflict_handler='error',
                 add_help=True):
        # NOTE: parents=[] is a mutable default, but it is only iterated
        # (never mutated), so sharing the list between calls is harmless.

        # version= at the parser level is deprecated in favor of
        # add_argument(..., action='version').
        if version is not None:
            import warnings
            warnings.warn(
                """The "version" argument to ArgumentParser is deprecated. """
                """Please use """
                """"add_argument(..., action='version', version="N", ...)" """
                """instead""", DeprecationWarning)

        superinit = super(ArgumentParser, self).__init__
        superinit(description=description,
                  prefix_chars=prefix_chars,
                  argument_default=argument_default,
                  conflict_handler=conflict_handler)

        # default setting for prog
        if prog is None:
            prog = _os.path.basename(_sys.argv[0])

        self.prog = prog
        self.usage = usage
        self.epilog = epilog
        self.version = version
        self.formatter_class = formatter_class
        self.fromfile_prefix_chars = fromfile_prefix_chars
        self.add_help = add_help

        # The two default groups shown in help output; subparsers are
        # created lazily by add_subparsers().
        add_group = self.add_argument_group
        self._positionals = add_group(_('positional arguments'))
        self._optionals = add_group(_('optional arguments'))
        self._subparsers = None

        # register types (None is the default when no type= is given:
        # the argument string passes through unchanged)
        def identity(string):
            return string
        self.register('type', None, identity)

        # add help and version arguments if necessary
        # (using explicit default to override global argument_default)
        if '-' in prefix_chars:
            default_prefix = '-'
        else:
            default_prefix = prefix_chars[0]
        if self.add_help:
            self.add_argument(
                default_prefix+'h', default_prefix*2+'help',
                action='help', default=SUPPRESS,
                help=_('show this help message and exit'))
        if self.version:
            self.add_argument(
                default_prefix+'v', default_prefix*2+'version',
                action='version', default=SUPPRESS,
                version=self.version,
                help=_("show program's version number and exit"))

        # add parent arguments and defaults (the try/except tolerates
        # parent objects that lack a _defaults attribute)
        for parent in parents:
            self._add_container_actions(parent)
            try:
                defaults = parent._defaults
            except AttributeError:
                pass
            else:
                self._defaults.update(defaults)
1631 | ||
1632 | # ======================= | |
1633 | # Pretty __repr__ methods | |
1634 | # ======================= | |
1635 | def _get_kwargs(self): | |
1636 | names = [ | |
1637 | 'prog', | |
1638 | 'usage', | |
1639 | 'description', | |
1640 | 'version', | |
1641 | 'formatter_class', | |
1642 | 'conflict_handler', | |
1643 | 'add_help', | |
1644 | ] | |
1645 | return [(name, getattr(self, name)) for name in names] | |
1646 | ||
1647 | # ================================== | |
1648 | # Optional/Positional adding methods | |
1649 | # ================================== | |
    def add_subparsers(self, **kwargs):
        """Create and return the _SubParsersAction for this parser.

        Only one subparsers action may exist per parser; a second call is
        reported via self.error().
        """
        if self._subparsers is not None:
            self.error(_('cannot have multiple subparser arguments'))

        # add the parser class to the arguments if it's not present
        kwargs.setdefault('parser_class', type(self))

        # NOTE(review): when only one of title/description is given, the
        # other is passed through _() with its default ('subcommands' or
        # None); this relies on gettext returning untranslated input
        # unchanged — confirm against the module's _ binding.
        if 'title' in kwargs or 'description' in kwargs:
            title = _(kwargs.pop('title', 'subcommands'))
            description = _(kwargs.pop('description', None))
            self._subparsers = self.add_argument_group(title, description)
        else:
            self._subparsers = self._positionals

        # prog defaults to the usage message of this parser, skipping
        # optional arguments and with no "usage:" prefix
        if kwargs.get('prog') is None:
            formatter = self._get_formatter()
            positionals = self._get_positional_actions()
            groups = self._mutually_exclusive_groups
            formatter.add_usage(self.usage, positionals, groups, '')
            kwargs['prog'] = formatter.format_help().strip()

        # create the parsers action and add it to the positionals list
        parsers_class = self._pop_action_class(kwargs, 'parsers')
        action = parsers_class(option_strings=[], **kwargs)
        self._subparsers._add_action(action)

        # return the created parsers action
        return action
1680 | ||
1681 | def _add_action(self, action): | |
1682 | if action.option_strings: | |
1683 | self._optionals._add_action(action) | |
1684 | else: | |
1685 | self._positionals._add_action(action) | |
1686 | return action | |
1687 | ||
1688 | def _get_optional_actions(self): | |
1689 | return [action | |
1690 | for action in self._actions | |
1691 | if action.option_strings] | |
1692 | ||
1693 | def _get_positional_actions(self): | |
1694 | return [action | |
1695 | for action in self._actions | |
1696 | if not action.option_strings] | |
1697 | ||
1698 | # ===================================== | |
1699 | # Command line argument parsing methods | |
1700 | # ===================================== | |
1701 | def parse_args(self, args=None, namespace=None): | |
1702 | args, argv = self.parse_known_args(args, namespace) | |
1703 | if argv: | |
1704 | msg = _('unrecognized arguments: %s') | |
1705 | self.error(msg % ' '.join(argv)) | |
1706 | return args | |
1707 | ||
    def parse_known_args(self, args=None, namespace=None):
        """Parse known arguments, returning (namespace, leftover_strings).

        Unrecognized argument strings are returned rather than treated as
        errors; parse_args() layers the error behavior on top of this.
        """
        # args default to the system args
        if args is None:
            args = _sys.argv[1:]

        # default Namespace built from parser defaults
        if namespace is None:
            namespace = Namespace()

        # add any action defaults that aren't present
        for action in self._actions:
            if action.dest is not SUPPRESS:
                if not hasattr(namespace, action.dest):
                    if action.default is not SUPPRESS:
                        default = action.default
                        # String defaults are converted with the action's
                        # type= function, matching command-line input.
                        # NOTE(review): basestring is a Python-2-only name;
                        # presumably this compat module targets py2 or a
                        # shim provides it — confirm before running on py3.
                        if isinstance(action.default, basestring):
                            default = self._get_value(action, default)
                        setattr(namespace, action.dest, default)

        # add any parser defaults that aren't present
        for dest in self._defaults:
            if not hasattr(namespace, dest):
                setattr(namespace, dest, self._defaults[dest])

        # parse the arguments and exit if there are any errors
        try:
            namespace, args = self._parse_known_args(args, namespace)
            # Merge back strings that subparsers could not recognize.
            if hasattr(namespace, _UNRECOGNIZED_ARGS_ATTR):
                args.extend(getattr(namespace, _UNRECOGNIZED_ARGS_ATTR))
                delattr(namespace, _UNRECOGNIZED_ARGS_ATTR)
            return namespace, args
        except ArgumentError:
            # sys.exc_info() keeps this compatible with very old Pythons
            # that lack "except ... as err" syntax support in this codebase.
            err = _sys.exc_info()[1]
            self.error(str(err))
1742 | ||
1743 | def _parse_known_args(self, arg_strings, namespace): | |
1744 | # replace arg strings that are file references | |
1745 | if self.fromfile_prefix_chars is not None: | |
1746 | arg_strings = self._read_args_from_files(arg_strings) | |
1747 | ||
1748 | # map all mutually exclusive arguments to the other arguments | |
1749 | # they can't occur with | |
1750 | action_conflicts = {} | |
1751 | for mutex_group in self._mutually_exclusive_groups: | |
1752 | group_actions = mutex_group._group_actions | |
1753 | for i, mutex_action in enumerate(mutex_group._group_actions): | |
1754 | conflicts = action_conflicts.setdefault(mutex_action, []) | |
1755 | conflicts.extend(group_actions[:i]) | |
1756 | conflicts.extend(group_actions[i + 1:]) | |
1757 | ||
1758 | # find all option indices, and determine the arg_string_pattern | |
1759 | # which has an 'O' if there is an option at an index, | |
1760 | # an 'A' if there is an argument, or a '-' if there is a '--' | |
1761 | option_string_indices = {} | |
1762 | arg_string_pattern_parts = [] | |
1763 | arg_strings_iter = iter(arg_strings) | |
1764 | for i, arg_string in enumerate(arg_strings_iter): | |
1765 | ||
1766 | # all args after -- are non-options | |
1767 | if arg_string == '--': | |
1768 | arg_string_pattern_parts.append('-') | |
1769 | for arg_string in arg_strings_iter: | |
1770 | arg_string_pattern_parts.append('A') | |
1771 | ||
1772 | # otherwise, add the arg to the arg strings | |
1773 | # and note the index if it was an option | |
1774 | else: | |
1775 | option_tuple = self._parse_optional(arg_string) | |
1776 | if option_tuple is None: | |
1777 | pattern = 'A' | |
1778 | else: | |
1779 | option_string_indices[i] = option_tuple | |
1780 | pattern = 'O' | |
1781 | arg_string_pattern_parts.append(pattern) | |
1782 | ||
1783 | # join the pieces together to form the pattern | |
1784 | arg_strings_pattern = ''.join(arg_string_pattern_parts) | |
1785 | ||
1786 | # converts arg strings to the appropriate and then takes the action | |
1787 | seen_actions = set() | |
1788 | seen_non_default_actions = set() | |
1789 | ||
1790 | def take_action(action, argument_strings, option_string=None): | |
1791 | seen_actions.add(action) | |
1792 | argument_values = self._get_values(action, argument_strings) | |
1793 | ||
1794 | # error if this argument is not allowed with other previously | |
1795 | # seen arguments, assuming that actions that use the default | |
1796 | # value don't really count as "present" | |
1797 | if argument_values is not action.default: | |
1798 | seen_non_default_actions.add(action) | |
1799 | for conflict_action in action_conflicts.get(action, []): | |
1800 | if conflict_action in seen_non_default_actions: | |
1801 | msg = _('not allowed with argument %s') | |
1802 | action_name = _get_action_name(conflict_action) | |
1803 | raise ArgumentError(action, msg % action_name) | |
1804 | ||
1805 | # take the action if we didn't receive a SUPPRESS value | |
1806 | # (e.g. from a default) | |
1807 | if argument_values is not SUPPRESS: | |
1808 | action(self, namespace, argument_values, option_string) | |
1809 | ||
1810 | # function to convert arg_strings into an optional action | |
1811 | def consume_optional(start_index): | |
1812 | ||
1813 | # get the optional identified at this index | |
1814 | option_tuple = option_string_indices[start_index] | |
1815 | action, option_string, explicit_arg = option_tuple | |
1816 | ||
1817 | # identify additional optionals in the same arg string | |
1818 | # (e.g. -xyz is the same as -x -y -z if no args are required) | |
1819 | match_argument = self._match_argument | |
1820 | action_tuples = [] | |
1821 | while True: | |
1822 | ||
1823 | # if we found no optional action, skip it | |
1824 | if action is None: | |
1825 | extras.append(arg_strings[start_index]) | |
1826 | return start_index + 1 | |
1827 | ||
1828 | # if there is an explicit argument, try to match the | |
1829 | # optional's string arguments to only this | |
1830 | if explicit_arg is not None: | |
1831 | arg_count = match_argument(action, 'A') | |
1832 | ||
1833 | # if the action is a single-dash option and takes no | |
1834 | # arguments, try to parse more single-dash options out | |
1835 | # of the tail of the option string | |
1836 | chars = self.prefix_chars | |
1837 | if arg_count == 0 and option_string[1] not in chars: | |
1838 | action_tuples.append((action, [], option_string)) | |
1839 | char = option_string[0] | |
1840 | option_string = char + explicit_arg[0] | |
1841 | new_explicit_arg = explicit_arg[1:] or None | |
1842 | optionals_map = self._option_string_actions | |
1843 | if option_string in optionals_map: | |
1844 | action = optionals_map[option_string] | |
1845 | explicit_arg = new_explicit_arg | |
1846 | else: | |
1847 | msg = _('ignored explicit argument %r') | |
1848 | raise ArgumentError(action, msg % explicit_arg) | |
1849 | ||
1850 | # if the action expect exactly one argument, we've | |
1851 | # successfully matched the option; exit the loop | |
1852 | elif arg_count == 1: | |
1853 | stop = start_index + 1 | |
1854 | args = [explicit_arg] | |
1855 | action_tuples.append((action, args, option_string)) | |
1856 | break | |
1857 | ||
1858 | # error if a double-dash option did not use the | |
1859 | # explicit argument | |
1860 | else: | |
1861 | msg = _('ignored explicit argument %r') | |
1862 | raise ArgumentError(action, msg % explicit_arg) | |
1863 | ||
1864 | # if there is no explicit argument, try to match the | |
1865 | # optional's string arguments with the following strings | |
1866 | # if successful, exit the loop | |
1867 | else: | |
1868 | start = start_index + 1 | |
1869 | selected_patterns = arg_strings_pattern[start:] | |
1870 | arg_count = match_argument(action, selected_patterns) | |
1871 | stop = start + arg_count | |
1872 | args = arg_strings[start:stop] | |
1873 | action_tuples.append((action, args, option_string)) | |
1874 | break | |
1875 | ||
1876 | # add the Optional to the list and return the index at which | |
1877 | # the Optional's string args stopped | |
1878 | assert action_tuples | |
1879 | for action, args, option_string in action_tuples: | |
1880 | take_action(action, args, option_string) | |
1881 | return stop | |
1882 | ||
1883 | # the list of Positionals left to be parsed; this is modified | |
1884 | # by consume_positionals() | |
1885 | positionals = self._get_positional_actions() | |
1886 | ||
1887 | # function to convert arg_strings into positional actions | |
1888 | def consume_positionals(start_index): | |
1889 | # match as many Positionals as possible | |
1890 | match_partial = self._match_arguments_partial | |
1891 | selected_pattern = arg_strings_pattern[start_index:] | |
1892 | arg_counts = match_partial(positionals, selected_pattern) | |
1893 | ||
1894 | # slice off the appropriate arg strings for each Positional | |
1895 | # and add the Positional and its args to the list | |
1896 | for action, arg_count in zip(positionals, arg_counts): | |
1897 | args = arg_strings[start_index: start_index + arg_count] | |
1898 | start_index += arg_count | |
1899 | take_action(action, args) | |
1900 | ||
1901 | # slice off the Positionals that we just parsed and return the | |
1902 | # index at which the Positionals' string args stopped | |
1903 | positionals[:] = positionals[len(arg_counts):] | |
1904 | return start_index | |
1905 | ||
1906 | # consume Positionals and Optionals alternately, until we have | |
1907 | # passed the last option string | |
1908 | extras = [] | |
1909 | start_index = 0 | |
1910 | if option_string_indices: | |
1911 | max_option_string_index = max(option_string_indices) | |
1912 | else: | |
1913 | max_option_string_index = -1 | |
1914 | while start_index <= max_option_string_index: | |
1915 | ||
1916 | # consume any Positionals preceding the next option | |
1917 | next_option_string_index = min([ | |
1918 | index | |
1919 | for index in option_string_indices | |
1920 | if index >= start_index]) | |
1921 | if start_index != next_option_string_index: | |
1922 | positionals_end_index = consume_positionals(start_index) | |
1923 | ||
1924 | # only try to parse the next optional if we didn't consume | |
1925 | # the option string during the positionals parsing | |
1926 | if positionals_end_index > start_index: | |
1927 | start_index = positionals_end_index | |
1928 | continue | |
1929 | else: | |
1930 | start_index = positionals_end_index | |
1931 | ||
1932 | # if we consumed all the positionals we could and we're not | |
1933 | # at the index of an option string, there were extra arguments | |
1934 | if start_index not in option_string_indices: | |
1935 | strings = arg_strings[start_index:next_option_string_index] | |
1936 | extras.extend(strings) | |
1937 | start_index = next_option_string_index | |
1938 | ||
1939 | # consume the next optional and any arguments for it | |
1940 | start_index = consume_optional(start_index) | |
1941 | ||
1942 | # consume any positionals following the last Optional | |
1943 | stop_index = consume_positionals(start_index) | |
1944 | ||
1945 | # if we didn't consume all the argument strings, there were extras | |
1946 | extras.extend(arg_strings[stop_index:]) | |
1947 | ||
1948 | # if we didn't use all the Positional objects, there were too few | |
1949 | # arg strings supplied. | |
1950 | if positionals: | |
1951 | self.error(_('too few arguments')) | |
1952 | ||
1953 | # make sure all required actions were present | |
1954 | for action in self._actions: | |
1955 | if action.required: | |
1956 | if action not in seen_actions: | |
1957 | name = _get_action_name(action) | |
1958 | self.error(_('argument %s is required') % name) | |
1959 | ||
1960 | # make sure all required groups had one option present | |
1961 | for group in self._mutually_exclusive_groups: | |
1962 | if group.required: | |
1963 | for action in group._group_actions: | |
1964 | if action in seen_non_default_actions: | |
1965 | break | |
1966 | ||
1967 | # if no actions were used, report the error | |
1968 | else: | |
1969 | names = [_get_action_name(action) | |
1970 | for action in group._group_actions | |
1971 | if action.help is not SUPPRESS] | |
1972 | msg = _('one of the arguments %s is required') | |
1973 | self.error(msg % ' '.join(names)) | |
1974 | ||
1975 | # return the updated namespace and the extra arguments | |
1976 | return namespace, extras | |
1977 | ||
1978 | def _read_args_from_files(self, arg_strings): | |
1979 | # expand arguments referencing files | |
1980 | new_arg_strings = [] | |
1981 | for arg_string in arg_strings: | |
1982 | ||
1983 | # for regular arguments, just add them back into the list | |
1984 | if arg_string[0] not in self.fromfile_prefix_chars: | |
1985 | new_arg_strings.append(arg_string) | |
1986 | ||
1987 | # replace arguments referencing files with the file content | |
1988 | else: | |
1989 | try: | |
1990 | args_file = open(arg_string[1:]) | |
1991 | try: | |
1992 | arg_strings = [] | |
1993 | for arg_line in args_file.read().splitlines(): | |
1994 | for arg in self.convert_arg_line_to_args(arg_line): | |
1995 | arg_strings.append(arg) | |
1996 | arg_strings = self._read_args_from_files(arg_strings) | |
1997 | new_arg_strings.extend(arg_strings) | |
1998 | finally: | |
1999 | args_file.close() | |
2000 | except IOError: | |
2001 | err = _sys.exc_info()[1] | |
2002 | self.error(str(err)) | |
2003 | ||
2004 | # return the modified argument list | |
2005 | return new_arg_strings | |
2006 | ||
2007 | def convert_arg_line_to_args(self, arg_line): | |
2008 | return [arg_line] | |
2009 | ||
2010 | def _match_argument(self, action, arg_strings_pattern): | |
2011 | # match the pattern for this action to the arg strings | |
2012 | nargs_pattern = self._get_nargs_pattern(action) | |
2013 | match = _re.match(nargs_pattern, arg_strings_pattern) | |
2014 | ||
2015 | # raise an exception if we weren't able to find a match | |
2016 | if match is None: | |
2017 | nargs_errors = { | |
2018 | None: _('expected one argument'), | |
2019 | OPTIONAL: _('expected at most one argument'), | |
2020 | ONE_OR_MORE: _('expected at least one argument'), | |
2021 | } | |
2022 | default = _('expected %s argument(s)') % action.nargs | |
2023 | msg = nargs_errors.get(action.nargs, default) | |
2024 | raise ArgumentError(action, msg) | |
2025 | ||
2026 | # return the number of arguments matched | |
2027 | return len(match.group(1)) | |
2028 | ||
2029 | def _match_arguments_partial(self, actions, arg_strings_pattern): | |
2030 | # progressively shorten the actions list by slicing off the | |
2031 | # final actions until we find a match | |
2032 | result = [] | |
2033 | for i in range(len(actions), 0, -1): | |
2034 | actions_slice = actions[:i] | |
2035 | pattern = ''.join([self._get_nargs_pattern(action) | |
2036 | for action in actions_slice]) | |
2037 | match = _re.match(pattern, arg_strings_pattern) | |
2038 | if match is not None: | |
2039 | result.extend([len(string) for string in match.groups()]) | |
2040 | break | |
2041 | ||
2042 | # return the list of arg string counts | |
2043 | return result | |
2044 | ||
2045 | def _parse_optional(self, arg_string): | |
2046 | # if it's an empty string, it was meant to be a positional | |
2047 | if not arg_string: | |
2048 | return None | |
2049 | ||
2050 | # if it doesn't start with a prefix, it was meant to be positional | |
2051 | if not arg_string[0] in self.prefix_chars: | |
2052 | return None | |
2053 | ||
2054 | # if the option string is present in the parser, return the action | |
2055 | if arg_string in self._option_string_actions: | |
2056 | action = self._option_string_actions[arg_string] | |
2057 | return action, arg_string, None | |
2058 | ||
2059 | # if it's just a single character, it was meant to be positional | |
2060 | if len(arg_string) == 1: | |
2061 | return None | |
2062 | ||
2063 | # if the option string before the "=" is present, return the action | |
2064 | if '=' in arg_string: | |
2065 | option_string, explicit_arg = arg_string.split('=', 1) | |
2066 | if option_string in self._option_string_actions: | |
2067 | action = self._option_string_actions[option_string] | |
2068 | return action, option_string, explicit_arg | |
2069 | ||
2070 | # search through all possible prefixes of the option string | |
2071 | # and all actions in the parser for possible interpretations | |
2072 | option_tuples = self._get_option_tuples(arg_string) | |
2073 | ||
2074 | # if multiple actions match, the option string was ambiguous | |
2075 | if len(option_tuples) > 1: | |
2076 | options = ', '.join([option_string | |
2077 | for action, option_string, explicit_arg in option_tuples]) | |
2078 | tup = arg_string, options | |
2079 | self.error(_('ambiguous option: %s could match %s') % tup) | |
2080 | ||
2081 | # if exactly one action matched, this segmentation is good, | |
2082 | # so return the parsed action | |
2083 | elif len(option_tuples) == 1: | |
2084 | option_tuple, = option_tuples | |
2085 | return option_tuple | |
2086 | ||
2087 | # if it was not found as an option, but it looks like a negative | |
2088 | # number, it was meant to be positional | |
2089 | # unless there are negative-number-like options | |
2090 | if self._negative_number_matcher.match(arg_string): | |
2091 | if not self._has_negative_number_optionals: | |
2092 | return None | |
2093 | ||
2094 | # if it contains a space, it was meant to be a positional | |
2095 | if ' ' in arg_string: | |
2096 | return None | |
2097 | ||
2098 | # it was meant to be an optional but there is no such option | |
2099 | # in this parser (though it might be a valid option in a subparser) | |
2100 | return None, arg_string, None | |
2101 | ||
2102 | def _get_option_tuples(self, option_string): | |
2103 | result = [] | |
2104 | ||
2105 | # option strings starting with two prefix characters are only | |
2106 | # split at the '=' | |
2107 | chars = self.prefix_chars | |
2108 | if option_string[0] in chars and option_string[1] in chars: | |
2109 | if '=' in option_string: | |
2110 | option_prefix, explicit_arg = option_string.split('=', 1) | |
2111 | else: | |
2112 | option_prefix = option_string | |
2113 | explicit_arg = None | |
2114 | for option_string in self._option_string_actions: | |
2115 | if option_string.startswith(option_prefix): | |
2116 | action = self._option_string_actions[option_string] | |
2117 | tup = action, option_string, explicit_arg | |
2118 | result.append(tup) | |
2119 | ||
2120 | # single character options can be concatenated with their arguments | |
2121 | # but multiple character options always have to have their argument | |
2122 | # separate | |
2123 | elif option_string[0] in chars and option_string[1] not in chars: | |
2124 | option_prefix = option_string | |
2125 | explicit_arg = None | |
2126 | short_option_prefix = option_string[:2] | |
2127 | short_explicit_arg = option_string[2:] | |
2128 | ||
2129 | for option_string in self._option_string_actions: | |
2130 | if option_string == short_option_prefix: | |
2131 | action = self._option_string_actions[option_string] | |
2132 | tup = action, option_string, short_explicit_arg | |
2133 | result.append(tup) | |
2134 | elif option_string.startswith(option_prefix): | |
2135 | action = self._option_string_actions[option_string] | |
2136 | tup = action, option_string, explicit_arg | |
2137 | result.append(tup) | |
2138 | ||
2139 | # shouldn't ever get here | |
2140 | else: | |
2141 | self.error(_('unexpected option string: %s') % option_string) | |
2142 | ||
2143 | # return the collected option tuples | |
2144 | return result | |
2145 | ||
2146 | def _get_nargs_pattern(self, action): | |
2147 | # in all examples below, we have to allow for '--' args | |
2148 | # which are represented as '-' in the pattern | |
2149 | nargs = action.nargs | |
2150 | ||
2151 | # the default (None) is assumed to be a single argument | |
2152 | if nargs is None: | |
2153 | nargs_pattern = '(-*A-*)' | |
2154 | ||
2155 | # allow zero or one arguments | |
2156 | elif nargs == OPTIONAL: | |
2157 | nargs_pattern = '(-*A?-*)' | |
2158 | ||
2159 | # allow zero or more arguments | |
2160 | elif nargs == ZERO_OR_MORE: | |
2161 | nargs_pattern = '(-*[A-]*)' | |
2162 | ||
2163 | # allow one or more arguments | |
2164 | elif nargs == ONE_OR_MORE: | |
2165 | nargs_pattern = '(-*A[A-]*)' | |
2166 | ||
2167 | # allow any number of options or arguments | |
2168 | elif nargs == REMAINDER: | |
2169 | nargs_pattern = '([-AO]*)' | |
2170 | ||
2171 | # allow one argument followed by any number of options or arguments | |
2172 | elif nargs == PARSER: | |
2173 | nargs_pattern = '(-*A[-AO]*)' | |
2174 | ||
2175 | # all others should be integers | |
2176 | else: | |
2177 | nargs_pattern = '(-*%s-*)' % '-*'.join('A' * nargs) | |
2178 | ||
2179 | # if this is an optional action, -- is not allowed | |
2180 | if action.option_strings: | |
2181 | nargs_pattern = nargs_pattern.replace('-*', '') | |
2182 | nargs_pattern = nargs_pattern.replace('-', '') | |
2183 | ||
2184 | # return the pattern | |
2185 | return nargs_pattern | |
2186 | ||
2187 | # ======================== | |
2188 | # Value conversion methods | |
2189 | # ======================== | |
2190 | def _get_values(self, action, arg_strings): | |
2191 | # for everything but PARSER args, strip out '--' | |
2192 | if action.nargs not in [PARSER, REMAINDER]: | |
2193 | arg_strings = [s for s in arg_strings if s != '--'] | |
2194 | ||
2195 | # optional argument produces a default when not present | |
2196 | if not arg_strings and action.nargs == OPTIONAL: | |
2197 | if action.option_strings: | |
2198 | value = action.const | |
2199 | else: | |
2200 | value = action.default | |
2201 | if isinstance(value, basestring): | |
2202 | value = self._get_value(action, value) | |
2203 | self._check_value(action, value) | |
2204 | ||
2205 | # when nargs='*' on a positional, if there were no command-line | |
2206 | # args, use the default if it is anything other than None | |
2207 | elif (not arg_strings and action.nargs == ZERO_OR_MORE and | |
2208 | not action.option_strings): | |
2209 | if action.default is not None: | |
2210 | value = action.default | |
2211 | else: | |
2212 | value = arg_strings | |
2213 | self._check_value(action, value) | |
2214 | ||
2215 | # single argument or optional argument produces a single value | |
2216 | elif len(arg_strings) == 1 and action.nargs in [None, OPTIONAL]: | |
2217 | arg_string, = arg_strings | |
2218 | value = self._get_value(action, arg_string) | |
2219 | self._check_value(action, value) | |
2220 | ||
2221 | # REMAINDER arguments convert all values, checking none | |
2222 | elif action.nargs == REMAINDER: | |
2223 | value = [self._get_value(action, v) for v in arg_strings] | |
2224 | ||
2225 | # PARSER arguments convert all values, but check only the first | |
2226 | elif action.nargs == PARSER: | |
2227 | value = [self._get_value(action, v) for v in arg_strings] | |
2228 | self._check_value(action, value[0]) | |
2229 | ||
2230 | # all other types of nargs produce a list | |
2231 | else: | |
2232 | value = [self._get_value(action, v) for v in arg_strings] | |
2233 | for v in value: | |
2234 | self._check_value(action, v) | |
2235 | ||
2236 | # return the converted value | |
2237 | return value | |
2238 | ||
2239 | def _get_value(self, action, arg_string): | |
2240 | type_func = self._registry_get('type', action.type, action.type) | |
2241 | if not _callable(type_func): | |
2242 | msg = _('%r is not callable') | |
2243 | raise ArgumentError(action, msg % type_func) | |
2244 | ||
2245 | # convert the value to the appropriate type | |
2246 | try: | |
2247 | result = type_func(arg_string) | |
2248 | ||
2249 | # ArgumentTypeErrors indicate errors | |
2250 | except ArgumentTypeError: | |
2251 | name = getattr(action.type, '__name__', repr(action.type)) | |
2252 | msg = str(_sys.exc_info()[1]) | |
2253 | raise ArgumentError(action, msg) | |
2254 | ||
2255 | # TypeErrors or ValueErrors also indicate errors | |
2256 | except (TypeError, ValueError): | |
2257 | name = getattr(action.type, '__name__', repr(action.type)) | |
2258 | msg = _('invalid %s value: %r') | |
2259 | raise ArgumentError(action, msg % (name, arg_string)) | |
2260 | ||
2261 | # return the converted value | |
2262 | return result | |
2263 | ||
2264 | def _check_value(self, action, value): | |
2265 | # converted value must be one of the choices (if specified) | |
2266 | if action.choices is not None and value not in action.choices: | |
2267 | tup = value, ', '.join(map(repr, action.choices)) | |
2268 | msg = _('invalid choice: %r (choose from %s)') % tup | |
2269 | raise ArgumentError(action, msg) | |
2270 | ||
2271 | # ======================= | |
2272 | # Help-formatting methods | |
2273 | # ======================= | |
2274 | def format_usage(self): | |
2275 | formatter = self._get_formatter() | |
2276 | formatter.add_usage(self.usage, self._actions, | |
2277 | self._mutually_exclusive_groups) | |
2278 | return formatter.format_help() | |
2279 | ||
2280 | def format_help(self): | |
2281 | formatter = self._get_formatter() | |
2282 | ||
2283 | # usage | |
2284 | formatter.add_usage(self.usage, self._actions, | |
2285 | self._mutually_exclusive_groups) | |
2286 | ||
2287 | # description | |
2288 | formatter.add_text(self.description) | |
2289 | ||
2290 | # positionals, optionals and user-defined groups | |
2291 | for action_group in self._action_groups: | |
2292 | formatter.start_section(action_group.title) | |
2293 | formatter.add_text(action_group.description) | |
2294 | formatter.add_arguments(action_group._group_actions) | |
2295 | formatter.end_section() | |
2296 | ||
2297 | # epilog | |
2298 | formatter.add_text(self.epilog) | |
2299 | ||
2300 | # determine help from format above | |
2301 | return formatter.format_help() | |
2302 | ||
2303 | def format_version(self): | |
2304 | import warnings | |
2305 | warnings.warn( | |
2306 | 'The format_version method is deprecated -- the "version" ' | |
2307 | 'argument to ArgumentParser is no longer supported.', | |
2308 | DeprecationWarning) | |
2309 | formatter = self._get_formatter() | |
2310 | formatter.add_text(self.version) | |
2311 | return formatter.format_help() | |
2312 | ||
2313 | def _get_formatter(self): | |
2314 | return self.formatter_class(prog=self.prog) | |
2315 | ||
2316 | # ===================== | |
2317 | # Help-printing methods | |
2318 | # ===================== | |
2319 | def print_usage(self, file=None): | |
2320 | if file is None: | |
2321 | file = _sys.stdout | |
2322 | self._print_message(self.format_usage(), file) | |
2323 | ||
2324 | def print_help(self, file=None): | |
2325 | if file is None: | |
2326 | file = _sys.stdout | |
2327 | self._print_message(self.format_help(), file) | |
2328 | ||
2329 | def print_version(self, file=None): | |
2330 | import warnings | |
2331 | warnings.warn( | |
2332 | 'The print_version method is deprecated -- the "version" ' | |
2333 | 'argument to ArgumentParser is no longer supported.', | |
2334 | DeprecationWarning) | |
2335 | self._print_message(self.format_version(), file) | |
2336 | ||
2337 | def _print_message(self, message, file=None): | |
2338 | if message: | |
2339 | if file is None: | |
2340 | file = _sys.stderr | |
2341 | file.write(message) | |
2342 | ||
2343 | # =============== | |
2344 | # Exiting methods | |
2345 | # =============== | |
2346 | def exit(self, status=0, message=None): | |
2347 | if message: | |
2348 | self._print_message(message, _sys.stderr) | |
2349 | _sys.exit(status) | |
2350 | ||
2351 | def error(self, message): | |
2352 | """error(message: string) | |
2353 | ||
2354 | Prints a usage message incorporating the message to stderr and | |
2355 | exits. | |
2356 | ||
2357 | If you override this in a subclass, it should not return -- it | |
2358 | should either exit or raise an exception. | |
2359 | """ | |
2360 | self.print_usage(_sys.stderr) | |
2361 | self.exit(2, _('%s: error: %s\n') % (self.prog, message)) |
4 | 4 | |
5 | 5 | # Please remember to run "make -C docs html" after update "desc" attributes. |
6 | 6 | |
7 | import argparse | |
7 | 8 | import copy |
8 | 9 | import grp |
9 | 10 | import inspect |
10 | try: | |
11 | import argparse | |
12 | except ImportError: # python 2.6 | |
13 | from . import argparse_compat as argparse | |
14 | 11 | import os |
15 | 12 | import pwd |
16 | 13 | import re |
14 | import shlex | |
17 | 15 | import ssl |
18 | 16 | import sys |
19 | 17 | import textwrap |
20 | import shlex | |
21 | ||
22 | from gunicorn import __version__ | |
23 | from gunicorn import _compat | |
18 | ||
19 | from gunicorn import __version__, util | |
24 | 20 | from gunicorn.errors import ConfigError |
25 | 21 | from gunicorn.reloader import reloader_engines |
26 | from gunicorn import six | |
27 | from gunicorn import util | |
28 | 22 | |
29 | 23 | KNOWN_SETTINGS = [] |
30 | 24 | PLATFORM = sys.platform |
64 | 58 | def __setattr__(self, name, value): |
65 | 59 | if name != "settings" and name in self.settings: |
66 | 60 | raise AttributeError("Invalid access!") |
67 | super(Config, self).__setattr__(name, value) | |
61 | super().__setattr__(name, value) | |
68 | 62 | |
69 | 63 | def set(self, name, value): |
70 | 64 | if name not in self.settings: |
121 | 115 | @property |
122 | 116 | def address(self): |
123 | 117 | s = self.settings['bind'].get() |
124 | return [util.parse_address(_compat.bytes_to_str(bind)) for bind in s] | |
118 | return [util.parse_address(util.bytes_to_str(bind)) for bind in s] | |
125 | 119 | |
126 | 120 | @property |
127 | 121 | def uid(self): |
182 | 176 | return env |
183 | 177 | |
184 | 178 | for e in raw_env: |
185 | s = _compat.bytes_to_str(e) | |
179 | s = util.bytes_to_str(e) | |
186 | 180 | try: |
187 | 181 | k, v = s.split('=', 1) |
188 | 182 | except ValueError: |
215 | 209 | |
216 | 210 | global_conf = {} |
217 | 211 | for e in raw_global_conf: |
218 | s = _compat.bytes_to_str(e) | |
212 | s = util.bytes_to_str(e) | |
219 | 213 | try: |
220 | 214 | k, v = re.split(r'(?<!\\)=', s, 1) |
221 | 215 | except ValueError: |
229 | 223 | |
230 | 224 | class SettingMeta(type): |
231 | 225 | def __new__(cls, name, bases, attrs): |
232 | super_new = super(SettingMeta, cls).__new__ | |
226 | super_new = super().__new__ | |
233 | 227 | parents = [b for b in bases if isinstance(b, SettingMeta)] |
234 | 228 | if not parents: |
235 | 229 | return super_new(cls, name, bases, attrs) |
304 | 298 | return self.value |
305 | 299 | |
306 | 300 | def set(self, val): |
307 | if not six.callable(self.validator): | |
301 | if not callable(self.validator): | |
308 | 302 | raise TypeError('Invalid validator: %s' % self.name) |
309 | 303 | self.value = self.validator(val) |
310 | 304 | |
322 | 316 | |
323 | 317 | if isinstance(val, bool): |
324 | 318 | return val |
325 | if not isinstance(val, six.string_types): | |
319 | if not isinstance(val, str): | |
326 | 320 | raise TypeError("Invalid type for casting: %s" % val) |
327 | 321 | if val.lower().strip() == "true": |
328 | 322 | return True |
339 | 333 | |
340 | 334 | |
341 | 335 | def validate_pos_int(val): |
342 | if not isinstance(val, six.integer_types): | |
336 | if not isinstance(val, int): | |
343 | 337 | val = int(val, 0) |
344 | 338 | else: |
345 | 339 | # Booleans are ints! |
349 | 343 | return val |
350 | 344 | |
351 | 345 | |
346 | def validate_ssl_version(val): | |
347 | ssl_versions = {} | |
348 | for protocol in [p for p in dir(ssl) if p.startswith("PROTOCOL_")]: | |
349 | ssl_versions[protocol[9:]] = getattr(ssl, protocol) | |
350 | if val in ssl_versions: | |
351 | # string matching PROTOCOL_... | |
352 | return ssl_versions[val] | |
353 | ||
354 | try: | |
355 | intval = validate_pos_int(val) | |
356 | if intval in ssl_versions.values(): | |
357 | # positive int matching a protocol int constant | |
358 | return intval | |
359 | except (ValueError, TypeError): | |
360 | # negative integer or not an integer | |
361 | # drop this in favour of the more descriptive ValueError below | |
362 | pass | |
363 | ||
364 | raise ValueError("Invalid ssl_version: %s. Valid options: %s" | |
365 | % (val, ', '.join(ssl_versions))) | |
366 | ||
367 | ||
352 | 368 | def validate_string(val): |
353 | 369 | if val is None: |
354 | 370 | return None |
355 | if not isinstance(val, six.string_types): | |
371 | if not isinstance(val, str): | |
356 | 372 | raise TypeError("Not a string: %s" % val) |
357 | 373 | return val.strip() |
358 | 374 | |
370 | 386 | return [] |
371 | 387 | |
372 | 388 | # legacy syntax |
373 | if isinstance(val, six.string_types): | |
389 | if isinstance(val, str): | |
374 | 390 | val = [val] |
375 | 391 | |
376 | 392 | return [validate_string(v) for v in val] |
399 | 415 | |
400 | 416 | def validate_callable(arity): |
401 | 417 | def _validate_callable(val): |
402 | if isinstance(val, six.string_types): | |
418 | if isinstance(val, str): | |
403 | 419 | try: |
404 | 420 | mod_name, obj_name = val.rsplit(".", 1) |
405 | 421 | except ValueError: |
413 | 429 | except AttributeError: |
414 | 430 | raise TypeError("Can not load '%s' from '%s'" |
415 | 431 | "" % (obj_name, mod_name)) |
416 | if not six.callable(val): | |
417 | raise TypeError("Value is not six.callable: %s" % val) | |
418 | if arity != -1 and arity != _compat.get_arity(val): | |
432 | if not callable(val): | |
433 | raise TypeError("Value is not callable: %s" % val) | |
434 | if arity != -1 and arity != util.get_arity(val): | |
419 | 435 | raise TypeError("Value must have an arity of: %s" % arity) |
420 | 436 | return val |
421 | 437 | return _validate_callable |
453 | 469 | def validate_post_request(val): |
454 | 470 | val = validate_callable(-1)(val) |
455 | 471 | |
456 | largs = _compat.get_arity(val) | |
472 | largs = util.get_arity(val) | |
457 | 473 | if largs == 4: |
458 | 474 | return val |
459 | 475 | elif largs == 3: |
540 | 556 | desc = """\ |
541 | 557 | The socket to bind. |
542 | 558 | |
543 | A string of the form: ``HOST``, ``HOST:PORT``, ``unix:PATH``. An IP is | |
544 | a valid ``HOST``. | |
559 | A string of the form: ``HOST``, ``HOST:PORT``, ``unix:PATH``, | |
560 | ``fd://FD``. An IP is a valid ``HOST``. | |
561 | ||
562 | .. versionchanged:: 20.0 | |
563 | Support for ``fd://FD`` got added. | |
545 | 564 | |
546 | 565 | Multiple addresses can be bound. ex.:: |
547 | 566 | |
605 | 624 | The default class (``sync``) should handle most "normal" types of |
606 | 625 | workloads. You'll want to read :doc:`design` for information on when |
607 | 626 | you might want to choose one of the other worker classes. Required |
608 | libraries may be installed using setuptools' ``extra_require`` feature. | |
627 | libraries may be installed using setuptools' ``extras_require`` feature. | |
609 | 628 | |
610 | 629 | A string referring to one of the following bundled classes: |
611 | 630 | |
612 | 631 | * ``sync`` |
613 | * ``eventlet`` - Requires eventlet >= 0.9.7 (or install it via | |
632 | * ``eventlet`` - Requires eventlet >= 0.24.1 (or install it via | |
614 | 633 | ``pip install gunicorn[eventlet]``) |
615 | * ``gevent`` - Requires gevent >= 0.13 (or install it via | |
634 | * ``gevent`` - Requires gevent >= 1.4 (or install it via | |
616 | 635 | ``pip install gunicorn[gevent]``) |
617 | * ``tornado`` - Requires tornado >= 0.2 (or install it via | |
636 | * ``tornado`` - Requires tornado >= 0.2 (or install it via | |
618 | 637 | ``pip install gunicorn[tornado]``) |
619 | 638 | * ``gthread`` - Python 2 requires the futures package to be installed |
620 | 639 | (or install it via ``pip install gunicorn[gthread]``) |
621 | * ``gaiohttp`` - Deprecated. | |
622 | 640 | |
623 | 641 | Optionally, you can provide your own worker by giving Gunicorn a |
624 | 642 | Python path to a subclass of ``gunicorn.workers.base.Worker``. |
625 | 643 | This alternative syntax will load the gevent class: |
626 | 644 | ``gunicorn.workers.ggevent.GeventWorker``. |
627 | ||
628 | .. deprecated:: 19.8 | |
629 | The ``gaiohttp`` worker is deprecated. Please use | |
630 | ``aiohttp.worker.GunicornWebWorker`` instead. See | |
631 | :ref:`asyncio-workers` for more information on how to use it. | |
632 | 645 | """ |
633 | 646 | |
634 | 647 | class WorkerThreads(Setting): |
651 | 664 | If it is not defined, the default is ``1``. |
652 | 665 | |
653 | 666 | This setting only affects the Gthread worker type. |
654 | ||
667 | ||
655 | 668 | .. note:: |
656 | 669 | If you try to use the ``sync`` worker type and set the ``threads`` |
657 | 670 | setting to more than 1, the ``gthread`` worker type will be used |
685 | 698 | desc = """\ |
686 | 699 | The maximum number of requests a worker will process before restarting. |
687 | 700 | |
688 | Any value greater than zero will limit the number of requests a work | |
701 | Any value greater than zero will limit the number of requests a worker | |
689 | 702 | will process before automatically restarting. This is a simple method |
690 | 703 | to help limit the damage of memory leaks. |
691 | 704 | |
1246 | 1259 | D request time in microseconds |
1247 | 1260 | L request time in decimal seconds |
1248 | 1261 | p process ID |
1249 | {Header}i request header | |
1250 | {Header}o response header | |
1251 | {Variable}e environment variable | |
1262 | {header}i request header | |
1263 | {header}o response header | |
1264 | {variable}e environment variable | |
1252 | 1265 | =========== =========== |
1266 | ||
1267 | Use lowercase for header and environment variable names, and put | |
1268 | ``{...}x`` names inside ``%(...)s``. For example:: | |
1269 | ||
1270 | %({x-forwarded-for}i)s | |
1253 | 1271 | """ |
1254 | 1272 | |
1255 | 1273 | |
1459 | 1477 | .. versionadded:: 19.1 |
1460 | 1478 | """ |
1461 | 1479 | |
# Datadog Statsd (dogstatsd) tags. https://docs.datadoghq.com/developers/dogstatsd/
class DogstatsdTags(Setting):
    # Declarative Setting subclass: the Setting machinery picks these
    # attributes up to build the config option, CLI flag and docs entry.
    name = "dogstatsd_tags"
    section = "Logging"
    cli = ["--dogstatsd-tags"]
    # Placeholder shown in --help for the flag's argument.
    meta = "DOGSTATSD_TAGS"
    # Empty string disables tagging; see Statsd._sock_send which only
    # appends the "|#tags" datagram suffix when this is non-empty.
    default = ""
    validator = validate_string
    desc = """\
        A comma-delimited list of datadog statsd (dogstatsd) tags to append to statsd metrics.

        .. versionadded:: 20
        """
1462 | 1494 | class StatsdPrefix(Setting): |
1463 | 1495 | name = "statsd_prefix" |
1464 | 1496 | section = "Logging" |
1539 | 1571 | name = "on_starting" |
1540 | 1572 | section = "Server Hooks" |
1541 | 1573 | validator = validate_callable(1) |
1542 | type = six.callable | |
1574 | type = callable | |
1543 | 1575 | |
1544 | 1576 | def on_starting(server): |
1545 | 1577 | pass |
1555 | 1587 | name = "on_reload" |
1556 | 1588 | section = "Server Hooks" |
1557 | 1589 | validator = validate_callable(1) |
1558 | type = six.callable | |
1590 | type = callable | |
1559 | 1591 | |
1560 | 1592 | def on_reload(server): |
1561 | 1593 | pass |
1571 | 1603 | name = "when_ready" |
1572 | 1604 | section = "Server Hooks" |
1573 | 1605 | validator = validate_callable(1) |
1574 | type = six.callable | |
1606 | type = callable | |
1575 | 1607 | |
1576 | 1608 | def when_ready(server): |
1577 | 1609 | pass |
1587 | 1619 | name = "pre_fork" |
1588 | 1620 | section = "Server Hooks" |
1589 | 1621 | validator = validate_callable(2) |
1590 | type = six.callable | |
1622 | type = callable | |
1591 | 1623 | |
1592 | 1624 | def pre_fork(server, worker): |
1593 | 1625 | pass |
1604 | 1636 | name = "post_fork" |
1605 | 1637 | section = "Server Hooks" |
1606 | 1638 | validator = validate_callable(2) |
1607 | type = six.callable | |
1639 | type = callable | |
1608 | 1640 | |
1609 | 1641 | def post_fork(server, worker): |
1610 | 1642 | pass |
1621 | 1653 | name = "post_worker_init" |
1622 | 1654 | section = "Server Hooks" |
1623 | 1655 | validator = validate_callable(1) |
1624 | type = six.callable | |
1656 | type = callable | |
1625 | 1657 | |
1626 | 1658 | def post_worker_init(worker): |
1627 | 1659 | pass |
1638 | 1670 | name = "worker_int" |
1639 | 1671 | section = "Server Hooks" |
1640 | 1672 | validator = validate_callable(1) |
1641 | type = six.callable | |
1673 | type = callable | |
1642 | 1674 | |
1643 | 1675 | def worker_int(worker): |
1644 | 1676 | pass |
1656 | 1688 | name = "worker_abort" |
1657 | 1689 | section = "Server Hooks" |
1658 | 1690 | validator = validate_callable(1) |
1659 | type = six.callable | |
1691 | type = callable | |
1660 | 1692 | |
1661 | 1693 | def worker_abort(worker): |
1662 | 1694 | pass |
1676 | 1708 | name = "pre_exec" |
1677 | 1709 | section = "Server Hooks" |
1678 | 1710 | validator = validate_callable(1) |
1679 | type = six.callable | |
1711 | type = callable | |
1680 | 1712 | |
1681 | 1713 | def pre_exec(server): |
1682 | 1714 | pass |
1692 | 1724 | name = "pre_request" |
1693 | 1725 | section = "Server Hooks" |
1694 | 1726 | validator = validate_callable(2) |
1695 | type = six.callable | |
1727 | type = callable | |
1696 | 1728 | |
1697 | 1729 | def pre_request(worker, req): |
1698 | 1730 | worker.log.debug("%s %s" % (req.method, req.path)) |
1709 | 1741 | name = "post_request" |
1710 | 1742 | section = "Server Hooks" |
1711 | 1743 | validator = validate_post_request |
1712 | type = six.callable | |
1744 | type = callable | |
1713 | 1745 | |
1714 | 1746 | def post_request(worker, req, environ, resp): |
1715 | 1747 | pass |
1726 | 1758 | name = "child_exit" |
1727 | 1759 | section = "Server Hooks" |
1728 | 1760 | validator = validate_callable(2) |
1729 | type = six.callable | |
1761 | type = callable | |
1730 | 1762 | |
1731 | 1763 | def child_exit(server, worker): |
1732 | 1764 | pass |
1745 | 1777 | name = "worker_exit" |
1746 | 1778 | section = "Server Hooks" |
1747 | 1779 | validator = validate_callable(2) |
1748 | type = six.callable | |
1780 | type = callable | |
1749 | 1781 | |
1750 | 1782 | def worker_exit(server, worker): |
1751 | 1783 | pass |
1762 | 1794 | name = "nworkers_changed" |
1763 | 1795 | section = "Server Hooks" |
1764 | 1796 | validator = validate_callable(3) |
1765 | type = six.callable | |
1797 | type = callable | |
1766 | 1798 | |
1767 | 1799 | def nworkers_changed(server, new_value, old_value): |
1768 | 1800 | pass |
1861 | 1893 | name = "ssl_version" |
1862 | 1894 | section = "SSL" |
1863 | 1895 | cli = ["--ssl-version"] |
1864 | validator = validate_pos_int | |
1896 | validator = validate_ssl_version | |
1865 | 1897 | default = ssl.PROTOCOL_SSLv23 |
1866 | 1898 | desc = """\ |
1867 | SSL version to use (see stdlib ssl module's) | |
1899 | SSL version to use. | |
1900 | ||
1901 | ============= ============ | |
1902 | --ssl-version Description | |
1903 | ============= ============ | |
1904 | SSLv3 SSLv3 is not-secure and is strongly discouraged. | |
1905 | SSLv23 Alias for TLS. Deprecated in Python 3.6, use TLS. | |
1906 | TLS Negotiate highest possible version between client/server. | |
1907 | Can yield SSL. (Python 3.6+) | |
1908 | TLSv1 TLS 1.0 | |
1909 | TLSv1_1 TLS 1.1 (Python 3.4+) | |
1910 | TLSv1_2 TLS 1.2 (Python 3.4+) | |
1911 | TLS_SERVER Auto-negotiate the highest protocol version like TLS, | |
1912 | but only support server-side SSLSocket connections. | |
1913 | (Python 3.6+) | |
1914 | ============= ============ | |
1868 | 1915 | |
1869 | 1916 | .. versionchanged:: 19.7 |
1870 | 1917 | The default value has been changed from ``ssl.PROTOCOL_TLSv1`` to |
1871 | 1918 | ``ssl.PROTOCOL_SSLv23``. |
1919 | .. versionchanged:: 20.0 | |
1920 | This setting now accepts string names based on ``ssl.PROTOCOL_`` | |
1921 | constants. | |
1872 | 1922 | """ |
1873 | 1923 | |
1874 | 1924 | class CertReqs(Setting): |
1915 | 1965 | """ |
1916 | 1966 | |
1917 | 1967 | |
1918 | if sys.version_info >= (2, 7): | |
1919 | class Ciphers(Setting): | |
1920 | name = "ciphers" | |
1921 | section = "SSL" | |
1922 | cli = ["--ciphers"] | |
1923 | validator = validate_string | |
1924 | default = 'TLSv1' | |
1925 | desc = """\ | |
1926 | Ciphers to use (see stdlib ssl module's) | |
1927 | """ | |
1968 | class Ciphers(Setting): | |
1969 | name = "ciphers" | |
1970 | section = "SSL" | |
1971 | cli = ["--ciphers"] | |
1972 | validator = validate_string | |
1973 | default = None | |
1974 | desc = """\ | |
1975 | SSL Cipher suite to use, in the format of an OpenSSL cipher list. | |
1976 | ||
1977 | By default we use the default cipher list from Python's ``ssl`` module, | |
1978 | which contains ciphers considered strong at the time of each Python | |
1979 | release. | |
1980 | ||
1981 | As a recommended alternative, the Open Web App Security Project (OWASP) | |
1982 | offers `a vetted set of strong cipher strings rated A+ to C- | |
1983 | <https://www.owasp.org/index.php/TLS_Cipher_String_Cheat_Sheet>`_. | |
1984 | OWASP provides details on user-agent compatibility at each security level. | |
1985 | ||
1986 | See the `OpenSSL Cipher List Format Documentation | |
1987 | <https://www.openssl.org/docs/manmaster/man1/ciphers.html#CIPHER-LIST-FORMAT>`_ | |
1988 | for details on the format of an OpenSSL cipher list. | |
1989 | """ | |
1928 | 1990 | |
1929 | 1991 | |
1930 | 1992 | class PasteGlobalConf(Setting): |
7 | 7 | import time |
8 | 8 | import logging |
9 | 9 | logging.Logger.manager.emittedNoHandlerWarning = 1 |
10 | from logging.config import dictConfig | |
10 | 11 | from logging.config import fileConfig |
11 | try: | |
12 | from logging.config import dictConfig | |
13 | except ImportError: | |
14 | # python 2.6 | |
15 | dictConfig = None | |
16 | 12 | import os |
17 | 13 | import socket |
18 | 14 | import sys |
20 | 16 | import traceback |
21 | 17 | |
22 | 18 | from gunicorn import util |
23 | from gunicorn.six import PY3, string_types | |
24 | 19 | |
25 | 20 | |
26 | 21 | # syslog facility codes |
53 | 48 | version=1, |
54 | 49 | disable_existing_loggers=False, |
55 | 50 | |
51 | root={"level": "INFO", "handlers": ["console"]}, | |
56 | 52 | loggers={ |
57 | "root": {"level": "INFO", "handlers": ["console"]}, | |
58 | 53 | "gunicorn.error": { |
59 | 54 | "level": "INFO", |
60 | 55 | "handlers": ["error_console"], |
103 | 98 | def __init__(self, atoms): |
104 | 99 | dict.__init__(self) |
105 | 100 | for key, value in atoms.items(): |
106 | if isinstance(value, string_types): | |
101 | if isinstance(value, str): | |
107 | 102 | self[key] = value.replace('"', '\\"') |
108 | 103 | else: |
109 | 104 | self[key] = value |
112 | 107 | if k.startswith("{"): |
113 | 108 | kl = k.lower() |
114 | 109 | if kl in self: |
115 | return super(SafeAtoms, self).__getitem__(kl) | |
110 | return super().__getitem__(kl) | |
116 | 111 | else: |
117 | 112 | return "-" |
118 | 113 | if k in self: |
119 | return super(SafeAtoms, self).__getitem__(k) | |
114 | return super().__getitem__(k) | |
120 | 115 | else: |
121 | 116 | return '-' |
122 | 117 | |
230 | 225 | self.access_log, cfg, self.syslog_fmt, "access" |
231 | 226 | ) |
232 | 227 | |
233 | if dictConfig is None and cfg.logconfig_dict: | |
234 | util.warn("Dictionary-based log configuration requires " | |
235 | "Python 2.7 or above.") | |
236 | ||
237 | if dictConfig and cfg.logconfig_dict: | |
228 | if cfg.logconfig_dict: | |
238 | 229 | config = CONFIG_DEFAULTS.copy() |
239 | 230 | config.update(cfg.logconfig_dict) |
240 | 231 | try: |
276 | 267 | self.error_log.exception(msg, *args, **kwargs) |
277 | 268 | |
278 | 269 | def log(self, lvl, msg, *args, **kwargs): |
279 | if isinstance(lvl, string_types): | |
270 | if isinstance(lvl, str): | |
280 | 271 | lvl = self.LOG_LEVELS.get(lvl.lower(), logging.INFO) |
281 | 272 | self.error_log.log(lvl, msg, *args, **kwargs) |
282 | 273 | |
317 | 308 | if hasattr(req_headers, "items"): |
318 | 309 | req_headers = req_headers.items() |
319 | 310 | |
320 | atoms.update(dict([("{%s}i" % k.lower(), v) for k, v in req_headers])) | |
311 | atoms.update({"{%s}i" % k.lower(): v for k, v in req_headers}) | |
321 | 312 | |
322 | 313 | resp_headers = resp.headers |
323 | 314 | if hasattr(resp_headers, "items"): |
324 | 315 | resp_headers = resp_headers.items() |
325 | 316 | |
326 | 317 | # add response headers |
327 | atoms.update(dict([("{%s}o" % k.lower(), v) for k, v in resp_headers])) | |
318 | atoms.update({"{%s}o" % k.lower(): v for k, v in resp_headers}) | |
328 | 319 | |
329 | 320 | # add environ variables |
330 | 321 | environ_variables = environ.items() |
331 | atoms.update(dict([("{%s}e" % k.lower(), v) for k, v in environ_variables])) | |
322 | atoms.update({"{%s}e" % k.lower(): v for k, v in environ_variables}) | |
332 | 323 | |
333 | 324 | return atoms |
334 | 325 | |
443 | 434 | socktype, addr = parse_syslog_address(cfg.syslog_addr) |
444 | 435 | |
445 | 436 | # finally setup the syslog handler |
446 | if sys.version_info >= (2, 7): | |
447 | h = logging.handlers.SysLogHandler(address=addr, | |
448 | facility=facility, socktype=socktype) | |
449 | else: | |
450 | # socktype is only supported in 2.7 and sup | |
451 | # fix issue #541 | |
452 | h = logging.handlers.SysLogHandler(address=addr, | |
453 | facility=facility) | |
437 | h = logging.handlers.SysLogHandler(address=addr, | |
438 | facility=facility, socktype=socktype) | |
454 | 439 | |
455 | 440 | h.setFormatter(fmt) |
456 | 441 | h._gunicorn = True |
459 | 444 | def _get_user(self, environ): |
460 | 445 | user = None |
461 | 446 | http_auth = environ.get("HTTP_AUTHORIZATION") |
462 | if http_auth and http_auth.startswith('Basic'): | |
447 | if http_auth and http_auth.lower().startswith('basic'): | |
463 | 448 | auth = http_auth.split(" ", 1) |
464 | 449 | if len(auth) == 2: |
465 | 450 | try: |
466 | 451 | # b64decode doesn't accept unicode in Python < 3.3 |
467 | 452 | # so we need to convert it to a byte string |
468 | 453 | auth = base64.b64decode(auth[1].strip().encode('utf-8')) |
469 | if PY3: # b64decode returns a byte string in Python 3 | |
470 | auth = auth.decode('utf-8') | |
454 | # b64decode returns a byte string | |
455 | auth = auth.decode('utf-8') | |
471 | 456 | auth = auth.split(":", 1) |
472 | 457 | except (TypeError, binascii.Error, UnicodeDecodeError) as exc: |
473 | 458 | self.debug("Couldn't get username: %s", exc) |
0 | # -*- coding: utf-8 - | |
1 | # | |
2 | # This file is part of gunicorn released under the MIT license. | |
3 | # See the NOTICE for more information. | |
4 | ||
5 | import errno | |
6 | import os | |
7 | import sys | |
8 | ||
# Module prelude: this module must raise ImportError when a usable
# libc sendfile() is unavailable, so the importer can fall back to a
# plain read/write copy loop.
try:
    import ctypes
    import ctypes.util
except MemoryError:
    # selinux execmem denial
    # https://bugzilla.redhat.com/show_bug.cgi?id=488396
    # Re-raise as ImportError so callers treat it as "sendfile missing".
    raise ImportError

# Platforms whose libc sendfile() calling conventions are handled below.
# NOTE(review): 'linux2' was the sys.platform value on Python 2; on
# Python 3 sys.platform is 'linux', so this gate would always fail there
# — confirm intended interpreter before relying on the linux branch.
SUPPORTED_PLATFORMS = (
    'darwin',
    'freebsd',
    'dragonfly',
    'linux2')

if sys.platform not in SUPPORTED_PLATFORMS:
    raise ImportError("sendfile isn't supported on this platform")

# Bind libc's sendfile symbol once; use_errno=True lets us read errno
# via ctypes.get_errno() after a failed call.
_libc = ctypes.CDLL(ctypes.util.find_library("c"), use_errno=True)
_sendfile = _libc.sendfile
def sendfile(fdout, fdin, offset, nbytes):
    """Copy up to *nbytes* bytes from file descriptor *fdin* to socket
    descriptor *fdout* starting at *offset*, using libc sendfile().

    Returns the number of bytes actually sent. Raises OSError on
    failure, except that EAGAIN with a partial transfer returns the
    partial byte count so the caller can retry the remainder.

    Fix: the original used ``ctypes.c_voidp``, which does not exist
    (the correct name is ``ctypes.c_void_p``), so the darwin and
    freebsd/dragonfly branches raised AttributeError when setting
    ``argtypes``.
    """
    if sys.platform == 'darwin':
        # macOS: int sendfile(int fd, int s, off_t offset, off_t *len,
        #                     struct sf_hdtr *hdtr, int flags)
        # *len is in/out: on return it holds the bytes actually sent.
        _sendfile.argtypes = [ctypes.c_int, ctypes.c_int, ctypes.c_uint64,
                              ctypes.POINTER(ctypes.c_uint64), ctypes.c_void_p,
                              ctypes.c_int]
        _nbytes = ctypes.c_uint64(nbytes)
        result = _sendfile(fdin, fdout, offset, _nbytes, None, 0)

        if result == -1:
            e = ctypes.get_errno()
            # EAGAIN may still have transferred some bytes; report them.
            if e == errno.EAGAIN and _nbytes.value is not None:
                return _nbytes.value
            raise OSError(e, os.strerror(e))
        return _nbytes.value
    elif sys.platform in ('freebsd', 'dragonfly',):
        # FreeBSD/DragonFly: int sendfile(int fd, int s, off_t offset,
        #     size_t nbytes, struct sf_hdtr *hdtr, off_t *sbytes, int flags)
        # *sbytes is an out-parameter with the bytes actually sent.
        _sendfile.argtypes = [ctypes.c_int, ctypes.c_int, ctypes.c_uint64,
                              ctypes.c_uint64, ctypes.c_void_p,
                              ctypes.POINTER(ctypes.c_uint64), ctypes.c_int]
        _sbytes = ctypes.c_uint64()
        result = _sendfile(fdin, fdout, offset, nbytes, None, _sbytes, 0)
        if result == -1:
            e = ctypes.get_errno()
            if e == errno.EAGAIN and _sbytes.value is not None:
                return _sbytes.value
            raise OSError(e, os.strerror(e))
        return _sbytes.value

    else:
        # Linux: ssize_t sendfile(int out_fd, int in_fd, off_t *offset,
        #                         size_t count) — returns bytes sent.
        _sendfile.argtypes = [ctypes.c_int, ctypes.c_int,
                              ctypes.POINTER(ctypes.c_uint64), ctypes.c_size_t]

        _offset = ctypes.c_uint64(offset)
        sent = _sendfile(fdout, fdin, _offset, nbytes)
        if sent == -1:
            e = ctypes.get_errno()
            raise OSError(e, os.strerror(e))
        return sent
2 | 2 | # This file is part of gunicorn released under the MIT license. |
3 | 3 | # See the NOTICE for more information. |
4 | 4 | |
5 | import io | |
6 | import sys | |
7 | ||
5 | 8 | from gunicorn.http.errors import (NoMoreData, ChunkMissingTerminator, |
6 | InvalidChunkSize) | |
7 | from gunicorn import six | |
9 | InvalidChunkSize) | |
8 | 10 | |
9 | 11 | |
10 | 12 | class ChunkedReader(object): |
11 | 13 | def __init__(self, req, unreader): |
12 | 14 | self.req = req |
13 | 15 | self.parser = self.parse_chunked(unreader) |
14 | self.buf = six.BytesIO() | |
16 | self.buf = io.BytesIO() | |
15 | 17 | |
16 | 18 | def read(self, size): |
17 | if not isinstance(size, six.integer_types): | |
19 | if not isinstance(size, int): | |
18 | 20 | raise TypeError("size must be an integral type") |
19 | 21 | if size < 0: |
20 | 22 | raise ValueError("Size must be positive.") |
24 | 26 | if self.parser: |
25 | 27 | while self.buf.tell() < size: |
26 | 28 | try: |
27 | self.buf.write(six.next(self.parser)) | |
29 | self.buf.write(next(self.parser)) | |
28 | 30 | except StopIteration: |
29 | 31 | self.parser = None |
30 | 32 | break |
31 | 33 | |
32 | 34 | data = self.buf.getvalue() |
33 | 35 | ret, rest = data[:size], data[size:] |
34 | self.buf = six.BytesIO() | |
36 | self.buf = io.BytesIO() | |
35 | 37 | self.buf.write(rest) |
36 | 38 | return ret |
37 | 39 | |
38 | 40 | def parse_trailers(self, unreader, data): |
39 | buf = six.BytesIO() | |
41 | buf = io.BytesIO() | |
40 | 42 | buf.write(data) |
41 | 43 | |
42 | 44 | idx = buf.getvalue().find(b"\r\n\r\n") |
70 | 72 | (size, rest) = self.parse_chunk_size(unreader, data=rest[2:]) |
71 | 73 | |
72 | 74 | def parse_chunk_size(self, unreader, data=None): |
73 | buf = six.BytesIO() | |
75 | buf = io.BytesIO() | |
74 | 76 | if data is not None: |
75 | 77 | buf.write(data) |
76 | 78 | |
109 | 111 | self.length = length |
110 | 112 | |
111 | 113 | def read(self, size): |
112 | if not isinstance(size, six.integer_types): | |
114 | if not isinstance(size, int): | |
113 | 115 | raise TypeError("size must be an integral type") |
114 | 116 | |
115 | 117 | size = min(self.length, size) |
118 | 120 | if size == 0: |
119 | 121 | return b"" |
120 | 122 | |
121 | buf = six.BytesIO() | |
123 | buf = io.BytesIO() | |
122 | 124 | data = self.unreader.read() |
123 | 125 | while data: |
124 | 126 | buf.write(data) |
136 | 138 | class EOFReader(object): |
137 | 139 | def __init__(self, unreader): |
138 | 140 | self.unreader = unreader |
139 | self.buf = six.BytesIO() | |
141 | self.buf = io.BytesIO() | |
140 | 142 | self.finished = False |
141 | 143 | |
142 | 144 | def read(self, size): |
143 | if not isinstance(size, six.integer_types): | |
145 | if not isinstance(size, int): | |
144 | 146 | raise TypeError("size must be an integral type") |
145 | 147 | if size < 0: |
146 | 148 | raise ValueError("Size must be positive.") |
150 | 152 | if self.finished: |
151 | 153 | data = self.buf.getvalue() |
152 | 154 | ret, rest = data[:size], data[size:] |
153 | self.buf = six.BytesIO() | |
155 | self.buf = io.BytesIO() | |
154 | 156 | self.buf.write(rest) |
155 | 157 | return ret |
156 | 158 | |
166 | 168 | |
167 | 169 | data = self.buf.getvalue() |
168 | 170 | ret, rest = data[:size], data[size:] |
169 | self.buf = six.BytesIO() | |
171 | self.buf = io.BytesIO() | |
170 | 172 | self.buf.write(rest) |
171 | 173 | return ret |
172 | 174 | |
174 | 176 | class Body(object): |
175 | 177 | def __init__(self, reader): |
176 | 178 | self.reader = reader |
177 | self.buf = six.BytesIO() | |
179 | self.buf = io.BytesIO() | |
178 | 180 | |
179 | 181 | def __iter__(self): |
180 | 182 | return self |
184 | 186 | if not ret: |
185 | 187 | raise StopIteration() |
186 | 188 | return ret |
189 | ||
187 | 190 | next = __next__ |
188 | 191 | |
189 | 192 | def getsize(self, size): |
190 | 193 | if size is None: |
191 | return six.MAXSIZE | |
192 | elif not isinstance(size, six.integer_types): | |
194 | return sys.maxsize | |
195 | elif not isinstance(size, int): | |
193 | 196 | raise TypeError("size must be an integral type") |
194 | 197 | elif size < 0: |
195 | return six.MAXSIZE | |
198 | return sys.maxsize | |
196 | 199 | return size |
197 | 200 | |
198 | 201 | def read(self, size=None): |
203 | 206 | if size < self.buf.tell(): |
204 | 207 | data = self.buf.getvalue() |
205 | 208 | ret, rest = data[:size], data[size:] |
206 | self.buf = six.BytesIO() | |
209 | self.buf = io.BytesIO() | |
207 | 210 | self.buf.write(rest) |
208 | 211 | return ret |
209 | 212 | |
215 | 218 | |
216 | 219 | data = self.buf.getvalue() |
217 | 220 | ret, rest = data[:size], data[size:] |
218 | self.buf = six.BytesIO() | |
221 | self.buf = io.BytesIO() | |
219 | 222 | self.buf.write(rest) |
220 | 223 | return ret |
221 | 224 | |
225 | 228 | return b"" |
226 | 229 | |
227 | 230 | data = self.buf.getvalue() |
228 | self.buf = six.BytesIO() | |
231 | self.buf = io.BytesIO() | |
229 | 232 | |
230 | 233 | ret = [] |
231 | 234 | while 1: |
2 | 2 | # This file is part of gunicorn released under the MIT license. |
3 | 3 | # See the NOTICE for more information. |
4 | 4 | |
5 | import io | |
5 | 6 | import re |
6 | 7 | import socket |
7 | 8 | from errno import ENOTCONN |
8 | 9 | |
9 | from gunicorn._compat import bytes_to_str | |
10 | 10 | from gunicorn.http.unreader import SocketUnreader |
11 | 11 | from gunicorn.http.body import ChunkedReader, LengthReader, EOFReader, Body |
12 | 12 | from gunicorn.http.errors import (InvalidHeader, InvalidHeaderName, NoMoreData, |
14 | 14 | LimitRequestLine, LimitRequestHeaders) |
15 | 15 | from gunicorn.http.errors import InvalidProxyLine, ForbiddenProxyRequest |
16 | 16 | from gunicorn.http.errors import InvalidSchemeHeaders |
17 | from gunicorn.six import BytesIO, string_types | |
18 | from gunicorn.util import split_request_uri | |
17 | from gunicorn.util import bytes_to_str, split_request_uri | |
19 | 18 | |
20 | 19 | MAX_REQUEST_LINE = 8190 |
21 | 20 | MAX_HEADERS = 32768 |
71 | 70 | secure_scheme_headers = cfg.secure_scheme_headers |
72 | 71 | elif isinstance(self.unreader, SocketUnreader): |
73 | 72 | remote_addr = self.unreader.sock.getpeername() |
74 | if isinstance(remote_addr, tuple): | |
73 | if self.unreader.sock.family in (socket.AF_INET, socket.AF_INET6): | |
75 | 74 | remote_host = remote_addr[0] |
76 | 75 | if remote_host in cfg.forwarded_allow_ips: |
77 | 76 | secure_scheme_headers = cfg.secure_scheme_headers |
78 | elif isinstance(remote_addr, string_types): | |
77 | elif self.unreader.sock.family == socket.AF_UNIX: | |
79 | 78 | secure_scheme_headers = cfg.secure_scheme_headers |
80 | 79 | |
81 | 80 | # Parse headers into key/value pairs paying attention |
177 | 176 | |
178 | 177 | self.req_number = req_number |
179 | 178 | self.proxy_protocol_info = None |
180 | super(Request, self).__init__(cfg, unreader) | |
179 | super().__init__(cfg, unreader) | |
181 | 180 | |
182 | 181 | def get_data(self, unreader, buf, stop=False): |
183 | 182 | data = unreader.read() |
188 | 187 | buf.write(data) |
189 | 188 | |
190 | 189 | def parse(self, unreader): |
191 | buf = BytesIO() | |
190 | buf = io.BytesIO() | |
192 | 191 | self.get_data(unreader, buf, stop=True) |
193 | 192 | |
194 | 193 | # get request line |
197 | 196 | # proxy protocol |
198 | 197 | if self.proxy_protocol(bytes_to_str(line)): |
199 | 198 | # get next request line |
200 | buf = BytesIO() | |
199 | buf = io.BytesIO() | |
201 | 200 | buf.write(rbuf) |
202 | 201 | line, rbuf = self.read_line(unreader, buf, self.limit_request_line) |
203 | 202 | |
204 | 203 | self.parse_request_line(line) |
205 | buf = BytesIO() | |
204 | buf = io.BytesIO() | |
206 | 205 | buf.write(rbuf) |
207 | 206 | |
208 | 207 | # Headers |
242 | 241 | if idx > limit > 0: |
243 | 242 | raise LimitRequestLine(idx, limit) |
244 | 243 | break |
245 | elif len(data) - 2 > limit > 0: | |
244 | if len(data) - 2 > limit > 0: | |
246 | 245 | raise LimitRequestLine(len(data), limit) |
247 | 246 | self.get_data(unreader, buf) |
248 | 247 | data = buf.getvalue() |
357 | 356 | self.version = (int(match.group(1)), int(match.group(2))) |
358 | 357 | |
359 | 358 | def set_body_reader(self): |
360 | super(Request, self).set_body_reader() | |
359 | super().set_body_reader() | |
361 | 360 | if isinstance(self.body.reader, EOFReader): |
362 | 361 | self.body = Body(LengthReader(self.unreader, 0)) |
2 | 2 | # This file is part of gunicorn released under the MIT license. |
3 | 3 | # See the NOTICE for more information. |
4 | 4 | |
5 | import io | |
5 | 6 | import os |
6 | ||
7 | from gunicorn import six | |
8 | 7 | |
9 | 8 | # Classes that can undo reading data from |
10 | 9 | # a given type of data source. |
12 | 11 | |
13 | 12 | class Unreader(object): |
14 | 13 | def __init__(self): |
15 | self.buf = six.BytesIO() | |
14 | self.buf = io.BytesIO() | |
16 | 15 | |
17 | 16 | def chunk(self): |
18 | 17 | raise NotImplementedError() |
19 | 18 | |
20 | 19 | def read(self, size=None): |
21 | if size is not None and not isinstance(size, six.integer_types): | |
20 | if size is not None and not isinstance(size, int): | |
22 | 21 | raise TypeError("size parameter must be an int or long.") |
23 | 22 | |
24 | 23 | if size is not None: |
31 | 30 | |
32 | 31 | if size is None and self.buf.tell(): |
33 | 32 | ret = self.buf.getvalue() |
34 | self.buf = six.BytesIO() | |
33 | self.buf = io.BytesIO() | |
35 | 34 | return ret |
36 | 35 | if size is None: |
37 | 36 | d = self.chunk() |
41 | 40 | chunk = self.chunk() |
42 | 41 | if not chunk: |
43 | 42 | ret = self.buf.getvalue() |
44 | self.buf = six.BytesIO() | |
43 | self.buf = io.BytesIO() | |
45 | 44 | return ret |
46 | 45 | self.buf.write(chunk) |
47 | 46 | data = self.buf.getvalue() |
48 | self.buf = six.BytesIO() | |
47 | self.buf = io.BytesIO() | |
49 | 48 | self.buf.write(data[size:]) |
50 | 49 | return data[:size] |
51 | 50 | |
56 | 55 | |
57 | 56 | class SocketUnreader(Unreader): |
58 | 57 | def __init__(self, sock, max_chunk=8192): |
59 | super(SocketUnreader, self).__init__() | |
58 | super().__init__() | |
60 | 59 | self.sock = sock |
61 | 60 | self.mxchunk = max_chunk |
62 | 61 | |
66 | 65 | |
class IterUnreader(Unreader):
    """Unreader fed by an arbitrary iterable of byte chunks."""

    def __init__(self, iterable):
        super().__init__()
        self.iter = iter(iterable)

    def chunk(self):
        """Return the next chunk, or ``b""`` once the source is exhausted."""
        it = self.iter
        if not it:
            # Iterator was already dropped; keep signalling EOF.
            return b""
        try:
            return next(it)
        except StopIteration:
            # Drop the exhausted iterator so later calls short-circuit.
            self.iter = None
            return b""
8 | 8 | import re |
9 | 9 | import sys |
10 | 10 | |
11 | from gunicorn._compat import unquote_to_wsgi_str | |
12 | 11 | from gunicorn.http.message import HEADER_RE |
13 | 12 | from gunicorn.http.errors import InvalidHeader, InvalidHeaderName |
14 | from gunicorn.six import string_types, binary_type, reraise | |
15 | 13 | from gunicorn import SERVER_SOFTWARE |
16 | 14 | import gunicorn.util as util |
17 | ||
18 | try: | |
19 | # Python 3.3 has os.sendfile(). | |
20 | from os import sendfile | |
21 | except ImportError: | |
22 | try: | |
23 | from ._sendfile import sendfile | |
24 | except ImportError: | |
25 | sendfile = None | |
26 | 15 | |
27 | 16 | # Send files in at most 1GB blocks as some operating systems can have problems |
28 | 17 | # with sending files in blocks over 2GB. |
83 | 72 | "wsgi.multiprocess": (cfg.workers > 1), |
84 | 73 | "wsgi.run_once": False, |
85 | 74 | "wsgi.file_wrapper": FileWrapper, |
75 | "wsgi.input_terminated": True, | |
86 | 76 | "SERVER_SOFTWARE": SERVER_SOFTWARE, |
87 | 77 | } |
88 | 78 | |
140 | 130 | continue |
141 | 131 | elif hdr_name == "CONTENT-LENGTH": |
142 | 132 | environ['CONTENT_LENGTH'] = hdr_value |
133 | environ['wsgi.input_terminated'] = False | |
143 | 134 | continue |
144 | 135 | |
145 | 136 | key = 'HTTP_' + hdr_name.replace('-', '_') |
154 | 145 | # authors should be aware that REMOTE_HOST and REMOTE_ADDR |
155 | 146 | # may not qualify the remote addr: |
156 | 147 | # http://www.ietf.org/rfc/rfc3875 |
157 | if isinstance(client, string_types): | |
148 | if isinstance(client, str): | |
158 | 149 | environ['REMOTE_ADDR'] = client |
159 | elif isinstance(client, binary_type): | |
150 | elif isinstance(client, bytes): | |
160 | 151 | environ['REMOTE_ADDR'] = client.decode() |
161 | 152 | else: |
162 | 153 | environ['REMOTE_ADDR'] = client[0] |
166 | 157 | # Normally only the application should use the Host header but since the |
167 | 158 | # WSGI spec doesn't support unix sockets, we are using it to create |
168 | 159 | # viable SERVER_* if possible. |
169 | if isinstance(server, string_types): | |
160 | if isinstance(server, str): | |
170 | 161 | server = server.split(":") |
171 | 162 | if len(server) == 1: |
172 | 163 | # unix socket |
190 | 181 | path_info = req.path |
191 | 182 | if script_name: |
192 | 183 | path_info = path_info.split(script_name, 1)[1] |
193 | environ['PATH_INFO'] = unquote_to_wsgi_str(path_info) | |
184 | environ['PATH_INFO'] = util.unquote_to_wsgi_str(path_info) | |
194 | 185 | environ['SCRIPT_NAME'] = script_name |
195 | 186 | |
196 | 187 | # override the environ with the correct remote and server address if |
233 | 224 | if exc_info: |
234 | 225 | try: |
235 | 226 | if self.status and self.headers_sent: |
236 | reraise(exc_info[0], exc_info[1], exc_info[2]) | |
227 | util.reraise(exc_info[0], exc_info[1], exc_info[2]) | |
237 | 228 | finally: |
238 | 229 | exc_info = None |
239 | 230 | elif self.status is not None: |
255 | 246 | |
256 | 247 | def process_headers(self, headers): |
257 | 248 | for name, value in headers: |
258 | if not isinstance(name, string_types): | |
249 | if not isinstance(name, str): | |
259 | 250 | raise TypeError('%r is not a string' % name) |
260 | 251 | |
261 | 252 | if HEADER_RE.search(name): |
262 | 253 | raise InvalidHeaderName('%r' % name) |
263 | 254 | |
255 | if not isinstance(value, str): | |
256 | raise TypeError('%r is not a string' % value) | |
257 | ||
264 | 258 | if HEADER_VALUE_RE.search(value): |
265 | 259 | raise InvalidHeader('%r' % value) |
266 | 260 | |
267 | value = str(value).strip() | |
261 | value = value.strip() | |
268 | 262 | lname = name.lower().strip() |
269 | 263 | if lname == "content-length": |
270 | 264 | self.response_length = int(value) |
325 | 319 | tosend.extend(["%s: %s\r\n" % (k, v) for k, v in self.headers]) |
326 | 320 | |
327 | 321 | header_str = "%s\r\n" % "".join(tosend) |
328 | util.write(self.sock, util.to_bytestring(header_str, "ascii")) | |
322 | util.write(self.sock, util.to_bytestring(header_str, "latin-1")) | |
329 | 323 | self.headers_sent = True |
330 | 324 | |
331 | 325 | def write(self, arg): |
332 | 326 | self.send_headers() |
333 | if not isinstance(arg, binary_type): | |
327 | if not isinstance(arg, bytes): | |
334 | 328 | raise TypeError('%r is not a byte' % arg) |
335 | 329 | arglen = len(arg) |
336 | 330 | tosend = arglen |
352 | 346 | util.write(self.sock, arg, self.chunked) |
353 | 347 | |
354 | 348 | def can_sendfile(self): |
355 | return self.cfg.sendfile is not False and sendfile is not None | |
349 | return self.cfg.sendfile is not False | |
356 | 350 | |
357 | 351 | def sendfile(self, respiter): |
358 | 352 | if self.cfg.is_ssl or not self.can_sendfile(): |
389 | 383 | |
390 | 384 | while sent != nbytes: |
391 | 385 | count = min(nbytes - sent, BLKSIZE) |
392 | sent += sendfile(sockno, fileno, offset + sent, count) | |
386 | sent += os.sendfile(sockno, fileno, offset + sent, count) | |
393 | 387 | |
394 | 388 | if self.is_chunked(): |
395 | 389 | self.sock.sendall(b"\r\n") |
4 | 4 | |
5 | 5 | "Bare-bones implementation of statsD's protocol, client-side" |
6 | 6 | |
7 | import logging | |
7 | 8 | import socket |
8 | import logging | |
9 | 9 | from re import sub |
10 | 10 | |
11 | 11 | from gunicorn.glogging import Logger |
12 | from gunicorn import six | |
13 | 12 | |
14 | 13 | # Instrumentation constants |
15 | 14 | METRIC_VAR = "metric" |
34 | 33 | except Exception: |
35 | 34 | self.sock = None |
36 | 35 | |
36 | self.dogstatsd_tags = cfg.dogstatsd_tags | |
37 | ||
37 | 38 | # Log errors and warnings |
38 | 39 | def critical(self, msg, *args, **kwargs): |
39 | 40 | Logger.critical(self, msg, *args, **kwargs) |
51 | 52 | Logger.exception(self, msg, *args, **kwargs) |
52 | 53 | self.increment("gunicorn.log.exception", 1) |
53 | 54 | |
54 | # Special treatement for info, the most common log level | |
55 | # Special treatment for info, the most common log level | |
55 | 56 | def info(self, msg, *args, **kwargs): |
56 | 57 | self.log(logging.INFO, msg, *args, **kwargs) |
57 | 58 | |
114 | 115 | |
115 | 116 | def _sock_send(self, msg): |
116 | 117 | try: |
117 | if isinstance(msg, six.text_type): | |
118 | if isinstance(msg, str): | |
118 | 119 | msg = msg.encode("ascii") |
120 | ||
121 | # http://docs.datadoghq.com/guides/dogstatsd/#datagram-format | |
122 | if self.dogstatsd_tags: | |
123 | msg = msg + b"|#" + self.dogstatsd_tags.encode('ascii') | |
124 | ||
119 | 125 | if self.sock: |
120 | 126 | self.sock.send(msg) |
121 | 127 | except Exception: |
1 | 1 | # |
2 | 2 | # This file is part of gunicorn released under the MIT license. |
3 | 3 | # See the NOTICE for more information. |
4 | # pylint: disable=no-else-continue | |
4 | 5 | |
5 | 6 | import os |
6 | 7 | import os.path |
14 | 15 | |
15 | 16 | class Reloader(threading.Thread): |
16 | 17 | def __init__(self, extra_files=None, interval=1, callback=None): |
17 | super(Reloader, self).__init__() | |
18 | super().__init__() | |
18 | 19 | self.setDaemon(True) |
19 | 20 | self._extra_files = set(extra_files or ()) |
20 | 21 | self._extra_files_lock = threading.RLock() |
73 | 74 | | inotify.constants.IN_MOVED_TO) |
74 | 75 | |
75 | 76 | def __init__(self, extra_files=None, callback=None): |
76 | super(InotifyReloader, self).__init__() | |
77 | super().__init__() | |
77 | 78 | self.setDaemon(True) |
78 | 79 | self._callback = callback |
79 | 80 | self._dirs = set() |
95 | 96 | fnames = [ |
96 | 97 | os.path.dirname(COMPILED_EXT_RE.sub('py', module.__file__)) |
97 | 98 | for module in tuple(sys.modules.values()) |
98 | if hasattr(module, '__file__') | |
99 | if getattr(module, '__file__', None) | |
99 | 100 | ] |
100 | 101 | |
101 | 102 | return set(fnames) |
0 | """Selectors module. | |
1 | ||
2 | This module allows high-level and efficient I/O multiplexing, built upon the | |
3 | `select` module primitives. | |
4 | ||
5 | The following code adapted from trollius.selectors. | |
6 | """ | |
7 | ||
8 | ||
9 | from abc import ABCMeta, abstractmethod | |
10 | from collections import namedtuple, Mapping | |
11 | import math | |
12 | import select | |
13 | import sys | |
14 | ||
15 | from gunicorn._compat import wrap_error, InterruptedError | |
16 | from gunicorn import six | |
17 | ||
18 | ||
19 | # generic events, that must be mapped to implementation-specific ones | |
20 | EVENT_READ = (1 << 0) | |
21 | EVENT_WRITE = (1 << 1) | |
22 | ||
23 | ||
24 | def _fileobj_to_fd(fileobj): | |
25 | """Return a file descriptor from a file object. | |
26 | ||
27 | Parameters: | |
28 | fileobj -- file object or file descriptor | |
29 | ||
30 | Returns: | |
31 | corresponding file descriptor | |
32 | ||
33 | Raises: | |
34 | ValueError if the object is invalid | |
35 | """ | |
36 | if isinstance(fileobj, six.integer_types): | |
37 | fd = fileobj | |
38 | else: | |
39 | try: | |
40 | fd = int(fileobj.fileno()) | |
41 | except (AttributeError, TypeError, ValueError): | |
42 | raise ValueError("Invalid file object: " | |
43 | "{0!r}".format(fileobj)) | |
44 | if fd < 0: | |
45 | raise ValueError("Invalid file descriptor: {0}".format(fd)) | |
46 | return fd | |
47 | ||
48 | ||
49 | SelectorKey = namedtuple('SelectorKey', ['fileobj', 'fd', 'events', 'data']) | |
50 | """Object used to associate a file object to its backing file descriptor, | |
51 | selected event mask and attached data.""" | |
52 | ||
53 | ||
54 | class _SelectorMapping(Mapping): | |
55 | """Mapping of file objects to selector keys.""" | |
56 | ||
57 | def __init__(self, selector): | |
58 | self._selector = selector | |
59 | ||
60 | def __len__(self): | |
61 | return len(self._selector._fd_to_key) | |
62 | ||
63 | def __getitem__(self, fileobj): | |
64 | try: | |
65 | fd = self._selector._fileobj_lookup(fileobj) | |
66 | return self._selector._fd_to_key[fd] | |
67 | except KeyError: | |
68 | raise KeyError("{0!r} is not registered".format(fileobj)) | |
69 | ||
70 | def __iter__(self): | |
71 | return iter(self._selector._fd_to_key) | |
72 | ||
73 | ||
74 | class BaseSelector(six.with_metaclass(ABCMeta)): | |
75 | """Selector abstract base class. | |
76 | ||
77 | A selector supports registering file objects to be monitored for specific | |
78 | I/O events. | |
79 | ||
80 | A file object is a file descriptor or any object with a `fileno()` method. | |
81 | An arbitrary object can be attached to the file object, which can be used | |
82 | for example to store context information, a callback, etc. | |
83 | ||
84 | A selector can use various implementations (select(), poll(), epoll()...) | |
85 | depending on the platform. The default `Selector` class uses the most | |
86 | efficient implementation on the current platform. | |
87 | """ | |
88 | ||
89 | @abstractmethod | |
90 | def register(self, fileobj, events, data=None): | |
91 | """Register a file object. | |
92 | ||
93 | Parameters: | |
94 | fileobj -- file object or file descriptor | |
95 | events -- events to monitor (bitwise mask of EVENT_READ|EVENT_WRITE) | |
96 | data -- attached data | |
97 | ||
98 | Returns: | |
99 | SelectorKey instance | |
100 | ||
101 | Raises: | |
102 | ValueError if events is invalid | |
103 | KeyError if fileobj is already registered | |
104 | OSError if fileobj is closed or otherwise is unacceptable to | |
105 | the underlying system call (if a system call is made) | |
106 | ||
107 | Note: | |
108 | OSError may or may not be raised | |
109 | """ | |
110 | raise NotImplementedError | |
111 | ||
112 | @abstractmethod | |
113 | def unregister(self, fileobj): | |
114 | """Unregister a file object. | |
115 | ||
116 | Parameters: | |
117 | fileobj -- file object or file descriptor | |
118 | ||
119 | Returns: | |
120 | SelectorKey instance | |
121 | ||
122 | Raises: | |
123 | KeyError if fileobj is not registered | |
124 | ||
125 | Note: | |
126 | If fileobj is registered but has since been closed this does | |
127 | *not* raise OSError (even if the wrapped syscall does) | |
128 | """ | |
129 | raise NotImplementedError | |
130 | ||
131 | def modify(self, fileobj, events, data=None): | |
132 | """Change a registered file object monitored events or attached data. | |
133 | ||
134 | Parameters: | |
135 | fileobj -- file object or file descriptor | |
136 | events -- events to monitor (bitwise mask of EVENT_READ|EVENT_WRITE) | |
137 | data -- attached data | |
138 | ||
139 | Returns: | |
140 | SelectorKey instance | |
141 | ||
142 | Raises: | |
143 | Anything that unregister() or register() raises | |
144 | """ | |
145 | self.unregister(fileobj) | |
146 | return self.register(fileobj, events, data) | |
147 | ||
148 | @abstractmethod | |
149 | def select(self, timeout=None): | |
150 | """Perform the actual selection, until some monitored file objects are | |
151 | ready or a timeout expires. | |
152 | ||
153 | Parameters: | |
154 | timeout -- if timeout > 0, this specifies the maximum wait time, in | |
155 | seconds | |
156 | if timeout <= 0, the select() call won't block, and will | |
157 | report the currently ready file objects | |
158 | if timeout is None, select() will block until a monitored | |
159 | file object becomes ready | |
160 | ||
161 | Returns: | |
162 | list of (key, events) for ready file objects | |
163 | `events` is a bitwise mask of EVENT_READ|EVENT_WRITE | |
164 | """ | |
165 | raise NotImplementedError | |
166 | ||
167 | def close(self): | |
168 | """Close the selector. | |
169 | ||
170 | This must be called to make sure that any underlying resource is freed. | |
171 | """ | |
172 | pass | |
173 | ||
174 | def get_key(self, fileobj): | |
175 | """Return the key associated to a registered file object. | |
176 | ||
177 | Returns: | |
178 | SelectorKey for this file object | |
179 | """ | |
180 | mapping = self.get_map() | |
181 | try: | |
182 | return mapping[fileobj] | |
183 | except KeyError: | |
184 | raise KeyError("{0!r} is not registered".format(fileobj)) | |
185 | ||
186 | @abstractmethod | |
187 | def get_map(self): | |
188 | """Return a mapping of file objects to selector keys.""" | |
189 | raise NotImplementedError | |
190 | ||
191 | def __enter__(self): | |
192 | return self | |
193 | ||
194 | def __exit__(self, *args): | |
195 | self.close() | |
196 | ||
197 | ||
198 | class _BaseSelectorImpl(BaseSelector): | |
199 | """Base selector implementation.""" | |
200 | ||
201 | def __init__(self): | |
202 | # this maps file descriptors to keys | |
203 | self._fd_to_key = {} | |
204 | # read-only mapping returned by get_map() | |
205 | self._map = _SelectorMapping(self) | |
206 | ||
207 | def _fileobj_lookup(self, fileobj): | |
208 | """Return a file descriptor from a file object. | |
209 | ||
210 | This wraps _fileobj_to_fd() to do an exhaustive search in case | |
211 | the object is invalid but we still have it in our map. This | |
212 | is used by unregister() so we can unregister an object that | |
213 | was previously registered even if it is closed. It is also | |
214 | used by _SelectorMapping. | |
215 | """ | |
216 | try: | |
217 | return _fileobj_to_fd(fileobj) | |
218 | except ValueError: | |
219 | # Do an exhaustive search. | |
220 | for key in self._fd_to_key.values(): | |
221 | if key.fileobj is fileobj: | |
222 | return key.fd | |
223 | # Raise ValueError after all. | |
224 | raise | |
225 | ||
226 | def register(self, fileobj, events, data=None): | |
227 | if (not events) or (events & ~(EVENT_READ | EVENT_WRITE)): | |
228 | raise ValueError("Invalid events: {0!r}".format(events)) | |
229 | ||
230 | key = SelectorKey(fileobj, self._fileobj_lookup(fileobj), events, data) | |
231 | ||
232 | if key.fd in self._fd_to_key: | |
233 | raise KeyError("{0!r} (FD {1}) is already registered" | |
234 | .format(fileobj, key.fd)) | |
235 | ||
236 | self._fd_to_key[key.fd] = key | |
237 | return key | |
238 | ||
239 | def unregister(self, fileobj): | |
240 | try: | |
241 | key = self._fd_to_key.pop(self._fileobj_lookup(fileobj)) | |
242 | except KeyError: | |
243 | raise KeyError("{0!r} is not registered".format(fileobj)) | |
244 | return key | |
245 | ||
246 | def modify(self, fileobj, events, data=None): | |
247 | # TODO: Subclasses can probably optimize this even further. | |
248 | try: | |
249 | key = self._fd_to_key[self._fileobj_lookup(fileobj)] | |
250 | except KeyError: | |
251 | raise KeyError("{0!r} is not registered".format(fileobj)) | |
252 | if events != key.events: | |
253 | self.unregister(fileobj) | |
254 | key = self.register(fileobj, events, data) | |
255 | elif data != key.data: | |
256 | # Use a shortcut to update the data. | |
257 | key = key._replace(data=data) | |
258 | self._fd_to_key[key.fd] = key | |
259 | return key | |
260 | ||
261 | def close(self): | |
262 | self._fd_to_key.clear() | |
263 | ||
264 | def get_map(self): | |
265 | return self._map | |
266 | ||
267 | def _key_from_fd(self, fd): | |
268 | """Return the key associated to a given file descriptor. | |
269 | ||
270 | Parameters: | |
271 | fd -- file descriptor | |
272 | ||
273 | Returns: | |
274 | corresponding key, or None if not found | |
275 | """ | |
276 | try: | |
277 | return self._fd_to_key[fd] | |
278 | except KeyError: | |
279 | return None | |
280 | ||
281 | ||
282 | class SelectSelector(_BaseSelectorImpl): | |
283 | """Select-based selector.""" | |
284 | ||
285 | def __init__(self): | |
286 | super(SelectSelector, self).__init__() | |
287 | self._readers = set() | |
288 | self._writers = set() | |
289 | ||
290 | def register(self, fileobj, events, data=None): | |
291 | key = super(SelectSelector, self).register(fileobj, events, data) | |
292 | if events & EVENT_READ: | |
293 | self._readers.add(key.fd) | |
294 | if events & EVENT_WRITE: | |
295 | self._writers.add(key.fd) | |
296 | return key | |
297 | ||
298 | def unregister(self, fileobj): | |
299 | key = super(SelectSelector, self).unregister(fileobj) | |
300 | self._readers.discard(key.fd) | |
301 | self._writers.discard(key.fd) | |
302 | return key | |
303 | ||
304 | if sys.platform == 'win32': | |
305 | def _select(self, r, w, _, timeout=None): | |
306 | r, w, x = select.select(r, w, w, timeout) | |
307 | return r, w + x, [] | |
308 | else: | |
309 | _select = select.select | |
310 | ||
311 | def select(self, timeout=None): | |
312 | timeout = None if timeout is None else max(timeout, 0) | |
313 | ready = [] | |
314 | try: | |
315 | r, w, _ = wrap_error(self._select, | |
316 | self._readers, self._writers, [], timeout) | |
317 | except InterruptedError: | |
318 | return ready | |
319 | r = set(r) | |
320 | w = set(w) | |
321 | for fd in r | w: | |
322 | events = 0 | |
323 | if fd in r: | |
324 | events |= EVENT_READ | |
325 | if fd in w: | |
326 | events |= EVENT_WRITE | |
327 | ||
328 | key = self._key_from_fd(fd) | |
329 | if key: | |
330 | ready.append((key, events & key.events)) | |
331 | return ready | |
332 | ||
333 | ||
334 | if hasattr(select, 'poll'): | |
335 | ||
336 | class PollSelector(_BaseSelectorImpl): | |
337 | """Poll-based selector.""" | |
338 | ||
339 | def __init__(self): | |
340 | super(PollSelector, self).__init__() | |
341 | self._poll = select.poll() | |
342 | ||
343 | def register(self, fileobj, events, data=None): | |
344 | key = super(PollSelector, self).register(fileobj, events, data) | |
345 | poll_events = 0 | |
346 | if events & EVENT_READ: | |
347 | poll_events |= select.POLLIN | |
348 | if events & EVENT_WRITE: | |
349 | poll_events |= select.POLLOUT | |
350 | self._poll.register(key.fd, poll_events) | |
351 | return key | |
352 | ||
353 | def unregister(self, fileobj): | |
354 | key = super(PollSelector, self).unregister(fileobj) | |
355 | self._poll.unregister(key.fd) | |
356 | return key | |
357 | ||
358 | def select(self, timeout=None): | |
359 | if timeout is None: | |
360 | timeout = None | |
361 | elif timeout <= 0: | |
362 | timeout = 0 | |
363 | else: | |
364 | # poll() has a resolution of 1 millisecond, round away from | |
365 | # zero to wait *at least* timeout seconds. | |
366 | timeout = int(math.ceil(timeout * 1e3)) | |
367 | ready = [] | |
368 | try: | |
369 | fd_event_list = wrap_error(self._poll.poll, timeout) | |
370 | except InterruptedError: | |
371 | return ready | |
372 | for fd, event in fd_event_list: | |
373 | events = 0 | |
374 | if event & ~select.POLLIN: | |
375 | events |= EVENT_WRITE | |
376 | if event & ~select.POLLOUT: | |
377 | events |= EVENT_READ | |
378 | ||
379 | key = self._key_from_fd(fd) | |
380 | if key: | |
381 | ready.append((key, events & key.events)) | |
382 | return ready | |
383 | ||
384 | ||
385 | if hasattr(select, 'epoll'): | |
386 | ||
387 | class EpollSelector(_BaseSelectorImpl): | |
388 | """Epoll-based selector.""" | |
389 | ||
390 | def __init__(self): | |
391 | super(EpollSelector, self).__init__() | |
392 | self._epoll = select.epoll() | |
393 | ||
394 | def fileno(self): | |
395 | return self._epoll.fileno() | |
396 | ||
397 | def register(self, fileobj, events, data=None): | |
398 | key = super(EpollSelector, self).register(fileobj, events, data) | |
399 | epoll_events = 0 | |
400 | if events & EVENT_READ: | |
401 | epoll_events |= select.EPOLLIN | |
402 | if events & EVENT_WRITE: | |
403 | epoll_events |= select.EPOLLOUT | |
404 | self._epoll.register(key.fd, epoll_events) | |
405 | return key | |
406 | ||
407 | def unregister(self, fileobj): | |
408 | key = super(EpollSelector, self).unregister(fileobj) | |
409 | try: | |
410 | self._epoll.unregister(key.fd) | |
411 | except OSError: | |
412 | # This can happen if the FD was closed since it | |
413 | # was registered. | |
414 | pass | |
415 | return key | |
416 | ||
417 | def select(self, timeout=None): | |
418 | if timeout is None: | |
419 | timeout = -1 | |
420 | elif timeout <= 0: | |
421 | timeout = 0 | |
422 | else: | |
423 | # epoll_wait() has a resolution of 1 millisecond, round away | |
424 | # from zero to wait *at least* timeout seconds. | |
425 | timeout = math.ceil(timeout * 1e3) * 1e-3 | |
426 | max_ev = len(self._fd_to_key) | |
427 | ready = [] | |
428 | try: | |
429 | fd_event_list = wrap_error(self._epoll.poll, timeout, max_ev) | |
430 | except InterruptedError: | |
431 | return ready | |
432 | for fd, event in fd_event_list: | |
433 | events = 0 | |
434 | if event & ~select.EPOLLIN: | |
435 | events |= EVENT_WRITE | |
436 | if event & ~select.EPOLLOUT: | |
437 | events |= EVENT_READ | |
438 | ||
439 | key = self._key_from_fd(fd) | |
440 | if key: | |
441 | ready.append((key, events & key.events)) | |
442 | return ready | |
443 | ||
444 | def close(self): | |
445 | self._epoll.close() | |
446 | super(EpollSelector, self).close() | |
447 | ||
448 | ||
449 | if hasattr(select, 'devpoll'): | |
450 | ||
451 | class DevpollSelector(_BaseSelectorImpl): | |
452 | """Solaris /dev/poll selector.""" | |
453 | ||
454 | def __init__(self): | |
455 | super(DevpollSelector, self).__init__() | |
456 | self._devpoll = select.devpoll() | |
457 | ||
458 | def fileno(self): | |
459 | return self._devpoll.fileno() | |
460 | ||
461 | def register(self, fileobj, events, data=None): | |
462 | key = super(DevpollSelector, self).register(fileobj, events, data) | |
463 | poll_events = 0 | |
464 | if events & EVENT_READ: | |
465 | poll_events |= select.POLLIN | |
466 | if events & EVENT_WRITE: | |
467 | poll_events |= select.POLLOUT | |
468 | self._devpoll.register(key.fd, poll_events) | |
469 | return key | |
470 | ||
471 | def unregister(self, fileobj): | |
472 | key = super(DevpollSelector, self).unregister(fileobj) | |
473 | self._devpoll.unregister(key.fd) | |
474 | return key | |
475 | ||
476 | def select(self, timeout=None): | |
477 | if timeout is None: | |
478 | timeout = None | |
479 | elif timeout <= 0: | |
480 | timeout = 0 | |
481 | else: | |
482 | # devpoll() has a resolution of 1 millisecond, round away from | |
483 | # zero to wait *at least* timeout seconds. | |
484 | timeout = math.ceil(timeout * 1e3) | |
485 | ready = [] | |
486 | try: | |
487 | fd_event_list = self._devpoll.poll(timeout) | |
488 | except InterruptedError: | |
489 | return ready | |
490 | for fd, event in fd_event_list: | |
491 | events = 0 | |
492 | if event & ~select.POLLIN: | |
493 | events |= EVENT_WRITE | |
494 | if event & ~select.POLLOUT: | |
495 | events |= EVENT_READ | |
496 | ||
497 | key = self._key_from_fd(fd) | |
498 | if key: | |
499 | ready.append((key, events & key.events)) | |
500 | return ready | |
501 | ||
502 | def close(self): | |
503 | self._devpoll.close() | |
504 | super(DevpollSelector, self).close() | |
505 | ||
506 | ||
507 | if hasattr(select, 'kqueue'): | |
508 | ||
509 | class KqueueSelector(_BaseSelectorImpl): | |
510 | """Kqueue-based selector.""" | |
511 | ||
512 | def __init__(self): | |
513 | super(KqueueSelector, self).__init__() | |
514 | self._kqueue = select.kqueue() | |
515 | ||
516 | def fileno(self): | |
517 | return self._kqueue.fileno() | |
518 | ||
519 | def register(self, fileobj, events, data=None): | |
520 | key = super(KqueueSelector, self).register(fileobj, events, data) | |
521 | if events & EVENT_READ: | |
522 | kev = select.kevent(key.fd, select.KQ_FILTER_READ, | |
523 | select.KQ_EV_ADD) | |
524 | self._kqueue.control([kev], 0, 0) | |
525 | if events & EVENT_WRITE: | |
526 | kev = select.kevent(key.fd, select.KQ_FILTER_WRITE, | |
527 | select.KQ_EV_ADD) | |
528 | self._kqueue.control([kev], 0, 0) | |
529 | return key | |
530 | ||
531 | def unregister(self, fileobj): | |
532 | key = super(KqueueSelector, self).unregister(fileobj) | |
533 | if key.events & EVENT_READ: | |
534 | kev = select.kevent(key.fd, select.KQ_FILTER_READ, | |
535 | select.KQ_EV_DELETE) | |
536 | try: | |
537 | self._kqueue.control([kev], 0, 0) | |
538 | except OSError: | |
539 | # This can happen if the FD was closed since it | |
540 | # was registered. | |
541 | pass | |
542 | if key.events & EVENT_WRITE: | |
543 | kev = select.kevent(key.fd, select.KQ_FILTER_WRITE, | |
544 | select.KQ_EV_DELETE) | |
545 | try: | |
546 | self._kqueue.control([kev], 0, 0) | |
547 | except OSError: | |
548 | # See comment above. | |
549 | pass | |
550 | return key | |
551 | ||
552 | def select(self, timeout=None): | |
553 | timeout = None if timeout is None else max(timeout, 0) | |
554 | max_ev = len(self._fd_to_key) | |
555 | ready = [] | |
556 | try: | |
557 | kev_list = wrap_error(self._kqueue.control, | |
558 | None, max_ev, timeout) | |
559 | except InterruptedError: | |
560 | return ready | |
561 | for kev in kev_list: | |
562 | fd = kev.ident | |
563 | flag = kev.filter | |
564 | events = 0 | |
565 | if flag == select.KQ_FILTER_READ: | |
566 | events |= EVENT_READ | |
567 | if flag == select.KQ_FILTER_WRITE: | |
568 | events |= EVENT_WRITE | |
569 | ||
570 | key = self._key_from_fd(fd) | |
571 | if key: | |
572 | ready.append((key, events & key.events)) | |
573 | return ready | |
574 | ||
575 | def close(self): | |
576 | self._kqueue.close() | |
577 | super(KqueueSelector, self).close() | |
578 | ||
579 | ||
580 | # Choose the best implementation: roughly, epoll|kqueue|devpoll > poll > select. | |
581 | # select() also can't accept a FD > FD_SETSIZE (usually around 1024) | |
582 | if 'KqueueSelector' in globals(): | |
583 | DefaultSelector = KqueueSelector | |
584 | elif 'EpollSelector' in globals(): | |
585 | DefaultSelector = EpollSelector | |
586 | elif 'DevpollSelector' in globals(): | |
587 | DefaultSelector = DevpollSelector | |
588 | elif 'PollSelector' in globals(): | |
589 | DefaultSelector = PollSelector | |
590 | else: | |
591 | DefaultSelector = SelectSelector |
0 | """Utilities for writing code that runs on Python 2 and 3""" | |
1 | ||
2 | # Copyright (c) 2010-2014 Benjamin Peterson | |
3 | # | |
4 | # Permission is hereby granted, free of charge, to any person obtaining a copy | |
5 | # of this software and associated documentation files (the "Software"), to deal | |
6 | # in the Software without restriction, including without limitation the rights | |
7 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell | |
8 | # copies of the Software, and to permit persons to whom the Software is | |
9 | # furnished to do so, subject to the following conditions: | |
10 | # | |
11 | # The above copyright notice and this permission notice shall be included in all | |
12 | # copies or substantial portions of the Software. | |
13 | # | |
14 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | |
15 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | |
16 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE | |
17 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER | |
18 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, | |
19 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE | |
20 | # SOFTWARE. | |
21 | ||
22 | from __future__ import absolute_import | |
23 | ||
24 | import functools | |
25 | import operator | |
26 | import sys | |
27 | import types | |
28 | ||
29 | __author__ = "Benjamin Peterson <benjamin@python.org>" | |
30 | __version__ = "1.8.0" | |
31 | ||
32 | ||
33 | # Useful for very coarse version differentiation. | |
34 | PY2 = sys.version_info[0] == 2 | |
35 | PY3 = sys.version_info[0] == 3 | |
36 | ||
37 | if PY3: | |
38 | string_types = str, | |
39 | integer_types = int, | |
40 | class_types = type, | |
41 | text_type = str | |
42 | binary_type = bytes | |
43 | ||
44 | MAXSIZE = sys.maxsize | |
45 | else: | |
46 | string_types = basestring, | |
47 | integer_types = (int, long) | |
48 | class_types = (type, types.ClassType) | |
49 | text_type = unicode | |
50 | binary_type = str | |
51 | ||
52 | if sys.platform.startswith("java"): | |
53 | # Jython always uses 32 bits. | |
54 | MAXSIZE = int((1 << 31) - 1) | |
55 | else: | |
56 | # It's possible to have sizeof(long) != sizeof(Py_ssize_t). | |
57 | class X(object): | |
58 | def __len__(self): | |
59 | return 1 << 31 | |
60 | try: | |
61 | len(X()) | |
62 | except OverflowError: | |
63 | # 32-bit | |
64 | MAXSIZE = int((1 << 31) - 1) | |
65 | else: | |
66 | # 64-bit | |
67 | MAXSIZE = int((1 << 63) - 1) | |
68 | del X | |
69 | ||
70 | ||
71 | def _add_doc(func, doc): | |
72 | """Add documentation to a function.""" | |
73 | func.__doc__ = doc | |
74 | ||
75 | ||
76 | def _import_module(name): | |
77 | """Import module, returning the module after the last dot.""" | |
78 | __import__(name) | |
79 | return sys.modules[name] | |
80 | ||
81 | ||
82 | class _LazyDescr(object): | |
83 | ||
84 | def __init__(self, name): | |
85 | self.name = name | |
86 | ||
87 | def __get__(self, obj, tp): | |
88 | result = self._resolve() | |
89 | setattr(obj, self.name, result) # Invokes __set__. | |
90 | # This is a bit ugly, but it avoids running this again. | |
91 | delattr(obj.__class__, self.name) | |
92 | return result | |
93 | ||
94 | ||
95 | class MovedModule(_LazyDescr): | |
96 | ||
97 | def __init__(self, name, old, new=None): | |
98 | super(MovedModule, self).__init__(name) | |
99 | if PY3: | |
100 | if new is None: | |
101 | new = name | |
102 | self.mod = new | |
103 | else: | |
104 | self.mod = old | |
105 | ||
106 | def _resolve(self): | |
107 | return _import_module(self.mod) | |
108 | ||
109 | def __getattr__(self, attr): | |
110 | _module = self._resolve() | |
111 | value = getattr(_module, attr) | |
112 | setattr(self, attr, value) | |
113 | return value | |
114 | ||
115 | ||
116 | class _LazyModule(types.ModuleType): | |
117 | ||
118 | def __init__(self, name): | |
119 | super(_LazyModule, self).__init__(name) | |
120 | self.__doc__ = self.__class__.__doc__ | |
121 | ||
122 | def __dir__(self): | |
123 | attrs = ["__doc__", "__name__"] | |
124 | attrs += [attr.name for attr in self._moved_attributes] | |
125 | return attrs | |
126 | ||
127 | # Subclasses should override this | |
128 | _moved_attributes = [] | |
129 | ||
130 | ||
131 | class MovedAttribute(_LazyDescr): | |
132 | ||
133 | def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): | |
134 | super(MovedAttribute, self).__init__(name) | |
135 | if PY3: | |
136 | if new_mod is None: | |
137 | new_mod = name | |
138 | self.mod = new_mod | |
139 | if new_attr is None: | |
140 | if old_attr is None: | |
141 | new_attr = name | |
142 | else: | |
143 | new_attr = old_attr | |
144 | self.attr = new_attr | |
145 | else: | |
146 | self.mod = old_mod | |
147 | if old_attr is None: | |
148 | old_attr = name | |
149 | self.attr = old_attr | |
150 | ||
151 | def _resolve(self): | |
152 | module = _import_module(self.mod) | |
153 | return getattr(module, self.attr) | |
154 | ||
155 | ||
156 | class _SixMetaPathImporter(object): | |
157 | """ | |
158 | A meta path importer to import six.moves and its submodules. | |
159 | ||
160 | This class implements a PEP302 finder and loader. It should be compatible | |
161 | with Python 2.5 and all existing versions of Python3 | |
162 | """ | |
163 | def __init__(self, six_module_name): | |
164 | self.name = six_module_name | |
165 | self.known_modules = {} | |
166 | ||
167 | def _add_module(self, mod, *fullnames): | |
168 | for fullname in fullnames: | |
169 | self.known_modules[self.name + "." + fullname] = mod | |
170 | ||
171 | def _get_module(self, fullname): | |
172 | return self.known_modules[self.name + "." + fullname] | |
173 | ||
174 | def find_module(self, fullname, path=None): | |
175 | if fullname in self.known_modules: | |
176 | return self | |
177 | return None | |
178 | ||
179 | def __get_module(self, fullname): | |
180 | try: | |
181 | return self.known_modules[fullname] | |
182 | except KeyError: | |
183 | raise ImportError("This loader does not know module " + fullname) | |
184 | ||
185 | def load_module(self, fullname): | |
186 | try: | |
187 | # in case of a reload | |
188 | return sys.modules[fullname] | |
189 | except KeyError: | |
190 | pass | |
191 | mod = self.__get_module(fullname) | |
192 | if isinstance(mod, MovedModule): | |
193 | mod = mod._resolve() | |
194 | else: | |
195 | mod.__loader__ = self | |
196 | sys.modules[fullname] = mod | |
197 | return mod | |
198 | ||
199 | def is_package(self, fullname): | |
200 | """ | |
201 | Return true, if the named module is a package. | |
202 | ||
203 | We need this method to get correct spec objects with | |
204 | Python 3.4 (see PEP451) | |
205 | """ | |
206 | return hasattr(self.__get_module(fullname), "__path__") | |
207 | ||
208 | def get_code(self, fullname): | |
209 | """Return None | |
210 | ||
211 | Required, if is_package is implemented""" | |
212 | self.__get_module(fullname) # eventually raises ImportError | |
213 | return None | |
214 | get_source = get_code # same as get_code | |
215 | ||
216 | _importer = _SixMetaPathImporter(__name__) | |
217 | ||
218 | ||
219 | class _MovedItems(_LazyModule): | |
220 | """Lazy loading of moved objects""" | |
221 | __path__ = [] # mark as package | |
222 | ||
223 | ||
224 | _moved_attributes = [ | |
225 | MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), | |
226 | MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), | |
227 | MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"), | |
228 | MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), | |
229 | MovedAttribute("intern", "__builtin__", "sys"), | |
230 | MovedAttribute("map", "itertools", "builtins", "imap", "map"), | |
231 | MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), | |
232 | MovedAttribute("reload_module", "__builtin__", "imp", "reload"), | |
233 | MovedAttribute("reduce", "__builtin__", "functools"), | |
234 | MovedAttribute("shlex_quote", "pipes", "shlex", "quote"), | |
235 | MovedAttribute("StringIO", "StringIO", "io"), | |
236 | MovedAttribute("UserDict", "UserDict", "collections"), | |
237 | MovedAttribute("UserList", "UserList", "collections"), | |
238 | MovedAttribute("UserString", "UserString", "collections"), | |
239 | MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), | |
240 | MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), | |
241 | MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), | |
242 | ||
243 | MovedModule("builtins", "__builtin__"), | |
244 | MovedModule("configparser", "ConfigParser"), | |
245 | MovedModule("copyreg", "copy_reg"), | |
246 | MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), | |
247 | MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"), | |
248 | MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), | |
249 | MovedModule("http_cookies", "Cookie", "http.cookies"), | |
250 | MovedModule("html_entities", "htmlentitydefs", "html.entities"), | |
251 | MovedModule("html_parser", "HTMLParser", "html.parser"), | |
252 | MovedModule("http_client", "httplib", "http.client"), | |
253 | MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), | |
254 | MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"), | |
255 | MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), | |
256 | MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), | |
257 | MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), | |
258 | MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), | |
259 | MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), | |
260 | MovedModule("cPickle", "cPickle", "pickle"), | |
261 | MovedModule("queue", "Queue"), | |
262 | MovedModule("reprlib", "repr"), | |
263 | MovedModule("socketserver", "SocketServer"), | |
264 | MovedModule("_thread", "thread", "_thread"), | |
265 | MovedModule("tkinter", "Tkinter"), | |
266 | MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), | |
267 | MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), | |
268 | MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), | |
269 | MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), | |
270 | MovedModule("tkinter_tix", "Tix", "tkinter.tix"), | |
271 | MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"), | |
272 | MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), | |
273 | MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), | |
274 | MovedModule("tkinter_colorchooser", "tkColorChooser", | |
275 | "tkinter.colorchooser"), | |
276 | MovedModule("tkinter_commondialog", "tkCommonDialog", | |
277 | "tkinter.commondialog"), | |
278 | MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), | |
279 | MovedModule("tkinter_font", "tkFont", "tkinter.font"), | |
280 | MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), | |
281 | MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", | |
282 | "tkinter.simpledialog"), | |
283 | MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"), | |
284 | MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"), | |
285 | MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), | |
286 | MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), | |
287 | MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"), | |
288 | MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"), | |
289 | MovedModule("winreg", "_winreg"), | |
290 | ] | |
291 | for attr in _moved_attributes: | |
292 | setattr(_MovedItems, attr.name, attr) | |
293 | if isinstance(attr, MovedModule): | |
294 | _importer._add_module(attr, "moves." + attr.name) | |
295 | del attr | |
296 | ||
297 | _MovedItems._moved_attributes = _moved_attributes | |
298 | ||
299 | moves = _MovedItems(__name__ + ".moves") | |
300 | _importer._add_module(moves, "moves") | |
301 | ||
302 | ||
303 | class Module_six_moves_urllib_parse(_LazyModule): | |
304 | """Lazy loading of moved objects in six.moves.urllib_parse""" | |
305 | ||
306 | ||
307 | _urllib_parse_moved_attributes = [ | |
308 | MovedAttribute("ParseResult", "urlparse", "urllib.parse"), | |
309 | MovedAttribute("SplitResult", "urlparse", "urllib.parse"), | |
310 | MovedAttribute("parse_qs", "urlparse", "urllib.parse"), | |
311 | MovedAttribute("parse_qsl", "urlparse", "urllib.parse"), | |
312 | MovedAttribute("urldefrag", "urlparse", "urllib.parse"), | |
313 | MovedAttribute("urljoin", "urlparse", "urllib.parse"), | |
314 | MovedAttribute("urlparse", "urlparse", "urllib.parse"), | |
315 | MovedAttribute("urlsplit", "urlparse", "urllib.parse"), | |
316 | MovedAttribute("urlunparse", "urlparse", "urllib.parse"), | |
317 | MovedAttribute("urlunsplit", "urlparse", "urllib.parse"), | |
318 | MovedAttribute("quote", "urllib", "urllib.parse"), | |
319 | MovedAttribute("quote_plus", "urllib", "urllib.parse"), | |
320 | MovedAttribute("unquote", "urllib", "urllib.parse"), | |
321 | MovedAttribute("unquote_plus", "urllib", "urllib.parse"), | |
322 | MovedAttribute("urlencode", "urllib", "urllib.parse"), | |
323 | MovedAttribute("splitquery", "urllib", "urllib.parse"), | |
324 | MovedAttribute("splittag", "urllib", "urllib.parse"), | |
325 | MovedAttribute("splituser", "urllib", "urllib.parse"), | |
326 | MovedAttribute("uses_fragment", "urlparse", "urllib.parse"), | |
327 | MovedAttribute("uses_netloc", "urlparse", "urllib.parse"), | |
328 | MovedAttribute("uses_params", "urlparse", "urllib.parse"), | |
329 | MovedAttribute("uses_query", "urlparse", "urllib.parse"), | |
330 | MovedAttribute("uses_relative", "urlparse", "urllib.parse"), | |
331 | ] | |
332 | for attr in _urllib_parse_moved_attributes: | |
333 | setattr(Module_six_moves_urllib_parse, attr.name, attr) | |
334 | del attr | |
335 | ||
336 | Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes | |
337 | ||
338 | _importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"), | |
339 | "moves.urllib_parse", "moves.urllib.parse") | |
340 | ||
341 | ||
342 | class Module_six_moves_urllib_error(_LazyModule): | |
343 | """Lazy loading of moved objects in six.moves.urllib_error""" | |
344 | ||
345 | ||
346 | _urllib_error_moved_attributes = [ | |
347 | MovedAttribute("URLError", "urllib2", "urllib.error"), | |
348 | MovedAttribute("HTTPError", "urllib2", "urllib.error"), | |
349 | MovedAttribute("ContentTooShortError", "urllib", "urllib.error"), | |
350 | ] | |
351 | for attr in _urllib_error_moved_attributes: | |
352 | setattr(Module_six_moves_urllib_error, attr.name, attr) | |
353 | del attr | |
354 | ||
355 | Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes | |
356 | ||
357 | _importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), | |
358 | "moves.urllib_error", "moves.urllib.error") | |
359 | ||
360 | ||
361 | class Module_six_moves_urllib_request(_LazyModule): | |
362 | """Lazy loading of moved objects in six.moves.urllib_request""" | |
363 | ||
364 | ||
365 | _urllib_request_moved_attributes = [ | |
366 | MovedAttribute("urlopen", "urllib2", "urllib.request"), | |
367 | MovedAttribute("install_opener", "urllib2", "urllib.request"), | |
368 | MovedAttribute("build_opener", "urllib2", "urllib.request"), | |
369 | MovedAttribute("pathname2url", "urllib", "urllib.request"), | |
370 | MovedAttribute("url2pathname", "urllib", "urllib.request"), | |
371 | MovedAttribute("getproxies", "urllib", "urllib.request"), | |
372 | MovedAttribute("Request", "urllib2", "urllib.request"), | |
373 | MovedAttribute("OpenerDirector", "urllib2", "urllib.request"), | |
374 | MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"), | |
375 | MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"), | |
376 | MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"), | |
377 | MovedAttribute("ProxyHandler", "urllib2", "urllib.request"), | |
378 | MovedAttribute("BaseHandler", "urllib2", "urllib.request"), | |
379 | MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"), | |
380 | MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"), | |
381 | MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"), | |
382 | MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"), | |
383 | MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"), | |
384 | MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"), | |
385 | MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"), | |
386 | MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"), | |
387 | MovedAttribute("HTTPHandler", "urllib2", "urllib.request"), | |
388 | MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"), | |
389 | MovedAttribute("FileHandler", "urllib2", "urllib.request"), | |
390 | MovedAttribute("FTPHandler", "urllib2", "urllib.request"), | |
391 | MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"), | |
392 | MovedAttribute("UnknownHandler", "urllib2", "urllib.request"), | |
393 | MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"), | |
394 | MovedAttribute("urlretrieve", "urllib", "urllib.request"), | |
395 | MovedAttribute("urlcleanup", "urllib", "urllib.request"), | |
396 | MovedAttribute("URLopener", "urllib", "urllib.request"), | |
397 | MovedAttribute("FancyURLopener", "urllib", "urllib.request"), | |
398 | MovedAttribute("proxy_bypass", "urllib", "urllib.request"), | |
399 | ] | |
400 | for attr in _urllib_request_moved_attributes: | |
401 | setattr(Module_six_moves_urllib_request, attr.name, attr) | |
402 | del attr | |
403 | ||
404 | Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes | |
405 | ||
406 | _importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), | |
407 | "moves.urllib_request", "moves.urllib.request") | |
408 | ||
409 | ||
410 | class Module_six_moves_urllib_response(_LazyModule): | |
411 | """Lazy loading of moved objects in six.moves.urllib_response""" | |
412 | ||
413 | ||
414 | _urllib_response_moved_attributes = [ | |
415 | MovedAttribute("addbase", "urllib", "urllib.response"), | |
416 | MovedAttribute("addclosehook", "urllib", "urllib.response"), | |
417 | MovedAttribute("addinfo", "urllib", "urllib.response"), | |
418 | MovedAttribute("addinfourl", "urllib", "urllib.response"), | |
419 | ] | |
420 | for attr in _urllib_response_moved_attributes: | |
421 | setattr(Module_six_moves_urllib_response, attr.name, attr) | |
422 | del attr | |
423 | ||
424 | Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes | |
425 | ||
426 | _importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), | |
427 | "moves.urllib_response", "moves.urllib.response") | |
428 | ||
429 | ||
430 | class Module_six_moves_urllib_robotparser(_LazyModule): | |
431 | """Lazy loading of moved objects in six.moves.urllib_robotparser""" | |
432 | ||
433 | ||
434 | _urllib_robotparser_moved_attributes = [ | |
435 | MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"), | |
436 | ] | |
437 | for attr in _urllib_robotparser_moved_attributes: | |
438 | setattr(Module_six_moves_urllib_robotparser, attr.name, attr) | |
439 | del attr | |
440 | ||
441 | Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes | |
442 | ||
443 | _importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), | |
444 | "moves.urllib_robotparser", "moves.urllib.robotparser") | |
445 | ||
446 | ||
447 | class Module_six_moves_urllib(types.ModuleType): | |
448 | """Create a six.moves.urllib namespace that resembles the Python 3 namespace""" | |
449 | __path__ = [] # mark as package | |
450 | parse = _importer._get_module("moves.urllib_parse") | |
451 | error = _importer._get_module("moves.urllib_error") | |
452 | request = _importer._get_module("moves.urllib_request") | |
453 | response = _importer._get_module("moves.urllib_response") | |
454 | robotparser = _importer._get_module("moves.urllib_robotparser") | |
455 | ||
456 | def __dir__(self): | |
457 | return ['parse', 'error', 'request', 'response', 'robotparser'] | |
458 | ||
459 | _importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"), | |
460 | "moves.urllib") | |
461 | ||
462 | ||
463 | def add_move(move): | |
464 | """Add an item to six.moves.""" | |
465 | setattr(_MovedItems, move.name, move) | |
466 | ||
467 | ||
468 | def remove_move(name): | |
469 | """Remove item from six.moves.""" | |
470 | try: | |
471 | delattr(_MovedItems, name) | |
472 | except AttributeError: | |
473 | try: | |
474 | del moves.__dict__[name] | |
475 | except KeyError: | |
476 | raise AttributeError("no such move, %r" % (name,)) | |
477 | ||
478 | ||
479 | if PY3: | |
480 | _meth_func = "__func__" | |
481 | _meth_self = "__self__" | |
482 | ||
483 | _func_closure = "__closure__" | |
484 | _func_code = "__code__" | |
485 | _func_defaults = "__defaults__" | |
486 | _func_globals = "__globals__" | |
487 | else: | |
488 | _meth_func = "im_func" | |
489 | _meth_self = "im_self" | |
490 | ||
491 | _func_closure = "func_closure" | |
492 | _func_code = "func_code" | |
493 | _func_defaults = "func_defaults" | |
494 | _func_globals = "func_globals" | |
495 | ||
496 | ||
497 | try: | |
498 | advance_iterator = next | |
499 | except NameError: | |
500 | def advance_iterator(it): | |
501 | return it.next() | |
502 | next = advance_iterator | |
503 | ||
504 | ||
505 | try: | |
506 | callable = callable | |
507 | except NameError: | |
508 | def callable(obj): | |
509 | return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) | |
510 | ||
511 | ||
512 | if PY3: | |
513 | def get_unbound_function(unbound): | |
514 | return unbound | |
515 | ||
516 | create_bound_method = types.MethodType | |
517 | ||
518 | Iterator = object | |
519 | else: | |
520 | def get_unbound_function(unbound): | |
521 | return unbound.im_func | |
522 | ||
523 | def create_bound_method(func, obj): | |
524 | return types.MethodType(func, obj, obj.__class__) | |
525 | ||
526 | class Iterator(object): | |
527 | ||
528 | def next(self): | |
529 | return type(self).__next__(self) | |
530 | ||
531 | callable = callable | |
532 | _add_doc(get_unbound_function, | |
533 | """Get the function out of a possibly unbound function""") | |
534 | ||
535 | ||
536 | get_method_function = operator.attrgetter(_meth_func) | |
537 | get_method_self = operator.attrgetter(_meth_self) | |
538 | get_function_closure = operator.attrgetter(_func_closure) | |
539 | get_function_code = operator.attrgetter(_func_code) | |
540 | get_function_defaults = operator.attrgetter(_func_defaults) | |
541 | get_function_globals = operator.attrgetter(_func_globals) | |
542 | ||
543 | ||
544 | if PY3: | |
545 | def iterkeys(d, **kw): | |
546 | return iter(d.keys(**kw)) | |
547 | ||
548 | def itervalues(d, **kw): | |
549 | return iter(d.values(**kw)) | |
550 | ||
551 | def iteritems(d, **kw): | |
552 | return iter(d.items(**kw)) | |
553 | ||
554 | def iterlists(d, **kw): | |
555 | return iter(d.lists(**kw)) | |
556 | else: | |
557 | def iterkeys(d, **kw): | |
558 | return iter(d.iterkeys(**kw)) | |
559 | ||
560 | def itervalues(d, **kw): | |
561 | return iter(d.itervalues(**kw)) | |
562 | ||
563 | def iteritems(d, **kw): | |
564 | return iter(d.iteritems(**kw)) | |
565 | ||
566 | def iterlists(d, **kw): | |
567 | return iter(d.iterlists(**kw)) | |
568 | ||
569 | _add_doc(iterkeys, "Return an iterator over the keys of a dictionary.") | |
570 | _add_doc(itervalues, "Return an iterator over the values of a dictionary.") | |
571 | _add_doc(iteritems, | |
572 | "Return an iterator over the (key, value) pairs of a dictionary.") | |
573 | _add_doc(iterlists, | |
574 | "Return an iterator over the (key, [values]) pairs of a dictionary.") | |
575 | ||
576 | ||
577 | if PY3: | |
578 | def b(s): | |
579 | return s.encode("latin-1") | |
580 | def u(s): | |
581 | return s | |
582 | unichr = chr | |
583 | if sys.version_info[1] <= 1: | |
584 | def int2byte(i): | |
585 | return bytes((i,)) | |
586 | else: | |
587 | # This is about 2x faster than the implementation above on 3.2+ | |
588 | int2byte = operator.methodcaller("to_bytes", 1, "big") | |
589 | byte2int = operator.itemgetter(0) | |
590 | indexbytes = operator.getitem | |
591 | iterbytes = iter | |
592 | import io | |
593 | StringIO = io.StringIO | |
594 | BytesIO = io.BytesIO | |
595 | else: | |
596 | def b(s): | |
597 | return s | |
598 | # Workaround for standalone backslash | |
599 | def u(s): | |
600 | return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape") | |
601 | unichr = unichr | |
602 | int2byte = chr | |
603 | def byte2int(bs): | |
604 | return ord(bs[0]) | |
605 | def indexbytes(buf, i): | |
606 | return ord(buf[i]) | |
607 | def iterbytes(buf): | |
608 | return (ord(byte) for byte in buf) | |
609 | import StringIO | |
610 | StringIO = BytesIO = StringIO.StringIO | |
611 | _add_doc(b, """Byte literal""") | |
612 | _add_doc(u, """Text literal""") | |
613 | ||
614 | ||
615 | if PY3: | |
616 | exec_ = getattr(moves.builtins, "exec") | |
617 | ||
618 | ||
619 | def reraise(tp, value, tb=None): | |
620 | if value is None: | |
621 | value = tp() | |
622 | if value.__traceback__ is not tb: | |
623 | raise value.with_traceback(tb) | |
624 | raise value | |
625 | ||
626 | else: | |
627 | def exec_(_code_, _globs_=None, _locs_=None): | |
628 | """Execute code in a namespace.""" | |
629 | if _globs_ is None: | |
630 | frame = sys._getframe(1) | |
631 | _globs_ = frame.f_globals | |
632 | if _locs_ is None: | |
633 | _locs_ = frame.f_locals | |
634 | del frame | |
635 | elif _locs_ is None: | |
636 | _locs_ = _globs_ | |
637 | exec("""exec _code_ in _globs_, _locs_""") | |
638 | ||
639 | ||
640 | exec_("""def reraise(tp, value, tb=None): | |
641 | raise tp, value, tb | |
642 | """) | |
643 | ||
644 | ||
645 | print_ = getattr(moves.builtins, "print", None) | |
646 | if print_ is None: | |
647 | def print_(*args, **kwargs): | |
648 | """The new-style print function for Python 2.4 and 2.5.""" | |
649 | fp = kwargs.pop("file", sys.stdout) | |
650 | if fp is None: | |
651 | return | |
652 | def write(data): | |
653 | if not isinstance(data, basestring): | |
654 | data = str(data) | |
655 | # If the file has an encoding, encode unicode with it. | |
656 | if (isinstance(fp, file) and | |
657 | isinstance(data, unicode) and | |
658 | fp.encoding is not None): | |
659 | errors = getattr(fp, "errors", None) | |
660 | if errors is None: | |
661 | errors = "strict" | |
662 | data = data.encode(fp.encoding, errors) | |
663 | fp.write(data) | |
664 | want_unicode = False | |
665 | sep = kwargs.pop("sep", None) | |
666 | if sep is not None: | |
667 | if isinstance(sep, unicode): | |
668 | want_unicode = True | |
669 | elif not isinstance(sep, str): | |
670 | raise TypeError("sep must be None or a string") | |
671 | end = kwargs.pop("end", None) | |
672 | if end is not None: | |
673 | if isinstance(end, unicode): | |
674 | want_unicode = True | |
675 | elif not isinstance(end, str): | |
676 | raise TypeError("end must be None or a string") | |
677 | if kwargs: | |
678 | raise TypeError("invalid keyword arguments to print()") | |
679 | if not want_unicode: | |
680 | for arg in args: | |
681 | if isinstance(arg, unicode): | |
682 | want_unicode = True | |
683 | break | |
684 | if want_unicode: | |
685 | newline = unicode("\n") | |
686 | space = unicode(" ") | |
687 | else: | |
688 | newline = "\n" | |
689 | space = " " | |
690 | if sep is None: | |
691 | sep = space | |
692 | if end is None: | |
693 | end = newline | |
694 | for i, arg in enumerate(args): | |
695 | if i: | |
696 | write(sep) | |
697 | write(arg) | |
698 | write(end) | |
699 | ||
700 | _add_doc(reraise, """Reraise an exception.""") | |
701 | ||
702 | if sys.version_info[0:2] < (3, 4): | |
703 | def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, | |
704 | updated=functools.WRAPPER_UPDATES): | |
705 | def wrapper(f): | |
706 | f = functools.wraps(wrapped)(f) | |
707 | f.__wrapped__ = wrapped | |
708 | return f | |
709 | return wrapper | |
710 | else: | |
711 | wraps = functools.wraps | |
712 | ||
713 | def with_metaclass(meta, *bases): | |
714 | """Create a base class with a metaclass.""" | |
715 | # This requires a bit of explanation: the basic idea is to make a dummy | |
716 | # metaclass for one level of class instantiation that replaces itself with | |
717 | # the actual metaclass. | |
718 | class metaclass(meta): | |
719 | def __new__(cls, name, this_bases, d): | |
720 | return meta(name, bases, d) | |
721 | return type.__new__(metaclass, 'temporary_class', (), {}) | |
722 | ||
723 | ||
724 | def add_metaclass(metaclass): | |
725 | """Class decorator for creating a class with a metaclass.""" | |
726 | def wrapper(cls): | |
727 | orig_vars = cls.__dict__.copy() | |
728 | slots = orig_vars.get('__slots__') | |
729 | if slots is not None: | |
730 | if isinstance(slots, str): | |
731 | slots = [slots] | |
732 | for slots_var in slots: | |
733 | orig_vars.pop(slots_var) | |
734 | orig_vars.pop('__dict__', None) | |
735 | orig_vars.pop('__weakref__', None) | |
736 | return metaclass(cls.__name__, cls.__bases__, orig_vars) | |
737 | return wrapper | |
738 | ||
739 | # Complete the moves implementation. | |
740 | # This code is at the end of this module to speed up module loading. | |
741 | # Turn this module into a package. | |
742 | __path__ = [] # required for PEP 302 and PEP 451 | |
743 | __package__ = __name__ # see PEP 366 @ReservedAssignment | |
744 | if globals().get("__spec__") is not None: | |
745 | __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable | |
746 | # Remove other six meta path importers, since they cause problems. This can | |
747 | # happen if six is removed from sys.modules and then reloaded. (Setuptools does | |
748 | # this for some reason.) | |
749 | if sys.meta_path: | |
750 | for i, importer in enumerate(sys.meta_path): | |
751 | # Here's some real nastiness: Another "instance" of the six module might | |
752 | # be floating around. Therefore, we can't use isinstance() to check for | |
753 | # the six meta path importer, since the other six instance will have | |
754 | # inserted an importer with different class. | |
755 | if (type(importer).__name__ == "_SixMetaPathImporter" and | |
756 | importer.name == __name__): | |
757 | del sys.meta_path[i] | |
758 | break | |
759 | del i, importer | |
760 | # Finally, add the importer to the meta path import hook. | |
761 | sys.meta_path.append(_importer) |
10 | 10 | import time |
11 | 11 | |
12 | 12 | from gunicorn import util |
13 | from gunicorn.six import string_types | |
13 | from gunicorn.socketfromfd import fromfd | |
14 | 14 | |
15 | 15 | |
16 | 16 | class BaseSocket(object): |
86 | 86 | |
87 | 87 | def set_options(self, sock, bound=False): |
88 | 88 | sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) |
89 | return super(TCPSocket, self).set_options(sock, bound=bound) | |
89 | return super().set_options(sock, bound=bound) | |
90 | 90 | |
91 | 91 | |
92 | 92 | class TCP6Socket(TCPSocket): |
114 | 114 | os.remove(addr) |
115 | 115 | else: |
116 | 116 | raise ValueError("%r is not a socket" % addr) |
117 | super(UnixSocket, self).__init__(addr, conf, log, fd=fd) | |
117 | super().__init__(addr, conf, log, fd=fd) | |
118 | 118 | |
119 | 119 | def __str__(self): |
120 | 120 | return "unix:%s" % self.cfg_addr |
132 | 132 | sock_type = TCP6Socket |
133 | 133 | else: |
134 | 134 | sock_type = TCPSocket |
135 | elif isinstance(addr, string_types): | |
135 | elif isinstance(addr, (str, bytes)): | |
136 | 136 | sock_type = UnixSocket |
137 | 137 | else: |
138 | 138 | raise TypeError("Unable to create socket from: %r" % addr) |
150 | 150 | listeners = [] |
151 | 151 | |
152 | 152 | # get it only once |
153 | laddr = conf.address | |
153 | addr = conf.address | |
154 | fdaddr = [bind for bind in addr if isinstance(bind, int)] | |
155 | if fds: | |
156 | fdaddr += list(fds) | |
157 | laddr = [bind for bind in addr if not isinstance(bind, int)] | |
154 | 158 | |
155 | 159 | # check ssl config early to raise the error on startup |
156 | 160 | # only the certfile is needed since it can contains the keyfile |
161 | 165 | raise ValueError('keyfile "%s" does not exist' % conf.keyfile) |
162 | 166 | |
163 | 167 | # sockets are already bound |
164 | if fds is not None: | |
165 | for fd in fds: | |
166 | sock = socket.fromfd(fd, socket.AF_UNIX, socket.SOCK_STREAM) | |
168 | if fdaddr: | |
169 | for fd in fdaddr: | |
170 | sock = fromfd(fd) | |
167 | 171 | sock_name = sock.getsockname() |
168 | 172 | sock_type = _sock_type(sock_name) |
169 | 173 | listener = sock_type(sock_name, conf, log, fd=fd) |
0 | # Copyright (C) 2016 Christian Heimes | |
1 | """socketfromfd -- socket.fromfd() with auto-discovery | |
2 | ||
3 | ATTENTION: Do not remove this backport till the minimum required version is | |
4 | Python 3.7. See https://bugs.python.org/issue28134 for details. | |
5 | """ | |
6 | from __future__ import print_function | |
7 | ||
8 | import ctypes | |
9 | import os | |
10 | import socket | |
11 | import sys | |
12 | from ctypes.util import find_library | |
13 | ||
14 | __all__ = ('fromfd',) | |
15 | ||
16 | SO_DOMAIN = getattr(socket, 'SO_DOMAIN', 39) | |
17 | SO_TYPE = getattr(socket, 'SO_TYPE', 3) | |
18 | SO_PROTOCOL = getattr(socket, 'SO_PROTOCOL', 38) | |
19 | ||
20 | ||
21 | _libc_name = find_library('c') | |
22 | if _libc_name is not None: | |
23 | libc = ctypes.CDLL(_libc_name, use_errno=True) | |
24 | else: | |
25 | raise OSError('libc not found') | |
26 | ||
27 | ||
28 | def _errcheck_errno(result, func, arguments): | |
29 | """Raise OSError by errno for -1 | |
30 | """ | |
31 | if result == -1: | |
32 | errno = ctypes.get_errno() | |
33 | raise OSError(errno, os.strerror(errno)) | |
34 | return arguments | |
35 | ||
36 | ||
37 | _libc_getsockopt = libc.getsockopt | |
38 | _libc_getsockopt.argtypes = [ | |
39 | ctypes.c_int, # int sockfd | |
40 | ctypes.c_int, # int level | |
41 | ctypes.c_int, # int optname | |
42 | ctypes.c_void_p, # void *optval | |
43 | ctypes.POINTER(ctypes.c_uint32) # socklen_t *optlen | |
44 | ] | |
45 | _libc_getsockopt.restype = ctypes.c_int # 0: ok, -1: err | |
46 | _libc_getsockopt.errcheck = _errcheck_errno | |
47 | ||
48 | ||
49 | def _raw_getsockopt(fd, level, optname): | |
50 | """Make raw getsockopt() call for int32 optval | |
51 | ||
52 | :param fd: socket fd | |
53 | :param level: SOL_* | |
54 | :param optname: SO_* | |
55 | :return: value as int | |
56 | """ | |
57 | optval = ctypes.c_int(0) | |
58 | optlen = ctypes.c_uint32(4) | |
59 | _libc_getsockopt(fd, level, optname, | |
60 | ctypes.byref(optval), ctypes.byref(optlen)) | |
61 | return optval.value | |
62 | ||
63 | ||
64 | def fromfd(fd, keep_fd=True): | |
65 | """Create a socket from a file descriptor | |
66 | ||
67 | socket domain (family), type and protocol are auto-detected. By default | |
68 | the socket uses a dup()ed fd. The original fd can be closed. | |
69 | ||
70 | The parameter `keep_fd` influences fd duplication. Under Python 2 the | |
71 | fd is always duplicated; `keep_fd=False` additionally closes the input | |
72 | fd. Under Python 3, `keep_fd=False` makes the socket use the same fd. | |
73 | ||
74 | :param fd: socket fd | |
75 | :type fd: int | |
76 | :param keep_fd: keep input fd | |
77 | :type keep_fd: bool | |
78 | :return: socket.socket instance | |
79 | :raises OSError: for invalid socket fd | |
80 | """ | |
81 | family = _raw_getsockopt(fd, socket.SOL_SOCKET, SO_DOMAIN) | |
82 | typ = _raw_getsockopt(fd, socket.SOL_SOCKET, SO_TYPE) | |
83 | proto = _raw_getsockopt(fd, socket.SOL_SOCKET, SO_PROTOCOL) | |
84 | if sys.version_info.major == 2: | |
85 | # Python 2 has no fileno argument and always duplicates the fd | |
86 | sockobj = socket.fromfd(fd, family, typ, proto) | |
87 | sock = socket.socket(None, None, None, _sock=sockobj) | |
88 | if not keep_fd: | |
89 | os.close(fd) | |
90 | return sock | |
91 | else: | |
92 | if keep_fd: | |
93 | return socket.fromfd(fd, family, typ, proto) | |
94 | else: | |
95 | return socket.socket(family, typ, proto, fileno=fd) |
3 | 3 | # See the NOTICE for more information. |
4 | 4 | |
5 | 5 | import os |
6 | import socket | |
6 | 7 | |
7 | 8 | SD_LISTEN_FDS_START = 3 |
8 | 9 | |
42 | 43 | os.environ.pop('LISTEN_FDS', None) |
43 | 44 | |
44 | 45 | return fds |
46 | ||
47 | ||
48 | def sd_notify(state, logger, unset_environment=False): | |
49 | """Send a notification to systemd. state is a string; see | |
50 | the man page of sd_notify (http://www.freedesktop.org/software/systemd/man/sd_notify.html) | |
51 | for a description of the allowable values. | |
52 | ||
53 | If the unset_environment parameter is True, sd_notify() will unset | |
54 | the $NOTIFY_SOCKET environment variable before returning (regardless of | |
55 | whether the function call itself succeeded or not). Further calls to | |
56 | sd_notify() will then fail, but the variable is no longer inherited by | |
57 | child processes. | |
58 | """ | |
59 | ||
60 | ||
61 | addr = os.environ.get('NOTIFY_SOCKET') | |
62 | if addr is None: | |
63 | # not run in a service, just a noop | |
64 | return | |
65 | try: | |
66 | sock = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM | socket.SOCK_CLOEXEC) | |
67 | if addr[0] == '@': | |
68 | addr = '\0' + addr[1:] | |
69 | sock.connect(addr) | |
70 | sock.sendall(state.encode('utf-8')) | |
71 | except: | |
72 | logger.debug("Exception while invoking sd_notify()", exc_info=True) | |
73 | finally: | |
74 | if unset_environment: | |
75 | os.environ.pop('NOTIFY_SOCKET') | |
76 | sock.close() |
2 | 2 | # This file is part of gunicorn released under the MIT license. |
3 | 3 | # See the NOTICE for more information. |
4 | 4 | |
5 | from __future__ import print_function | |
6 | ||
7 | 5 | import email.utils |
6 | import errno | |
8 | 7 | import fcntl |
8 | import html | |
9 | import importlib | |
10 | import inspect | |
9 | 11 | import io |
12 | import logging | |
10 | 13 | import os |
11 | import pkg_resources | |
12 | 14 | import pwd |
13 | 15 | import random |
16 | import re | |
14 | 17 | import socket |
15 | 18 | import sys |
16 | 19 | import textwrap |
17 | 20 | import time |
18 | 21 | import traceback |
19 | import inspect | |
20 | import errno | |
21 | 22 | import warnings |
22 | import logging | |
23 | import re | |
24 | ||
25 | from gunicorn import _compat | |
23 | ||
24 | import pkg_resources | |
25 | ||
26 | 26 | from gunicorn.errors import AppImportError |
27 | from gunicorn.six import text_type | |
28 | 27 | from gunicorn.workers import SUPPORTED_WORKERS |
28 | import urllib.parse | |
29 | 29 | |
30 | 30 | REDIRECT_TO = getattr(os, 'devnull', '/dev/null') |
31 | 31 | |
50 | 50 | setproctitle("gunicorn: %s" % title) |
51 | 51 | except ImportError: |
52 | 52 | def _setproctitle(title): |
53 | return | |
54 | ||
55 | ||
56 | try: | |
57 | from importlib import import_module | |
58 | except ImportError: | |
59 | def _resolve_name(name, package, level): | |
60 | """Return the absolute name of the module to be imported.""" | |
61 | if not hasattr(package, 'rindex'): | |
62 | raise ValueError("'package' not set to a string") | |
63 | dot = len(package) | |
64 | for _ in range(level, 1, -1): | |
65 | try: | |
66 | dot = package.rindex('.', 0, dot) | |
67 | except ValueError: | |
68 | msg = "attempted relative import beyond top-level package" | |
69 | raise ValueError(msg) | |
70 | return "%s.%s" % (package[:dot], name) | |
71 | ||
72 | def import_module(name, package=None): | |
73 | """Import a module. | |
74 | ||
75 | The 'package' argument is required when performing a relative import. It | |
76 | specifies the package to use as the anchor point from which to resolve the | |
77 | relative import to an absolute import. | |
78 | ||
79 | """ | |
80 | if name.startswith('.'): | |
81 | if not package: | |
82 | raise TypeError("relative imports require the 'package' argument") | |
83 | level = 0 | |
84 | for character in name: | |
85 | if character != '.': | |
86 | break | |
87 | level += 1 | |
88 | name = _resolve_name(name[level:], package, level) | |
89 | __import__(name) | |
90 | return sys.modules[name] | |
53 | pass | |
91 | 54 | |
92 | 55 | |
93 | 56 | def load_class(uri, default="gunicorn.workers.sync.SyncWorker", |
131 | 94 | klass = components.pop(-1) |
132 | 95 | |
133 | 96 | try: |
134 | mod = import_module('.'.join(components)) | |
97 | mod = importlib.import_module('.'.join(components)) | |
135 | 98 | except: |
136 | 99 | exc = traceback.format_exc() |
137 | 100 | msg = "class uri %r invalid or not found: \n\n[%s]" |
138 | 101 | raise RuntimeError(msg % (uri, exc)) |
139 | 102 | return getattr(mod, klass) |
103 | ||
104 | ||
105 | positionals = ( | |
106 | inspect.Parameter.POSITIONAL_ONLY, | |
107 | inspect.Parameter.POSITIONAL_OR_KEYWORD, | |
108 | ) | |
109 | ||
110 | ||
111 | def get_arity(f): | |
112 | sig = inspect.signature(f) | |
113 | arity = 0 | |
114 | ||
115 | for param in sig.parameters.values(): | |
116 | if param.kind in positionals: | |
117 | arity += 1 | |
118 | ||
119 | return arity | |
140 | 120 | |
141 | 121 | |
142 | 122 | def get_username(uid): |
168 | 148 | |
169 | 149 | |
170 | 150 | def chown(path, uid, gid): |
171 | gid = abs(gid) & 0x7FFFFFFF # see note above. | |
172 | 151 | os.chown(path, uid, gid) |
173 | 152 | |
174 | 153 | |
175 | 154 | if sys.platform.startswith("win"): |
176 | 155 | def _waitfor(func, pathname, waitall=False): |
177 | # Peform the operation | |
156 | # Perform the operation | |
178 | 157 | func(pathname) |
179 | 158 | # Now setup the wait loop |
180 | 159 | if waitall: |
231 | 210 | return True |
232 | 211 | |
233 | 212 | |
def parse_address(netloc, default_port='8000'):
    """Parse a bind string into a ``(host, port)`` pair, a unix socket
    path, or an inherited file descriptor number.

    Accepted forms: ``HOST:PORT``, ``[IPV6]:PORT``, ``tcp://HOST:PORT``,
    ``unix:PATH`` / ``unix://PATH``, ``fd://N`` and the empty string
    (which binds ``0.0.0.0`` on *default_port*).

    Raises ``RuntimeError`` when the port or file descriptor is not an
    integer.
    """
    # Unix domain socket: hand back the filesystem path unchanged.
    if re.match(r'unix:(//)?', netloc):
        return re.split(r'unix:(//)?', netloc)[-1]

    # Pre-opened file descriptor: "fd://N" -> N as an int.
    if netloc.startswith("fd://"):
        raw_fd = netloc[5:]
        try:
            return int(raw_fd)
        except ValueError:
            raise RuntimeError("%r is not a valid file descriptor." % raw_fd) from None

    # Strip an explicit tcp:// scheme prefix.
    if netloc.startswith("tcp://"):
        netloc = netloc.split("tcp://")[1]

    host, port = netloc, default_port

    if '[' in netloc and ']' in netloc:
        # Bracketed IPv6 literal, optionally followed by ":port".
        host = netloc.split(']')[0][1:]
        port = (netloc.split(']:') + [default_port])[1]
    elif ':' in netloc:
        host, port = (netloc.split(':') + [default_port])[:2]
    elif netloc == "":
        host, port = "0.0.0.0", default_port

    try:
        port = int(port)
    except ValueError:
        raise RuntimeError("%r is not a valid port number." % port)

    return host.lower(), port
261 | 242 | |
262 | 243 | |
263 | 244 | def close_on_exec(fd): |
290 | 271 | |
291 | 272 | |
292 | 273 | def write_chunk(sock, data): |
293 | if isinstance(data, text_type): | |
274 | if isinstance(data, str): | |
294 | 275 | data = data.encode('utf-8') |
295 | 276 | chunk_size = "%X\r\n" % len(data) |
296 | 277 | chunk = b"".join([chunk_size.encode('utf-8'), data, b"\r\n"]) |
316 | 297 | |
317 | 298 | |
318 | 299 | def write_error(sock, status_int, reason, mesg): |
319 | html = textwrap.dedent("""\ | |
300 | html_error = textwrap.dedent("""\ | |
320 | 301 | <html> |
321 | 302 | <head> |
322 | 303 | <title>%(reason)s</title> |
326 | 307 | %(mesg)s |
327 | 308 | </body> |
328 | 309 | </html> |
329 | """) % {"reason": reason, "mesg": _compat.html_escape(mesg)} | |
310 | """) % {"reason": reason, "mesg": html.escape(mesg)} | |
330 | 311 | |
331 | 312 | http = textwrap.dedent("""\ |
332 | 313 | HTTP/1.1 %s %s\r |
334 | 315 | Content-Type: text/html\r |
335 | 316 | Content-Length: %d\r |
336 | 317 | \r |
337 | %s""") % (str(status_int), reason, len(html), html) | |
318 | %s""") % (str(status_int), reason, len(html_error), html_error) | |
338 | 319 | write_nonblock(sock, http.encode('latin1')) |
339 | 320 | |
340 | 321 | |
341 | 322 | def import_app(module): |
342 | 323 | parts = module.split(":", 1) |
343 | 324 | if len(parts) == 1: |
344 | module, obj = module, "application" | |
325 | obj = "application" | |
345 | 326 | else: |
346 | 327 | module, obj = parts[0], parts[1] |
347 | 328 | |
348 | 329 | try: |
349 | __import__(module) | |
330 | mod = importlib.import_module(module) | |
350 | 331 | except ImportError: |
351 | 332 | if module.endswith(".py") and os.path.exists(module): |
352 | 333 | msg = "Failed to find application, did you mean '%s:%s'?" |
353 | 334 | raise ImportError(msg % (module.rsplit(".", 1)[0], obj)) |
354 | else: | |
355 | raise | |
356 | ||
357 | mod = sys.modules[module] | |
335 | raise | |
358 | 336 | |
359 | 337 | is_debug = logging.root.level == logging.DEBUG |
360 | 338 | try: |
361 | app = eval(obj, vars(mod)) | |
362 | except NameError: | |
339 | app = getattr(mod, obj) | |
340 | except AttributeError: | |
363 | 341 | if is_debug: |
364 | 342 | traceback.print_exception(*sys.exc_info()) |
365 | 343 | raise AppImportError("Failed to find application object %r in %r" % (obj, module)) |
500 | 478 | """Converts a string argument to a byte string""" |
501 | 479 | if isinstance(value, bytes): |
502 | 480 | return value |
503 | if not isinstance(value, text_type): | |
481 | if not isinstance(value, str): | |
504 | 482 | raise TypeError('%r is not a string' % value) |
505 | 483 | |
506 | 484 | return value.encode(encoding) |
485 | ||
507 | 486 | |
508 | 487 | def has_fileno(obj): |
509 | 488 | if not hasattr(obj, "fileno"): |
550 | 529 | # relative uri while the RFC says we should consider it as abs_path |
551 | 530 | # http://www.w3.org/Protocols/rfc2616/rfc2616-sec5.html#sec5.1.2 |
552 | 531 | # We use temporary dot prefix to workaround this behaviour |
553 | parts = _compat.urlsplit("." + uri) | |
532 | parts = urllib.parse.urlsplit("." + uri) | |
554 | 533 | return parts._replace(path=parts.path[1:]) |
555 | 534 | |
556 | return _compat.urlsplit(uri) | |
535 | return urllib.parse.urlsplit(uri) | |
536 | ||
537 | ||
# From six.reraise
def reraise(tp, value, tb=None):
    """Re-raise exception *value* of type *tp* with traceback *tb*.

    If *value* is None, a new instance of *tp* is created and raised.
    The traceback is only attached when it differs from the one already
    on the exception.
    """
    try:
        if value is None:
            value = tp()
        if value.__traceback__ is not tb:
            raise value.with_traceback(tb)
        raise value
    finally:
        # Drop local references so the exception/traceback do not keep
        # this frame alive (reference-cycle avoidance, as in six).
        value = None
        tb = None
549 | ||
550 | ||
def bytes_to_str(b):
    """Return *b* as text: str passes through, bytes are decoded as latin-1."""
    return b if isinstance(b, str) else str(b, 'latin1')
555 | ||
556 | ||
def unquote_to_wsgi_str(string):
    """Percent-decode *string* into raw bytes, then reinterpret those
    bytes as latin-1 to produce a WSGI "native" str."""
    raw = urllib.parse.unquote_to_bytes(string)
    return raw.decode('latin-1')
1 | 1 | # |
2 | 2 | # This file is part of gunicorn released under the MIT license. |
3 | 3 | # See the NOTICE for more information. |
4 | ||
5 | import sys | |
6 | 4 | |
7 | 5 | # supported gunicorn workers. |
8 | 6 | SUPPORTED_WORKERS = { |
14 | 12 | "tornado": "gunicorn.workers.gtornado.TornadoWorker", |
15 | 13 | "gthread": "gunicorn.workers.gthread.ThreadWorker", |
16 | 14 | } |
17 | ||
18 | ||
19 | if sys.version_info >= (3, 4): | |
20 | # gaiohttp worker can be used with Python 3.4+ only. | |
21 | SUPPORTED_WORKERS["gaiohttp"] = "gunicorn.workers.gaiohttp.AiohttpWorker" |
0 | # -*- coding: utf-8 - | |
1 | # | |
2 | # This file is part of gunicorn released under the MIT license. | |
3 | # See the NOTICE for more information. | |
4 | ||
5 | import asyncio | |
6 | import datetime | |
7 | import functools | |
8 | import logging | |
9 | import os | |
10 | ||
11 | try: | |
12 | import ssl | |
13 | except ImportError: | |
14 | ssl = None | |
15 | ||
16 | import gunicorn.workers.base as base | |
17 | ||
18 | from aiohttp.wsgi import WSGIServerHttpProtocol as OldWSGIServerHttpProtocol | |
19 | ||
20 | ||
21 | class WSGIServerHttpProtocol(OldWSGIServerHttpProtocol): | |
22 | def log_access(self, request, environ, response, time): | |
23 | self.logger.access(response, request, environ, datetime.timedelta(0, 0, time)) | |
24 | ||
25 | ||
26 | class AiohttpWorker(base.Worker): | |
27 | ||
28 | def __init__(self, *args, **kw): # pragma: no cover | |
29 | super().__init__(*args, **kw) | |
30 | cfg = self.cfg | |
31 | if cfg.is_ssl: | |
32 | self.ssl_context = self._create_ssl_context(cfg) | |
33 | else: | |
34 | self.ssl_context = None | |
35 | self.servers = [] | |
36 | self.connections = {} | |
37 | ||
38 | def init_process(self): | |
39 | # create new event_loop after fork | |
40 | asyncio.get_event_loop().close() | |
41 | ||
42 | self.loop = asyncio.new_event_loop() | |
43 | asyncio.set_event_loop(self.loop) | |
44 | ||
45 | super().init_process() | |
46 | ||
47 | def run(self): | |
48 | self._runner = asyncio.ensure_future(self._run(), loop=self.loop) | |
49 | ||
50 | try: | |
51 | self.loop.run_until_complete(self._runner) | |
52 | finally: | |
53 | self.loop.close() | |
54 | ||
55 | def wrap_protocol(self, proto): | |
56 | proto.connection_made = _wrp( | |
57 | proto, proto.connection_made, self.connections) | |
58 | proto.connection_lost = _wrp( | |
59 | proto, proto.connection_lost, self.connections, False) | |
60 | return proto | |
61 | ||
62 | def factory(self, wsgi, addr): | |
63 | # are we in debug level | |
64 | is_debug = self.log.loglevel == logging.DEBUG | |
65 | ||
66 | proto = WSGIServerHttpProtocol( | |
67 | wsgi, readpayload=True, | |
68 | loop=self.loop, | |
69 | log=self.log, | |
70 | debug=is_debug, | |
71 | keep_alive=self.cfg.keepalive, | |
72 | access_log=self.log.access_log, | |
73 | access_log_format=self.cfg.access_log_format) | |
74 | return self.wrap_protocol(proto) | |
75 | ||
76 | def get_factory(self, sock, addr): | |
77 | return functools.partial(self.factory, self.wsgi, addr) | |
78 | ||
79 | @asyncio.coroutine | |
80 | def close(self): | |
81 | try: | |
82 | if hasattr(self.wsgi, 'close'): | |
83 | yield from self.wsgi.close() | |
84 | except: | |
85 | self.log.exception('Process shutdown exception') | |
86 | ||
87 | @asyncio.coroutine | |
88 | def _run(self): | |
89 | for sock in self.sockets: | |
90 | factory = self.get_factory(sock.sock, sock.cfg_addr) | |
91 | self.servers.append( | |
92 | (yield from self._create_server(factory, sock))) | |
93 | ||
94 | # If our parent changed then we shut down. | |
95 | pid = os.getpid() | |
96 | try: | |
97 | while self.alive or self.connections: | |
98 | self.notify() | |
99 | ||
100 | if (self.alive and | |
101 | pid == os.getpid() and self.ppid != os.getppid()): | |
102 | self.log.info("Parent changed, shutting down: %s", self) | |
103 | self.alive = False | |
104 | ||
105 | # stop accepting requests | |
106 | if not self.alive: | |
107 | if self.servers: | |
108 | self.log.info( | |
109 | "Stopping server: %s, connections: %s", | |
110 | pid, len(self.connections)) | |
111 | for server in self.servers: | |
112 | server.close() | |
113 | self.servers.clear() | |
114 | ||
115 | # prepare connections for closing | |
116 | for conn in self.connections.values(): | |
117 | if hasattr(conn, 'closing'): | |
118 | conn.closing() | |
119 | ||
120 | yield from asyncio.sleep(1.0, loop=self.loop) | |
121 | except KeyboardInterrupt: | |
122 | pass | |
123 | ||
124 | if self.servers: | |
125 | for server in self.servers: | |
126 | server.close() | |
127 | ||
128 | yield from self.close() | |
129 | ||
130 | @asyncio.coroutine | |
131 | def _create_server(self, factory, sock): | |
132 | return self.loop.create_server(factory, sock=sock.sock, | |
133 | ssl=self.ssl_context) | |
134 | ||
135 | @staticmethod | |
136 | def _create_ssl_context(cfg): | |
137 | """ Creates SSLContext instance for usage in asyncio.create_server. | |
138 | ||
139 | See ssl.SSLSocket.__init__ for more details. | |
140 | """ | |
141 | ctx = ssl.SSLContext(cfg.ssl_version) | |
142 | ctx.load_cert_chain(cfg.certfile, cfg.keyfile) | |
143 | ctx.verify_mode = cfg.cert_reqs | |
144 | if cfg.ca_certs: | |
145 | ctx.load_verify_locations(cfg.ca_certs) | |
146 | if cfg.ciphers: | |
147 | ctx.set_ciphers(cfg.ciphers) | |
148 | return ctx | |
149 | ||
150 | ||
151 | class _wrp: | |
152 | ||
153 | def __init__(self, proto, meth, tracking, add=True): | |
154 | self._proto = proto | |
155 | self._id = id(proto) | |
156 | self._meth = meth | |
157 | self._tracking = tracking | |
158 | self._add = add | |
159 | ||
160 | def __call__(self, *args): | |
161 | if self._add: | |
162 | self._tracking[self._id] = self._proto | |
163 | elif self._id in self._tracking: | |
164 | del self._tracking[self._id] | |
165 | ||
166 | conn = self._meth(*args) | |
167 | return conn |
2 | 2 | # This file is part of gunicorn released under the MIT license. |
3 | 3 | # See the NOTICE for more information. |
4 | 4 | |
5 | from datetime import datetime | |
5 | import io | |
6 | 6 | import os |
7 | from random import randint | |
8 | 7 | import signal |
9 | from ssl import SSLError | |
10 | 8 | import sys |
11 | 9 | import time |
12 | 10 | import traceback |
13 | ||
14 | from gunicorn import six | |
11 | from datetime import datetime | |
12 | from random import randint | |
13 | from ssl import SSLError | |
14 | ||
15 | 15 | from gunicorn import util |
16 | from gunicorn.http.errors import ( | |
17 | ForbiddenProxyRequest, InvalidHeader, | |
18 | InvalidHeaderName, InvalidHTTPVersion, | |
19 | InvalidProxyLine, InvalidRequestLine, | |
20 | InvalidRequestMethod, InvalidSchemeHeaders, | |
21 | LimitRequestHeaders, LimitRequestLine, | |
22 | ) | |
23 | from gunicorn.http.wsgi import Response, default_environ | |
24 | from gunicorn.reloader import reloader_engines | |
16 | 25 | from gunicorn.workers.workertmp import WorkerTmp |
17 | from gunicorn.reloader import reloader_engines | |
18 | from gunicorn.http.errors import ( | |
19 | InvalidHeader, InvalidHeaderName, InvalidRequestLine, InvalidRequestMethod, | |
20 | InvalidHTTPVersion, LimitRequestLine, LimitRequestHeaders, | |
21 | ) | |
22 | from gunicorn.http.errors import InvalidProxyLine, ForbiddenProxyRequest | |
23 | from gunicorn.http.errors import InvalidSchemeHeaders | |
24 | from gunicorn.http.wsgi import default_environ, Response | |
25 | from gunicorn.six import MAXSIZE | |
26 | 26 | |
27 | 27 | |
28 | 28 | class Worker(object): |
50 | 50 | self.reloader = None |
51 | 51 | |
52 | 52 | self.nr = 0 |
53 | jitter = randint(0, cfg.max_requests_jitter) | |
54 | self.max_requests = cfg.max_requests + jitter or MAXSIZE | |
53 | ||
54 | if cfg.max_requests > 0: | |
55 | jitter = randint(0, cfg.max_requests_jitter) | |
56 | self.max_requests = cfg.max_requests + jitter | |
57 | else: | |
58 | self.max_requests = sys.maxsize | |
59 | ||
55 | 60 | self.alive = True |
56 | 61 | self.log = log |
57 | 62 | self.tmp = WorkerTmp(cfg) |
79 | 84 | """\ |
80 | 85 | If you override this method in a subclass, the last statement |
81 | 86 | in the function should be to call this method with |
82 | super(MyWorkerClass, self).init_process() so that the ``run()`` | |
83 | loop is initiated. | |
87 | super().init_process() so that the ``run()`` loop is initiated. | |
84 | 88 | """ |
85 | 89 | |
86 | 90 | # set environment' variables |
149 | 153 | _, exc_val, exc_tb = sys.exc_info() |
150 | 154 | self.reloader.add_extra_file(exc_val.filename) |
151 | 155 | |
152 | tb_string = six.StringIO() | |
156 | tb_string = io.StringIO() | |
153 | 157 | traceback.print_tb(exc_tb, file=tb_string) |
154 | 158 | self.wsgi = util.make_fail_app(tb_string.getvalue()) |
155 | 159 | finally: |
169 | 173 | |
170 | 174 | # Don't let SIGTERM and SIGUSR1 disturb active requests |
171 | 175 | # by interrupting system calls |
172 | if hasattr(signal, 'siginterrupt'): # python >= 2.6 | |
173 | signal.siginterrupt(signal.SIGTERM, False) | |
174 | signal.siginterrupt(signal.SIGUSR1, False) | |
176 | signal.siginterrupt(signal.SIGTERM, False) | |
177 | signal.siginterrupt(signal.SIGUSR1, False) | |
175 | 178 | |
176 | 179 | if hasattr(signal, 'set_wakeup_fd'): |
177 | 180 | signal.set_wakeup_fd(self.PIPE[1]) |
12 | 12 | import gunicorn.http.wsgi as wsgi |
13 | 13 | import gunicorn.util as util |
14 | 14 | import gunicorn.workers.base as base |
15 | from gunicorn import six | |
16 | 15 | |
17 | 16 | ALREADY_HANDLED = object() |
18 | 17 | |
20 | 19 | class AsyncWorker(base.Worker): |
21 | 20 | |
22 | 21 | def __init__(self, *args, **kwargs): |
23 | super(AsyncWorker, self).__init__(*args, **kwargs) | |
22 | super().__init__(*args, **kwargs) | |
24 | 23 | self.worker_connections = self.cfg.worker_connections |
25 | 24 | |
26 | 25 | def timeout_ctx(self): |
37 | 36 | try: |
38 | 37 | listener_name = listener.getsockname() |
39 | 38 | if not self.cfg.keepalive: |
40 | req = six.next(parser) | |
39 | req = next(parser) | |
41 | 40 | self.handle_request(listener_name, req, client, addr) |
42 | 41 | else: |
43 | 42 | # keepalive loop |
45 | 44 | while True: |
46 | 45 | req = None |
47 | 46 | with self.timeout_ctx(): |
48 | req = six.next(parser) | |
47 | req = next(parser) | |
49 | 48 | if not req: |
50 | 49 | break |
51 | 50 | if req.proxy_protocol_info: |
59 | 58 | self.log.debug("Closing connection. %s", e) |
60 | 59 | except ssl.SSLError: |
61 | 60 | # pass to next try-except level |
62 | six.reraise(*sys.exc_info()) | |
61 | util.reraise(*sys.exc_info()) | |
63 | 62 | except EnvironmentError: |
64 | 63 | # pass to next try-except level |
65 | six.reraise(*sys.exc_info()) | |
64 | util.reraise(*sys.exc_info()) | |
66 | 65 | except Exception as e: |
67 | 66 | self.handle_error(req, client, addr, e) |
68 | 67 | except ssl.SSLError as e: |
125 | 124 | except EnvironmentError: |
126 | 125 | # If the original exception was a socket.error we delegate |
127 | 126 | # handling it to the caller (where handle() might ignore it) |
128 | six.reraise(*sys.exc_info()) | |
127 | util.reraise(*sys.exc_info()) | |
129 | 128 | except Exception: |
130 | 129 | if resp and resp.headers_sent: |
131 | 130 | # If the requests have already been sent, we should close the |
0 | # -*- coding: utf-8 - | |
1 | # | |
2 | # This file is part of gunicorn released under the MIT license. | |
3 | # See the NOTICE for more information. | |
4 | ||
5 | import sys | |
6 | ||
7 | from gunicorn import util | |
8 | ||
9 | if sys.version_info >= (3, 4): | |
10 | try: | |
11 | import aiohttp # pylint: disable=unused-import | |
12 | except ImportError: | |
13 | raise RuntimeError("You need aiohttp installed to use this worker.") | |
14 | else: | |
15 | try: | |
16 | from aiohttp.worker import GunicornWebWorker as AiohttpWorker | |
17 | except ImportError: | |
18 | from gunicorn.workers._gaiohttp import AiohttpWorker | |
19 | ||
20 | util.warn( | |
21 | "The 'gaiohttp' worker is deprecated. See --worker-class " | |
22 | "documentation for more information." | |
23 | ) | |
24 | __all__ = ['AiohttpWorker'] | |
25 | else: | |
26 | raise RuntimeError("You need Python >= 3.4 to use the gaiohttp worker") |
4 | 4 | |
5 | 5 | from functools import partial |
6 | 6 | import errno |
7 | import os | |
7 | 8 | import sys |
8 | 9 | |
9 | 10 | try: |
10 | 11 | import eventlet |
11 | 12 | except ImportError: |
12 | raise RuntimeError("You need eventlet installed to use this worker.") | |
13 | ||
14 | # validate the eventlet version | |
15 | if eventlet.version_info < (0, 9, 7): | |
16 | raise RuntimeError("You need eventlet >= 0.9.7") | |
17 | ||
13 | raise RuntimeError("eventlet worker requires eventlet 0.24.1 or higher") | |
14 | else: | |
15 | from pkg_resources import parse_version | |
16 | if parse_version(eventlet.__version__) < parse_version('0.24.1'): | |
17 | raise RuntimeError("eventlet worker requires eventlet 0.24.1 or higher") | |
18 | 18 | |
19 | 19 | from eventlet import hubs, greenthread |
20 | 20 | from eventlet.greenio import GreenSocket |
22 | 22 | from eventlet.wsgi import ALREADY_HANDLED as EVENTLET_ALREADY_HANDLED |
23 | 23 | import greenlet |
24 | 24 | |
25 | from gunicorn.http.wsgi import sendfile as o_sendfile | |
26 | 25 | from gunicorn.workers.base_async import AsyncWorker |
26 | ||
27 | 27 | |
28 | 28 | def _eventlet_sendfile(fdout, fdin, offset, nbytes): |
29 | 29 | while True: |
30 | 30 | try: |
31 | return o_sendfile(fdout, fdin, offset, nbytes) | |
31 | return os.sendfile(fdout, fdin, offset, nbytes) | |
32 | 32 | except OSError as e: |
33 | 33 | if e.args[0] == errno.EAGAIN: |
34 | 34 | trampoline(fdout, write=True) |
78 | 78 | |
79 | 79 | |
80 | 80 | def patch_sendfile(): |
81 | from gunicorn.http import wsgi | |
82 | ||
83 | if o_sendfile is not None: | |
84 | setattr(wsgi, "sendfile", _eventlet_sendfile) | |
81 | setattr(os, "sendfile", _eventlet_sendfile) | |
85 | 82 | |
86 | 83 | |
87 | 84 | class EventletWorker(AsyncWorker): |
88 | 85 | |
89 | 86 | def patch(self): |
90 | 87 | hubs.use_hub() |
91 | eventlet.monkey_patch(os=False) | |
88 | eventlet.monkey_patch() | |
92 | 89 | patch_sendfile() |
93 | 90 | |
94 | 91 | def is_already_handled(self, respiter): |
95 | 92 | if respiter == EVENTLET_ALREADY_HANDLED: |
96 | 93 | raise StopIteration() |
97 | else: | |
98 | return super(EventletWorker, self).is_already_handled(respiter) | |
94 | return super().is_already_handled(respiter) | |
99 | 95 | |
100 | 96 | def init_process(self): |
101 | super(EventletWorker, self).init_process() | |
102 | 97 | self.patch() |
98 | super().init_process() | |
103 | 99 | |
104 | 100 | def handle_quit(self, sig, frame): |
105 | eventlet.spawn(super(EventletWorker, self).handle_quit, sig, frame) | |
101 | eventlet.spawn(super().handle_quit, sig, frame) | |
106 | 102 | |
107 | 103 | def handle_usr1(self, sig, frame): |
108 | eventlet.spawn(super(EventletWorker, self).handle_usr1, sig, frame) | |
104 | eventlet.spawn(super().handle_usr1, sig, frame) | |
109 | 105 | |
110 | 106 | def timeout_ctx(self): |
111 | 107 | return eventlet.Timeout(self.cfg.keepalive or None, False) |
115 | 111 | client = eventlet.wrap_ssl(client, server_side=True, |
116 | 112 | **self.cfg.ssl_options) |
117 | 113 | |
118 | super(EventletWorker, self).handle(listener, client, addr) | |
114 | super().handle(listener, client, addr) | |
119 | 115 | |
120 | 116 | def run(self): |
121 | 117 | acceptors = [] |
9 | 9 | from functools import partial |
10 | 10 | import time |
11 | 11 | |
12 | _socket = __import__("socket") | |
13 | ||
14 | # workaround on osx, disable kqueue | |
15 | if sys.platform == "darwin": | |
16 | os.environ['EVENT_NOKQUEUE'] = "1" | |
17 | ||
18 | 12 | try: |
19 | 13 | import gevent |
20 | 14 | except ImportError: |
21 | raise RuntimeError("You need gevent installed to use this worker.") | |
15 | raise RuntimeError("gevent worker requires gevent 1.4 or higher") | |
16 | else: | |
17 | from pkg_resources import parse_version | |
18 | if parse_version(gevent.__version__) < parse_version('1.4'): | |
19 | raise RuntimeError("gevent worker requires gevent 1.4 or higher") | |
20 | ||
22 | 21 | from gevent.pool import Pool |
23 | 22 | from gevent.server import StreamServer |
24 | from gevent.socket import wait_write, socket | |
25 | from gevent import pywsgi | |
23 | from gevent import hub, monkey, socket, pywsgi | |
26 | 24 | |
27 | 25 | import gunicorn |
28 | 26 | from gunicorn.http.wsgi import base_environ |
29 | 27 | from gunicorn.workers.base_async import AsyncWorker |
30 | from gunicorn.http.wsgi import sendfile as o_sendfile | |
31 | 28 | |
32 | 29 | VERSION = "gevent/%s gunicorn/%s" % (gevent.__version__, gunicorn.__version__) |
30 | ||
33 | 31 | |
def _gevent_sendfile(fdout, fdin, offset, nbytes):
    """sendfile() replacement that cooperates with gevent.

    Retries ``os.sendfile`` in a loop: when the output fd would block
    (EAGAIN) it yields to the gevent hub via ``socket.wait_write`` so
    other greenlets can run, then tries again. Any other OSError is
    re-raised.
    """
    while True:
        try:
            return os.sendfile(fdout, fdin, offset, nbytes)
        except OSError as e:
            if e.args[0] == errno.EAGAIN:
                # Output not ready: block only this greenlet, then retry.
                socket.wait_write(fdout)
            else:
                raise
43 | 41 | |
44 | 42 | def patch_sendfile(): |
45 | from gunicorn.http import wsgi | |
46 | ||
47 | if o_sendfile is not None: | |
48 | setattr(wsgi, "sendfile", _gevent_sendfile) | |
43 | setattr(os, "sendfile", _gevent_sendfile) | |
49 | 44 | |
50 | 45 | |
51 | 46 | class GeventWorker(AsyncWorker): |
54 | 49 | wsgi_handler = None |
55 | 50 | |
56 | 51 | def patch(self): |
57 | from gevent import monkey | |
58 | monkey.noisy = False | |
59 | ||
60 | # if the new version is used make sure to patch subprocess | |
61 | if gevent.version_info[0] == 0: | |
62 | monkey.patch_all() | |
63 | else: | |
64 | monkey.patch_all(subprocess=True) | |
52 | monkey.patch_all() | |
65 | 53 | |
66 | 54 | # monkey patch sendfile to make it none blocking |
67 | 55 | patch_sendfile() |
69 | 57 | # patch sockets |
70 | 58 | sockets = [] |
71 | 59 | for s in self.sockets: |
72 | if sys.version_info[0] == 3: | |
73 | sockets.append(socket(s.FAMILY, _socket.SOCK_STREAM, | |
74 | fileno=s.sock.fileno())) | |
75 | else: | |
76 | sockets.append(socket(s.FAMILY, _socket.SOCK_STREAM, | |
77 | _sock=s)) | |
60 | sockets.append(socket.socket(s.FAMILY, socket.SOCK_STREAM, | |
61 | fileno=s.sock.fileno())) | |
78 | 62 | self.sockets = sockets |
79 | 63 | |
    def notify(self):
        """Heartbeat hook: also exit immediately if the master process died.

        A changed parent pid means this worker was orphaned (the arbiter
        is gone), so it shuts itself down.
        """
        super().notify()
        if self.ppid != os.getppid():
            self.log.info("Parent changed, shutting down: %s", self)
            sys.exit(0)
151 | 135 | # Connected socket timeout defaults to socket.getdefaulttimeout(). |
152 | 136 | # This forces to blocking mode. |
153 | 137 | client.setblocking(1) |
154 | super(GeventWorker, self).handle(listener, client, addr) | |
138 | super().handle(listener, client, addr) | |
155 | 139 | |
156 | 140 | def handle_request(self, listener_name, req, sock, addr): |
157 | 141 | try: |
158 | super(GeventWorker, self).handle_request(listener_name, req, sock, | |
159 | addr) | |
142 | super().handle_request(listener_name, req, sock, addr) | |
160 | 143 | except gevent.GreenletExit: |
161 | 144 | pass |
162 | 145 | except SystemExit: |
165 | 148 | def handle_quit(self, sig, frame): |
166 | 149 | # Move this out of the signal handler so we can use |
167 | 150 | # blocking calls. See #1126 |
168 | gevent.spawn(super(GeventWorker, self).handle_quit, sig, frame) | |
151 | gevent.spawn(super().handle_quit, sig, frame) | |
169 | 152 | |
170 | 153 | def handle_usr1(self, sig, frame): |
171 | 154 | # Make the gevent workers handle the usr1 signal |
172 | 155 | # by deferring to a new greenlet. See #1645 |
173 | gevent.spawn(super(GeventWorker, self).handle_usr1, sig, frame) | |
174 | ||
175 | if gevent.version_info[0] == 0: | |
176 | ||
177 | def init_process(self): | |
178 | # monkey patch here | |
179 | self.patch() | |
180 | ||
181 | # reinit the hub | |
182 | import gevent.core | |
183 | gevent.core.reinit() | |
184 | ||
185 | #gevent 0.13 and older doesn't reinitialize dns for us after forking | |
186 | #here's the workaround | |
187 | gevent.core.dns_shutdown(fail_requests=1) | |
188 | gevent.core.dns_init() | |
189 | super(GeventWorker, self).init_process() | |
190 | ||
191 | else: | |
192 | ||
193 | def init_process(self): | |
194 | # monkey patch here | |
195 | self.patch() | |
196 | ||
197 | # reinit the hub | |
198 | from gevent import hub | |
199 | hub.reinit() | |
200 | ||
201 | # then initialize the process | |
202 | super(GeventWorker, self).init_process() | |
156 | gevent.spawn(super().handle_usr1, sig, frame) | |
157 | ||
    def init_process(self):
        """Per-worker setup: monkey-patch, reset the gevent hub, then run."""
        # Apply gevent monkey-patching before anything else in this process.
        self.patch()
        # The hub inherited across fork is unusable; re-create it.
        hub.reinit()
        super().init_process()
203 | 162 | |
204 | 163 | |
205 | 164 | class GeventResponse(object): |
229 | 188 | self.server.log.access(resp, req_headers, self.environ, response_time) |
230 | 189 | |
231 | 190 | def get_environ(self): |
232 | env = super(PyWSGIHandler, self).get_environ() | |
191 | env = super().get_environ() | |
233 | 192 | env['gunicorn.sock'] = self.socket |
234 | 193 | env['RAW_URI'] = self.path |
235 | 194 | return env |
3 | 3 | # See the NOTICE for more information. |
4 | 4 | |
5 | 5 | # design: |
6 | # a threaded worker accepts connections in the main loop, accepted | |
7 | # connections are are added to the thread pool as a connection job. On | |
8 | # keepalive connections are put back in the loop waiting for an event. | |
9 | # If no event happen after the keep alive timeout, the connectoin is | |
6 | # A threaded worker accepts connections in the main loop, accepted | |
7 | # connections are added to the thread pool as a connection job. | |
8 | # Keepalive connections are put back in the loop waiting for an event. | |
9 | # If no event happen after the keep alive timeout, the connection is | |
10 | 10 | # closed. |
11 | ||
12 | from collections import deque | |
13 | from datetime import datetime | |
11 | # pylint: disable=no-else-break | |
12 | ||
13 | import concurrent.futures as futures | |
14 | 14 | import errno |
15 | from functools import partial | |
16 | 15 | import os |
16 | import selectors | |
17 | 17 | import socket |
18 | 18 | import ssl |
19 | 19 | import sys |
20 | import time | |
21 | from collections import deque | |
22 | from datetime import datetime | |
23 | from functools import partial | |
20 | 24 | from threading import RLock |
21 | import time | |
22 | ||
25 | ||
26 | from . import base | |
23 | 27 | from .. import http |
28 | from .. import util | |
24 | 29 | from ..http import wsgi |
25 | from .. import util | |
26 | from . import base | |
27 | from .. import six | |
28 | ||
29 | ||
30 | try: | |
31 | import concurrent.futures as futures | |
32 | except ImportError: | |
33 | raise RuntimeError(""" | |
34 | You need to install the 'futures' package to use this worker with this | |
35 | Python version. | |
36 | """) | |
37 | ||
38 | try: | |
39 | from asyncio import selectors | |
40 | except ImportError: | |
41 | from gunicorn import selectors | |
42 | 30 | |
43 | 31 | |
44 | 32 | class TConn(object): |
77 | 65 | class ThreadWorker(base.Worker): |
78 | 66 | |
79 | 67 | def __init__(self, *args, **kwargs): |
80 | super(ThreadWorker, self).__init__(*args, **kwargs) | |
68 | super().__init__(*args, **kwargs) | |
81 | 69 | self.worker_connections = self.cfg.worker_connections |
82 | 70 | self.max_keepalived = self.cfg.worker_connections - self.cfg.threads |
83 | 71 | # initialise the pool |
97 | 85 | "Check the number of worker connections and threads.") |
98 | 86 | |
99 | 87 | def init_process(self): |
100 | self.tpool = futures.ThreadPoolExecutor(max_workers=self.cfg.threads) | |
88 | self.tpool = self.get_thread_pool() | |
101 | 89 | self.poller = selectors.DefaultSelector() |
102 | 90 | self._lock = RLock() |
103 | super(ThreadWorker, self).init_process() | |
91 | super().init_process() | |
92 | ||
    def get_thread_pool(self):
        """Return the executor used to run connection jobs.

        Override this method in a subclass to customize how the thread
        pool is created (sized by ``cfg.threads`` by default).
        """
        return futures.ThreadPoolExecutor(max_workers=self.cfg.threads)
104 | 96 | |
105 | 97 | def handle_quit(self, sig, frame): |
106 | 98 | self.alive = False |
224 | 216 | if not self.is_parent_alive(): |
225 | 217 | break |
226 | 218 | |
227 | # hanle keepalive timeouts | |
219 | # handle keepalive timeouts | |
228 | 220 | self.murder_keepalived() |
229 | 221 | |
230 | 222 | self.tpool.shutdown(False) |
270 | 262 | keepalive = False |
271 | 263 | req = None |
272 | 264 | try: |
273 | req = six.next(conn.parser) | |
265 | req = next(conn.parser) | |
274 | 266 | if not req: |
275 | 267 | return (False, conn) |
276 | 268 | |
344 | 336 | return False |
345 | 337 | except EnvironmentError: |
346 | 338 | # pass to next try-except level |
347 | six.reraise(*sys.exc_info()) | |
339 | util.reraise(*sys.exc_info()) | |
348 | 340 | except Exception: |
349 | 341 | if resp and resp.headers_sent: |
350 | 342 | # If the requests have already been sent, we should close the |
18 | 18 | from gunicorn import __version__ as gversion |
19 | 19 | |
20 | 20 | |
21 | # `io_loop` arguments to many Tornado functions have been removed in Tornado 5.0 | |
22 | # <http://www.tornadoweb.org/en/stable/releases/v5.0.0.html#backwards-compatibility-notes> | |
23 | IOLOOP_PARAMETER_REMOVED = tornado.version_info >= (5, 0, 0) | |
21 | # Tornado 5.0 updated its IOLoop, and the `io_loop` arguments to many | |
22 | # Tornado functions have been removed in Tornado 5.0. Also, they no | |
23 | # longer store PeriodCallbacks in ioloop._callbacks. Instead we store | |
24 | # them on our side, and use stop() on them when stopping the worker. | |
25 | # See https://www.tornadoweb.org/en/stable/releases/v5.0.0.html#backwards-compatibility-notes | |
26 | # for more details. | |
27 | TORNADO5 = tornado.version_info >= (5, 0, 0) | |
24 | 28 | |
25 | 29 | |
26 | 30 | class TornadoWorker(Worker): |
39 | 43 | |
40 | 44 | def handle_exit(self, sig, frame): |
41 | 45 | if self.alive: |
42 | super(TornadoWorker, self).handle_exit(sig, frame) | |
46 | super().handle_exit(sig, frame) | |
43 | 47 | |
44 | 48 | def handle_request(self): |
45 | 49 | self.nr += 1 |
65 | 69 | pass |
66 | 70 | self.server_alive = False |
67 | 71 | else: |
68 | if not self.ioloop._callbacks: | |
72 | if TORNADO5: | |
73 | for callback in self.callbacks: | |
74 | callback.stop() | |
69 | 75 | self.ioloop.stop() |
76 | else: | |
77 | if not self.ioloop._callbacks: | |
78 | self.ioloop.stop() | |
79 | ||
    def init_process(self):
        """Drop any IOLoop inherited from the master before worker setup."""
        # IOLoop cannot survive a fork or be shared across processes
        # in any way. When multiple processes are being used, each process
        # should create its own IOLoop. We should clear current IOLoop
        # if exists before os.fork.
        IOLoop.clear_current()
        super().init_process()
70 | 87 | |
71 | 88 | def run(self): |
72 | 89 | self.ioloop = IOLoop.instance() |
73 | 90 | self.alive = True |
74 | 91 | self.server_alive = False |
75 | if IOLOOP_PARAMETER_REMOVED: | |
76 | PeriodicCallback(self.watchdog, 1000).start() | |
77 | PeriodicCallback(self.heartbeat, 1000).start() | |
92 | ||
93 | if TORNADO5: | |
94 | self.callbacks = [] | |
95 | self.callbacks.append(PeriodicCallback(self.watchdog, 1000)) | |
96 | self.callbacks.append(PeriodicCallback(self.heartbeat, 1000)) | |
97 | for callback in self.callbacks: | |
98 | callback.start() | |
78 | 99 | else: |
79 | 100 | PeriodicCallback(self.watchdog, 1000, io_loop=self.ioloop).start() |
80 | 101 | PeriodicCallback(self.heartbeat, 1000, io_loop=self.ioloop).start() |
83 | 104 | # instance of tornado.web.Application or is an |
84 | 105 | # instance of tornado.wsgi.WSGIApplication |
85 | 106 | app = self.wsgi |
86 | if not isinstance(app, tornado.web.Application) or \ | |
87 | isinstance(app, tornado.wsgi.WSGIApplication): | |
88 | app = WSGIContainer(app) | |
107 | ||
108 | if tornado.version_info[0] < 6: | |
109 | if not isinstance(app, tornado.web.Application) or \ | |
110 | isinstance(app, tornado.wsgi.WSGIApplication): | |
111 | app = WSGIContainer(app) | |
89 | 112 | |
90 | 113 | # Monkey-patching HTTPConnection.finish to count the |
91 | 114 | # number of requests being handled by Tornado. This |
118 | 141 | # options |
119 | 142 | del _ssl_opt["do_handshake_on_connect"] |
120 | 143 | del _ssl_opt["suppress_ragged_eofs"] |
121 | if IOLOOP_PARAMETER_REMOVED: | |
144 | if TORNADO5: | |
122 | 145 | server = server_class(app, ssl_options=_ssl_opt) |
123 | 146 | else: |
124 | 147 | server = server_class(app, io_loop=self.ioloop, |
125 | 148 | ssl_options=_ssl_opt) |
126 | 149 | else: |
127 | if IOLOOP_PARAMETER_REMOVED: | |
150 | if TORNADO5: | |
128 | 151 | server = server_class(app) |
129 | 152 | else: |
130 | 153 | server = server_class(app, io_loop=self.ioloop) |
15 | 15 | import gunicorn.http.wsgi as wsgi |
16 | 16 | import gunicorn.util as util |
17 | 17 | import gunicorn.workers.base as base |
18 | from gunicorn import six | |
19 | 18 | |
20 | 19 | class StopWaiting(Exception): |
21 | """ exception raised to stop waiting for a connnection """ | |
20 | """ exception raised to stop waiting for a connection """ | |
22 | 21 | |
23 | 22 | class SyncWorker(base.Worker): |
24 | 23 | |
130 | 129 | **self.cfg.ssl_options) |
131 | 130 | |
132 | 131 | parser = http.RequestParser(self.cfg, client) |
133 | req = six.next(parser) | |
132 | req = next(parser) | |
134 | 133 | self.handle_request(listener, req, client, addr) |
135 | 134 | except http.errors.NoMoreData as e: |
136 | 135 | self.log.debug("Ignored premature client disconnection. %s", e) |
187 | 186 | respiter.close() |
188 | 187 | except EnvironmentError: |
189 | 188 | # pass to next try-except level |
190 | six.reraise(*sys.exc_info()) | |
189 | util.reraise(*sys.exc_info()) | |
191 | 190 | except Exception: |
192 | 191 | if resp and resp.headers_sent: |
193 | 192 | # If the requests have already been sent, we should close the |
20 | 20 | if fdir and not os.path.isdir(fdir): |
21 | 21 | raise RuntimeError("%s doesn't exist. Can't create workertmp." % fdir) |
22 | 22 | fd, name = tempfile.mkstemp(prefix="wgunicorn-", dir=fdir) |
23 | os.umask(old_umask) | |
23 | 24 | |
24 | # allows the process to write to the file | |
25 | util.chown(name, cfg.uid, cfg.gid) | |
26 | os.umask(old_umask) | |
25 | # change the owner and group of the file if the worker will run as | |
26 | # a different user or group, so that the worker can modify the file | |
27 | if cfg.uid != os.geteuid() or cfg.gid != os.getegid(): | |
28 | util.chown(name, cfg.uid, cfg.gid) | |
27 | 29 | |
28 | 30 | # unlink the file so we don't leak tempory files |
29 | 31 | try: |
30 | 32 | if not IS_CYGWIN: |
31 | 33 | util.unlink(name) |
32 | self._tmp = os.fdopen(fd, 'w+b', 1) | |
34 | # In Python 3.8, open() emits RuntimeWarning if buffering=1 for binary mode. | |
35 | # Because we never write to this file, pass 0 to switch buffering off. | |
36 | self._tmp = os.fdopen(fd, 'w+b', 0) | |
33 | 37 | except: |
34 | 38 | os.close(fd) |
35 | 39 | raise |
37 | 41 | self.spinner = 0 |
38 | 42 | |
39 | 43 | def notify(self): |
40 | try: | |
41 | self.spinner = (self.spinner + 1) % 2 | |
42 | os.fchmod(self._tmp.fileno(), self.spinner) | |
43 | except AttributeError: | |
44 | # python < 2.6 | |
45 | self._tmp.truncate(0) | |
46 | os.write(self._tmp.fileno(), b"X") | |
44 | self.spinner = (self.spinner + 1) % 2 | |
45 | os.fchmod(self._tmp.fileno(), self.spinner) | |
47 | 46 | |
48 | 47 | def last_update(self): |
49 | 48 | return os.fstat(self._tmp.fileno()).st_ctime |
0 | coverage>=4.0,<4.4 # TODO: https://github.com/benoitc/gunicorn/issues/1548 | |
1 | pytest==3.2.5 # TODO: upgrade to latest version requires drop support to Python 2.6 | |
2 | pytest-cov==2.5.1 | |
0 | aiohttp | |
1 | coverage | |
2 | pytest | |
3 | pytest-cov |
5 | 5 | # ======= |
6 | 6 | # pip install validate_email pyDNS |
7 | 7 | # |
8 | from __future__ import print_function | |
9 | 8 | import sys |
10 | 9 | |
11 | 10 | from validate_email import validate_email |
19 | 19 | 'Operating System :: MacOS :: MacOS X', |
20 | 20 | 'Operating System :: POSIX', |
21 | 21 | 'Programming Language :: Python', |
22 | 'Programming Language :: Python :: 2', | |
23 | 'Programming Language :: Python :: 2.6', | |
24 | 'Programming Language :: Python :: 2.7', | |
25 | 22 | 'Programming Language :: Python :: 3', |
26 | 'Programming Language :: Python :: 3.2', | |
27 | 'Programming Language :: Python :: 3.3', | |
28 | 23 | 'Programming Language :: Python :: 3.4', |
29 | 24 | 'Programming Language :: Python :: 3.5', |
30 | 25 | 'Programming Language :: Python :: 3.6', |
26 | 'Programming Language :: Python :: 3.7', | |
27 | 'Programming Language :: Python :: 3.8', | |
28 | 'Programming Language :: Python :: 3 :: Only', | |
29 | 'Programming Language :: Python :: Implementation :: CPython', | |
30 | 'Programming Language :: Python :: Implementation :: PyPy', | |
31 | 31 | 'Topic :: Internet', |
32 | 32 | 'Topic :: Utilities', |
33 | 33 | 'Topic :: Software Development :: Libraries :: Python Modules', |
44 | 44 | fname = os.path.join(os.path.dirname(__file__), 'requirements_test.txt') |
45 | 45 | with open(fname) as f: |
46 | 46 | tests_require = [l.strip() for l in f.readlines()] |
47 | ||
48 | if sys.version_info[:2] < (3, 3): | |
49 | tests_require.append('mock') | |
50 | if sys.version_info[:2] < (2, 7): | |
51 | tests_require.append('unittest2') | |
52 | 47 | |
53 | 48 | class PyTestCommand(TestCommand): |
54 | 49 | user_options = [ |
72 | 67 | sys.exit(errno) |
73 | 68 | |
74 | 69 | |
75 | extra_require = { | |
70 | install_requires = [ | |
71 | # We depend on functioning pkg_resources.working_set.add_entry() and | |
72 | # pkg_resources.load_entry_point(). These both work as of 3.0 which | |
73 | # is the first version to support Python 3.4 which we require as a | |
74 | # floor. | |
75 | 'setuptools>=3.0', | |
76 | ] | |
77 | ||
78 | extras_require = { | |
76 | 79 | 'gevent': ['gevent>=0.13'], |
77 | 80 | 'eventlet': ['eventlet>=0.9.7'], |
78 | 81 | 'tornado': ['tornado>=0.2'], |
79 | 82 | 'gthread': [], |
83 | 'setproctitle': ['setproctitle'], | |
80 | 84 | } |
81 | if sys.version_info[0] < 3: | |
82 | extra_require['gthread'] = ['futures'] | |
83 | 85 | |
84 | 86 | setup( |
85 | 87 | name='gunicorn', |
92 | 94 | license='MIT', |
93 | 95 | url='http://gunicorn.org', |
94 | 96 | |
95 | python_requires='>=2.6, !=3.0.*, !=3.1.*', | |
97 | python_requires='>=3.4', | |
98 | install_requires=install_requires, | |
96 | 99 | classifiers=CLASSIFIERS, |
97 | 100 | zip_safe=False, |
98 | 101 | packages=find_packages(exclude=['examples', 'tests']), |
104 | 107 | entry_points=""" |
105 | 108 | [console_scripts] |
106 | 109 | gunicorn=gunicorn.app.wsgiapp:run |
107 | gunicorn_paster=gunicorn.app.pasterapp:run | |
108 | 110 | |
109 | 111 | [paste.server_runner] |
110 | main=gunicorn.app.pasterapp:paste_server | |
112 | main=gunicorn.app.pasterapp:serve | |
111 | 113 | """, |
112 | extras_require=extra_require, | |
114 | extras_require=extras_require, | |
113 | 115 | ) |
47 | 47 | wrapper.min_version = min_version |
48 | 48 | return wrapper |
49 | 49 | return decorator |
50 | ||
51 | try: | |
52 | from types import SimpleNamespace # pylint: disable=unused-import | |
53 | except ImportError: | |
54 | class SimpleNamespace(object): | |
55 | def __init__(self, **kwargs): | |
56 | vars(self).update(kwargs) | |
57 | ||
58 | def __repr__(self): | |
59 | keys = sorted(vars(self)) | |
60 | items = ("{}={!r}".format(k, vars(self)[k]) for k in keys) | |
61 | return "{}({})".format(type(self).__name__, ", ".join(items)) | |
62 | ||
63 | def __eq__(self, other): | |
64 | return vars(self) == vars(other) |
3 | 3 | # This file is part of gunicorn released under the MIT license. |
4 | 4 | # See the NOTICE for more information. |
5 | 5 | |
6 | import io | |
6 | 7 | import os |
7 | 8 | import tempfile |
8 | 9 | |
9 | 10 | dirname = os.path.dirname(__file__) |
10 | 11 | |
11 | 12 | from gunicorn.http.parser import RequestParser |
12 | from gunicorn.six import BytesIO | |
13 | 13 | |
14 | 14 | |
15 | 15 | def data_source(fname): |
16 | buf = BytesIO() | |
16 | buf = io.BytesIO() | |
17 | 17 | with open(fname) as handle: |
18 | 18 | for line in handle: |
19 | 19 | line = line.rstrip("\n").replace("\\r\\n", "\r\n") |
3 | 3 | # See the NOTICE for more information. |
4 | 4 | |
5 | 5 | import os |
6 | ||
7 | try: | |
8 | import unittest.mock as mock | |
9 | except ImportError: | |
10 | import mock | |
6 | import unittest.mock as mock | |
11 | 7 | |
12 | 8 | import gunicorn.app.base |
13 | 9 | import gunicorn.arbiter |
10 | from gunicorn.config import ReusePort | |
14 | 11 | |
15 | 12 | |
16 | 13 | class DummyApplication(gunicorn.app.base.BaseApplication): |
59 | 56 | def test_arbiter_stop_does_not_unlink_systemd_listeners(close_sockets): |
60 | 57 | arbiter = gunicorn.arbiter.Arbiter(DummyApplication()) |
61 | 58 | arbiter.systemd = True |
59 | arbiter.stop() | |
60 | close_sockets.assert_called_with([], False) | |
61 | ||
62 | ||
63 | @mock.patch('gunicorn.sock.close_sockets') | |
64 | def test_arbiter_stop_does_not_unlink_when_using_reuse_port(close_sockets): | |
65 | arbiter = gunicorn.arbiter.Arbiter(DummyApplication()) | |
66 | arbiter.cfg.settings['reuse_port'] = ReusePort() | |
67 | arbiter.cfg.settings['reuse_port'].set(True) | |
62 | 68 | arbiter.stop() |
63 | 69 | close_sockets.assert_called_with([], False) |
64 | 70 | |
162 | 168 | 'preloaded' application. |
163 | 169 | """ |
164 | 170 | verify_env_vars() |
165 | return super(PreloadedAppWithEnvSettings, self).wsgi() | |
171 | return super().wsgi() | |
166 | 172 | |
167 | 173 | |
168 | 174 | def verify_env_vars(): |
41 | 41 | |
42 | 42 | class NoConfigApp(Application): |
43 | 43 | def __init__(self): |
44 | super(NoConfigApp, self).__init__("no_usage", prog="gunicorn_test") | |
44 | super().__init__("no_usage", prog="gunicorn_test") | |
45 | 45 | |
46 | 46 | def init(self, parser, opts, args): |
47 | 47 | pass |
385 | 385 | with AltArgs(cmdline): |
386 | 386 | app = NoConfigApp() |
387 | 387 | assert app.cfg.umask == expected |
388 | ||
389 | ||
390 | @pytest.mark.parametrize("options, expected", [ | |
391 | (["--ssl-version", "SSLv23"], 2), | |
392 | (["--ssl-version", "TLSv1"], 3), | |
393 | (["--ssl-version", "2"], 2), | |
394 | (["--ssl-version", "3"], 3), | |
395 | ]) | |
396 | def test_ssl_version_named_constants_python3(options, expected): | |
397 | _test_ssl_version(options, expected) | |
398 | ||
399 | ||
400 | @pytest.mark.skipif(sys.version_info < (3, 6), | |
401 | reason="requires python3.6+") | |
402 | @pytest.mark.parametrize("options, expected", [ | |
403 | (["--ssl-version", "TLS"], 2), | |
404 | (["--ssl-version", "TLSv1_1"], 4), | |
405 | (["--ssl-version", "TLSv1_2"], 5), | |
406 | (["--ssl-version", "TLS_SERVER"], 17), | |
407 | ]) | |
408 | def test_ssl_version_named_constants_python36(options, expected): | |
409 | _test_ssl_version(options, expected) | |
410 | ||
411 | ||
412 | @pytest.mark.parametrize("ssl_version", [ | |
413 | "FOO", | |
414 | "-99", | |
415 | "99991234" | |
416 | ]) | |
417 | def test_ssl_version_bad(ssl_version): | |
418 | c = config.Config() | |
419 | with pytest.raises(ValueError) as exc: | |
420 | c.set("ssl_version", ssl_version) | |
421 | assert 'Valid options' in str(exc.value) | |
422 | assert "TLSv" in str(exc.value) | |
423 | ||
424 | ||
425 | def _test_ssl_version(options, expected): | |
426 | cmdline = ["prog_name"] | |
427 | cmdline.extend(options) | |
428 | with AltArgs(cmdline): | |
429 | app = NoConfigApp() | |
430 | assert app.cfg.ssl_version == expected | |
431 | ||
432 | ||
433 | def test_bind_fd(): | |
434 | with AltArgs(["prog_name", "-b", "fd://42"]): | |
435 | app = NoConfigApp() | |
436 | assert app.cfg.bind == ["fd://42"] |
0 | # -*- coding: utf-8 - | |
1 | # | |
2 | # This file is part of gunicorn released under the MIT license. | |
3 | # See the NOTICE for more information. | |
4 | ||
5 | import unittest | |
6 | import pytest | |
7 | ||
8 | aiohttp = pytest.importorskip("aiohttp") | |
9 | WSGIServerHttpProtocol = pytest.importorskip("aiohttp.wsgi.WSGIServerHttpProtocol") | |
10 | ||
11 | import asyncio | |
12 | from gunicorn.workers import gaiohttp | |
13 | from gunicorn.workers._gaiohttp import _wrp | |
14 | from gunicorn.config import Config | |
15 | from unittest import mock | |
16 | ||
17 | ||
18 | class WorkerTests(unittest.TestCase): | |
19 | ||
20 | def setUp(self): | |
21 | self.loop = asyncio.new_event_loop() | |
22 | asyncio.set_event_loop(None) | |
23 | self.worker = gaiohttp.AiohttpWorker('age', | |
24 | 'ppid', | |
25 | 'sockets', | |
26 | 'app', | |
27 | 'timeout', | |
28 | Config(), | |
29 | 'log') | |
30 | ||
31 | def tearDown(self): | |
32 | self.loop.close() | |
33 | ||
34 | @mock.patch('gunicorn.workers._gaiohttp.asyncio') | |
35 | def test_init_process(self, m_asyncio): | |
36 | try: | |
37 | self.worker.init_process() | |
38 | except TypeError: | |
39 | # to mask incomplete initialization of AiohttWorker instance: | |
40 | # we pass invalid values for ctor args | |
41 | pass | |
42 | ||
43 | self.assertTrue(m_asyncio.get_event_loop.return_value.close.called) | |
44 | self.assertTrue(m_asyncio.new_event_loop.called) | |
45 | self.assertTrue(m_asyncio.set_event_loop.called) | |
46 | ||
47 | @mock.patch('gunicorn.workers._gaiohttp.asyncio') | |
48 | def test_run(self, m_asyncio): | |
49 | self.worker.loop = mock.Mock() | |
50 | self.worker.run() | |
51 | ||
52 | self.assertTrue(m_asyncio.ensure_future.called) | |
53 | self.assertTrue(self.worker.loop.run_until_complete.called) | |
54 | self.assertTrue(self.worker.loop.close.called) | |
55 | ||
56 | def test_factory(self): | |
57 | self.worker.wsgi = mock.Mock() | |
58 | self.worker.loop = mock.Mock() | |
59 | self.worker.log = mock.Mock() | |
60 | self.worker.cfg = Config() | |
61 | ||
62 | f = self.worker.factory( | |
63 | self.worker.wsgi, ('localhost', 8080)) | |
64 | self.assertIsInstance(f, WSGIServerHttpProtocol) | |
65 | ||
66 | @mock.patch('gunicorn.workers._gaiohttp.asyncio') | |
67 | def test__run(self, m_asyncio): | |
68 | self.worker.ppid = 1 | |
69 | self.worker.alive = True | |
70 | self.worker.servers = [] | |
71 | sock = mock.Mock() | |
72 | sock.cfg_addr = ('localhost', 8080) | |
73 | self.worker.sockets = [sock] | |
74 | self.worker.wsgi = mock.Mock() | |
75 | self.worker.log = mock.Mock() | |
76 | self.worker.notify = mock.Mock() | |
77 | loop = self.worker.loop = mock.Mock() | |
78 | loop.create_server.return_value = asyncio.Future(loop=self.loop) | |
79 | loop.create_server.return_value.set_result(sock) | |
80 | ||
81 | self.loop.run_until_complete(self.worker._run()) | |
82 | ||
83 | self.assertTrue(self.worker.log.info.called) | |
84 | self.assertTrue(self.worker.notify.called) | |
85 | ||
86 | @mock.patch('gunicorn.workers._gaiohttp.asyncio') | |
87 | def test__run_unix_socket(self, m_asyncio): | |
88 | self.worker.ppid = 1 | |
89 | self.worker.alive = True | |
90 | self.worker.servers = [] | |
91 | sock = mock.Mock() | |
92 | sock.cfg_addr = '/tmp/gunicorn.sock' | |
93 | self.worker.sockets = [sock] | |
94 | self.worker.wsgi = mock.Mock() | |
95 | self.worker.log = mock.Mock() | |
96 | self.worker.notify = mock.Mock() | |
97 | loop = self.worker.loop = mock.Mock() | |
98 | loop.create_server.return_value = asyncio.Future(loop=self.loop) | |
99 | loop.create_server.return_value.set_result(sock) | |
100 | ||
101 | self.loop.run_until_complete(self.worker._run()) | |
102 | ||
103 | self.assertTrue(self.worker.log.info.called) | |
104 | self.assertTrue(self.worker.notify.called) | |
105 | ||
106 | def test__run_connections(self): | |
107 | conn = mock.Mock() | |
108 | self.worker.ppid = 1 | |
109 | self.worker.alive = False | |
110 | self.worker.servers = [mock.Mock()] | |
111 | self.worker.connections = {1: conn} | |
112 | self.worker.sockets = [] | |
113 | self.worker.wsgi = mock.Mock() | |
114 | self.worker.log = mock.Mock() | |
115 | self.worker.loop = self.loop | |
116 | self.worker.loop.create_server = mock.Mock() | |
117 | self.worker.notify = mock.Mock() | |
118 | ||
119 | def _close_conns(): | |
120 | self.worker.connections = {} | |
121 | ||
122 | self.loop.call_later(0.1, _close_conns) | |
123 | self.loop.run_until_complete(self.worker._run()) | |
124 | ||
125 | self.assertTrue(self.worker.log.info.called) | |
126 | self.assertTrue(self.worker.notify.called) | |
127 | self.assertFalse(self.worker.servers) | |
128 | self.assertTrue(conn.closing.called) | |
129 | ||
130 | @mock.patch('gunicorn.workers._gaiohttp.os') | |
131 | @mock.patch('gunicorn.workers._gaiohttp.asyncio.sleep') | |
132 | def test__run_exc(self, m_sleep, m_os): | |
133 | m_os.getpid.return_value = 1 | |
134 | m_os.getppid.return_value = 1 | |
135 | ||
136 | self.worker.servers = [mock.Mock()] | |
137 | self.worker.ppid = 1 | |
138 | self.worker.alive = True | |
139 | self.worker.sockets = [] | |
140 | self.worker.log = mock.Mock() | |
141 | self.worker.loop = mock.Mock() | |
142 | self.worker.notify = mock.Mock() | |
143 | ||
144 | slp = asyncio.Future(loop=self.loop) | |
145 | slp.set_exception(KeyboardInterrupt) | |
146 | m_sleep.return_value = slp | |
147 | ||
148 | self.loop.run_until_complete(self.worker._run()) | |
149 | self.assertTrue(m_sleep.called) | |
150 | self.assertTrue(self.worker.servers[0].close.called) | |
151 | ||
152 | def test_close_wsgi_app(self): | |
153 | self.worker.ppid = 1 | |
154 | self.worker.alive = False | |
155 | self.worker.servers = [mock.Mock()] | |
156 | self.worker.connections = {} | |
157 | self.worker.sockets = [] | |
158 | self.worker.log = mock.Mock() | |
159 | self.worker.loop = self.loop | |
160 | self.worker.loop.create_server = mock.Mock() | |
161 | self.worker.notify = mock.Mock() | |
162 | ||
163 | self.worker.wsgi = mock.Mock() | |
164 | self.worker.wsgi.close.return_value = asyncio.Future(loop=self.loop) | |
165 | self.worker.wsgi.close.return_value.set_result(1) | |
166 | ||
167 | self.loop.run_until_complete(self.worker._run()) | |
168 | self.assertTrue(self.worker.wsgi.close.called) | |
169 | ||
170 | self.worker.wsgi = mock.Mock() | |
171 | self.worker.wsgi.close.return_value = asyncio.Future(loop=self.loop) | |
172 | self.worker.wsgi.close.return_value.set_exception(ValueError()) | |
173 | ||
174 | self.loop.run_until_complete(self.worker._run()) | |
175 | self.assertTrue(self.worker.wsgi.close.called) | |
176 | ||
177 | def test_wrp(self): | |
178 | conn = object() | |
179 | tracking = {} | |
180 | meth = mock.Mock() | |
181 | wrp = _wrp(conn, meth, tracking) | |
182 | wrp() | |
183 | ||
184 | self.assertIn(id(conn), tracking) | |
185 | self.assertTrue(meth.called) | |
186 | ||
187 | meth = mock.Mock() | |
188 | wrp = _wrp(conn, meth, tracking, False) | |
189 | wrp() | |
190 | ||
191 | self.assertNotIn(1, tracking) | |
192 | self.assertTrue(meth.called) |
0 | 0 | # -*- encoding: utf-8 -*- |
1 | 1 | |
2 | import io | |
2 | 3 | import t |
3 | 4 | import pytest |
5 | import unittest.mock as mock | |
4 | 6 | |
5 | 7 | from gunicorn import util |
6 | 8 | from gunicorn.http.body import Body, LengthReader, EOFReader |
7 | 9 | from gunicorn.http.wsgi import Response |
8 | 10 | from gunicorn.http.unreader import Unreader, IterUnreader, SocketUnreader |
9 | from gunicorn.six import BytesIO | |
10 | 11 | from gunicorn.http.errors import InvalidHeader, InvalidHeaderName |
11 | 12 | |
12 | try: | |
13 | import unittest.mock as mock | |
14 | except ImportError: | |
15 | import mock | |
16 | ||
17 | 13 | |
18 | 14 | def assert_readline(payload, size, expected): |
19 | body = Body(BytesIO(payload)) | |
15 | body = Body(io.BytesIO(payload)) | |
20 | 16 | assert body.readline(size) == expected |
21 | 17 | |
22 | 18 | |
31 | 27 | |
32 | 28 | |
33 | 29 | def test_readline_new_line_before_size(): |
34 | body = Body(BytesIO(b"abc\ndef")) | |
30 | body = Body(io.BytesIO(b"abc\ndef")) | |
35 | 31 | assert body.readline(4) == b"abc\n" |
36 | 32 | assert body.readline() == b"def" |
37 | 33 | |
38 | 34 | |
39 | 35 | def test_readline_new_line_after_size(): |
40 | body = Body(BytesIO(b"abc\ndef")) | |
36 | body = Body(io.BytesIO(b"abc\ndef")) | |
41 | 37 | assert body.readline(2) == b"ab" |
42 | 38 | assert body.readline() == b"c\n" |
43 | 39 | |
44 | 40 | |
45 | 41 | def test_readline_no_new_line(): |
46 | body = Body(BytesIO(b"abcdef")) | |
42 | body = Body(io.BytesIO(b"abcdef")) | |
47 | 43 | assert body.readline() == b"abcdef" |
48 | body = Body(BytesIO(b"abcdef")) | |
44 | body = Body(io.BytesIO(b"abcdef")) | |
49 | 45 | assert body.readline(2) == b"ab" |
50 | 46 | assert body.readline(2) == b"cd" |
51 | 47 | assert body.readline(2) == b"ef" |
52 | 48 | |
53 | 49 | |
54 | 50 | def test_readline_buffer_loaded(): |
55 | reader = BytesIO(b"abc\ndef") | |
51 | reader = io.BytesIO(b"abc\ndef") | |
56 | 52 | body = Body(reader) |
57 | 53 | body.read(1) # load internal buffer |
58 | 54 | reader.write(b"g\nhi") |
63 | 59 | |
64 | 60 | |
65 | 61 | def test_readline_buffer_loaded_with_size(): |
66 | body = Body(BytesIO(b"abc\ndef")) | |
62 | body = Body(io.BytesIO(b"abc\ndef")) | |
67 | 63 | body.read(1) # load internal buffer |
68 | 64 | assert body.readline(2) == b"bc" |
69 | 65 | assert body.readline(2) == b"\n" |
80 | 76 | mocked_request = mock.MagicMock() |
81 | 77 | response = Response(mocked_request, mocked_socket, None) |
82 | 78 | |
83 | # set umlaut header | |
84 | response.headers.append(('foo', u'häder')) | |
79 | # set umlaut header value - latin-1 is OK | |
80 | response.headers.append(('foo', 'häder')) | |
81 | response.send_headers() | |
82 | ||
83 | # set a-breve header value - unicode, non-latin-1 fails | |
84 | response = Response(mocked_request, mocked_socket, None) | |
85 | response.headers.append(('apple', 'măr')) | |
85 | 86 | with pytest.raises(UnicodeEncodeError): |
86 | 87 | response.send_headers() |
87 | 88 | |
168 | 169 | |
169 | 170 | |
170 | 171 | def test_socket_unreader_chunk(): |
171 | fake_sock = t.FakeSocket(BytesIO(b'Lorem ipsum dolor')) | |
172 | fake_sock = t.FakeSocket(io.BytesIO(b'Lorem ipsum dolor')) | |
172 | 173 | sock_unreader = SocketUnreader(fake_sock, max_chunk=5) |
173 | 174 | |
174 | 175 | assert sock_unreader.chunk() == b'Lorem' |
0 | 0 | import datetime |
1 | from types import SimpleNamespace | |
2 | ||
3 | import pytest | |
1 | 4 | |
2 | 5 | from gunicorn.config import Config |
3 | 6 | from gunicorn.glogging import Logger |
4 | ||
5 | from support import SimpleNamespace | |
6 | 7 | |
7 | 8 | |
8 | 9 | def test_atoms_defaults(): |
47 | 48 | assert atoms['B'] == 0 |
48 | 49 | |
49 | 50 | |
50 | def test_get_username_from_basic_auth_header(): | |
51 | @pytest.mark.parametrize('auth', [ | |
52 | # auth type is case in-sensitive | |
53 | 'Basic YnJrMHY6', | |
54 | 'basic YnJrMHY6', | |
55 | 'BASIC YnJrMHY6', | |
56 | ]) | |
57 | def test_get_username_from_basic_auth_header(auth): | |
51 | 58 | request = SimpleNamespace(headers=()) |
52 | 59 | response = SimpleNamespace( |
53 | 60 | status='200', response_length=1024, sent=1024, |
57 | 64 | 'REQUEST_METHOD': 'GET', 'RAW_URI': '/my/path?foo=bar', |
58 | 65 | 'PATH_INFO': '/my/path', 'QUERY_STRING': 'foo=bar', |
59 | 66 | 'SERVER_PROTOCOL': 'HTTP/1.1', |
60 | 'HTTP_AUTHORIZATION': 'Basic YnJrMHY6', | |
67 | 'HTTP_AUTHORIZATION': auth, | |
61 | 68 | } |
62 | 69 | logger = Logger(Config()) |
63 | 70 | atoms = logger.atoms(response, request, environ, datetime.timedelta(seconds=1)) |
3 | 3 | # See the NOTICE for more information. |
4 | 4 | |
5 | 5 | import errno |
6 | import sys | |
7 | ||
8 | try: | |
9 | import unittest.mock as mock | |
10 | except ImportError: | |
11 | import mock | |
6 | import unittest.mock as mock | |
12 | 7 | |
13 | 8 | import gunicorn.pidfile |
14 | 9 | |
15 | 10 | |
16 | 11 | def builtin(name): |
17 | if sys.version_info >= (3, 0): | |
18 | module = 'builtins' | |
19 | else: | |
20 | module = '__builtin__' | |
21 | ||
22 | return '{0}.{1}'.format(module, name) | |
12 | return 'builtins.{}'.format(name) | |
23 | 13 | |
24 | 14 | |
25 | 15 | @mock.patch(builtin('open'), new_callable=mock.mock_open) |
0 | # The following code adapted from CPython (see Lib/test/test_selectors.py) | |
1 | ||
2 | import errno | |
3 | import random | |
4 | import signal | |
5 | import socket | |
6 | from time import sleep | |
7 | try: | |
8 | import unittest2 as unittest | |
9 | except ImportError: | |
10 | import unittest | |
11 | try: | |
12 | import unittest.mock as mock | |
13 | except ImportError: | |
14 | import mock | |
15 | try: | |
16 | from time import monotonic as time | |
17 | except ImportError: | |
18 | from time import time | |
19 | try: | |
20 | import resource | |
21 | except ImportError: | |
22 | resource = None | |
23 | ||
24 | from gunicorn import selectors | |
25 | import support | |
26 | ||
27 | ||
28 | if hasattr(socket, 'socketpair'): | |
29 | socketpair = socket.socketpair | |
30 | else: | |
31 | def socketpair(family=socket.AF_INET, type=socket.SOCK_STREAM, proto=0): | |
32 | with socket.socket(family, type, proto) as l: | |
33 | l.bind((support.HOST, 0)) | |
34 | l.listen() | |
35 | c = socket.socket(family, type, proto) | |
36 | try: | |
37 | c.connect(l.getsockname()) | |
38 | caddr = c.getsockname() | |
39 | while True: | |
40 | a, addr = l.accept() | |
41 | # check that we've got the correct client | |
42 | if addr == caddr: | |
43 | return c, a | |
44 | a.close() | |
45 | except OSError: | |
46 | c.close() | |
47 | raise | |
48 | ||
49 | ||
50 | def find_ready_matching(ready, flag): | |
51 | match = [] | |
52 | for key, events in ready: | |
53 | if events & flag: | |
54 | match.append(key.fileobj) | |
55 | return match | |
56 | ||
57 | ||
58 | class BaseSelectorTestCase(object): | |
59 | ||
60 | def make_socketpair(self): | |
61 | rd, wr = socketpair() | |
62 | self.addCleanup(rd.close) | |
63 | self.addCleanup(wr.close) | |
64 | return rd, wr | |
65 | ||
66 | def test_register(self): | |
67 | s = self.SELECTOR() | |
68 | self.addCleanup(s.close) | |
69 | ||
70 | rd, wr = self.make_socketpair() | |
71 | ||
72 | key = s.register(rd, selectors.EVENT_READ, "data") | |
73 | self.assertIsInstance(key, selectors.SelectorKey) | |
74 | self.assertEqual(key.fileobj, rd) | |
75 | self.assertEqual(key.fd, rd.fileno()) | |
76 | self.assertEqual(key.events, selectors.EVENT_READ) | |
77 | self.assertEqual(key.data, "data") | |
78 | ||
79 | # register an unknown event | |
80 | self.assertRaises(ValueError, s.register, 0, 999999) | |
81 | ||
82 | # register an invalid FD | |
83 | self.assertRaises(ValueError, s.register, -10, selectors.EVENT_READ) | |
84 | ||
85 | # register twice | |
86 | self.assertRaises(KeyError, s.register, rd, selectors.EVENT_READ) | |
87 | ||
88 | # register the same FD, but with a different object | |
89 | self.assertRaises(KeyError, s.register, rd.fileno(), | |
90 | selectors.EVENT_READ) | |
91 | ||
92 | def test_unregister(self): | |
93 | s = self.SELECTOR() | |
94 | self.addCleanup(s.close) | |
95 | ||
96 | rd, wr = self.make_socketpair() | |
97 | ||
98 | s.register(rd, selectors.EVENT_READ) | |
99 | s.unregister(rd) | |
100 | ||
101 | # unregister an unknown file obj | |
102 | self.assertRaises(KeyError, s.unregister, 999999) | |
103 | ||
104 | # unregister twice | |
105 | self.assertRaises(KeyError, s.unregister, rd) | |
106 | ||
107 | def test_unregister_after_fd_close(self): | |
108 | s = self.SELECTOR() | |
109 | self.addCleanup(s.close) | |
110 | rd, wr = self.make_socketpair() | |
111 | r, w = rd.fileno(), wr.fileno() | |
112 | s.register(r, selectors.EVENT_READ) | |
113 | s.register(w, selectors.EVENT_WRITE) | |
114 | rd.close() | |
115 | wr.close() | |
116 | s.unregister(r) | |
117 | s.unregister(w) | |
118 | ||
119 | def test_unregister_after_socket_close(self): | |
120 | s = self.SELECTOR() | |
121 | self.addCleanup(s.close) | |
122 | rd, wr = self.make_socketpair() | |
123 | s.register(rd, selectors.EVENT_READ) | |
124 | s.register(wr, selectors.EVENT_WRITE) | |
125 | rd.close() | |
126 | wr.close() | |
127 | s.unregister(rd) | |
128 | s.unregister(wr) | |
129 | ||
130 | def test_modify(self): | |
131 | s = self.SELECTOR() | |
132 | self.addCleanup(s.close) | |
133 | ||
134 | rd, wr = self.make_socketpair() | |
135 | ||
136 | key = s.register(rd, selectors.EVENT_READ) | |
137 | ||
138 | # modify events | |
139 | key2 = s.modify(rd, selectors.EVENT_WRITE) | |
140 | self.assertNotEqual(key.events, key2.events) | |
141 | self.assertEqual(key2, s.get_key(rd)) | |
142 | ||
143 | s.unregister(rd) | |
144 | ||
145 | # modify data | |
146 | d1 = object() | |
147 | d2 = object() | |
148 | ||
149 | key = s.register(rd, selectors.EVENT_READ, d1) | |
150 | key2 = s.modify(rd, selectors.EVENT_READ, d2) | |
151 | self.assertEqual(key.events, key2.events) | |
152 | self.assertNotEqual(key.data, key2.data) | |
153 | self.assertEqual(key2, s.get_key(rd)) | |
154 | self.assertEqual(key2.data, d2) | |
155 | ||
156 | # modify unknown file obj | |
157 | self.assertRaises(KeyError, s.modify, 999999, selectors.EVENT_READ) | |
158 | ||
159 | # modify use a shortcut | |
160 | d3 = object() | |
161 | s.register = mock.Mock() | |
162 | s.unregister = mock.Mock() | |
163 | ||
164 | s.modify(rd, selectors.EVENT_READ, d3) | |
165 | self.assertFalse(s.register.called) | |
166 | self.assertFalse(s.unregister.called) | |
167 | ||
168 | def test_close(self): | |
169 | s = self.SELECTOR() | |
170 | self.addCleanup(s.close) | |
171 | ||
172 | rd, wr = self.make_socketpair() | |
173 | ||
174 | s.register(rd, selectors.EVENT_READ) | |
175 | s.register(wr, selectors.EVENT_WRITE) | |
176 | ||
177 | s.close() | |
178 | self.assertRaises(KeyError, s.get_key, rd) | |
179 | self.assertRaises(KeyError, s.get_key, wr) | |
180 | ||
181 | def test_get_key(self): | |
182 | s = self.SELECTOR() | |
183 | self.addCleanup(s.close) | |
184 | ||
185 | rd, wr = self.make_socketpair() | |
186 | ||
187 | key = s.register(rd, selectors.EVENT_READ, "data") | |
188 | self.assertEqual(key, s.get_key(rd)) | |
189 | ||
190 | # unknown file obj | |
191 | self.assertRaises(KeyError, s.get_key, 999999) | |
192 | ||
193 | def test_get_map(self): | |
194 | s = self.SELECTOR() | |
195 | self.addCleanup(s.close) | |
196 | ||
197 | rd, wr = self.make_socketpair() | |
198 | ||
199 | keys = s.get_map() | |
200 | self.assertFalse(keys) | |
201 | self.assertEqual(len(keys), 0) | |
202 | self.assertEqual(list(keys), []) | |
203 | key = s.register(rd, selectors.EVENT_READ, "data") | |
204 | self.assertIn(rd, keys) | |
205 | self.assertEqual(key, keys[rd]) | |
206 | self.assertEqual(len(keys), 1) | |
207 | self.assertEqual(list(keys), [rd.fileno()]) | |
208 | self.assertEqual(list(keys.values()), [key]) | |
209 | ||
210 | # unknown file obj | |
211 | with self.assertRaises(KeyError): | |
212 | keys[999999] | |
213 | ||
214 | # Read-only mapping | |
215 | with self.assertRaises(TypeError): | |
216 | del keys[rd] | |
217 | ||
218 | def test_select(self): | |
219 | s = self.SELECTOR() | |
220 | self.addCleanup(s.close) | |
221 | ||
222 | rd, wr = self.make_socketpair() | |
223 | ||
224 | s.register(rd, selectors.EVENT_READ) | |
225 | wr_key = s.register(wr, selectors.EVENT_WRITE) | |
226 | ||
227 | result = s.select() | |
228 | for key, events in result: | |
229 | self.assertTrue(isinstance(key, selectors.SelectorKey)) | |
230 | self.assertTrue(events) | |
231 | self.assertFalse(events & ~(selectors.EVENT_READ | | |
232 | selectors.EVENT_WRITE)) | |
233 | ||
234 | self.assertEqual([(wr_key, selectors.EVENT_WRITE)], result) | |
235 | ||
236 | def test_context_manager(self): | |
237 | s = self.SELECTOR() | |
238 | self.addCleanup(s.close) | |
239 | ||
240 | rd, wr = self.make_socketpair() | |
241 | ||
242 | with s as sel: | |
243 | sel.register(rd, selectors.EVENT_READ) | |
244 | sel.register(wr, selectors.EVENT_WRITE) | |
245 | ||
246 | self.assertRaises(KeyError, s.get_key, rd) | |
247 | self.assertRaises(KeyError, s.get_key, wr) | |
248 | ||
249 | def test_fileno(self): | |
250 | s = self.SELECTOR() | |
251 | self.addCleanup(s.close) | |
252 | ||
253 | if hasattr(s, 'fileno'): | |
254 | fd = s.fileno() | |
255 | self.assertTrue(isinstance(fd, int)) | |
256 | self.assertGreaterEqual(fd, 0) | |
257 | ||
    def test_selector(self):
        """End-to-end test: shuttle MSG across NUM_SOCKETS pairs via select().

        Each iteration picks a random ready writer, sends MSG, waits for the
        matching read end to become ready, verifies the payload round-trips,
        then unregisters the pair until no writers remain.
        """
        s = self.SELECTOR()
        self.addCleanup(s.close)

        NUM_SOCKETS = 12
        MSG = b" This is a test."
        MSG_LEN = len(MSG)
        readers = []
        writers = []
        r2w = {}  # read end -> its paired write end
        w2r = {}  # write end -> its paired read end

        for i in range(NUM_SOCKETS):
            rd, wr = self.make_socketpair()
            s.register(rd, selectors.EVENT_READ)
            s.register(wr, selectors.EVENT_WRITE)
            readers.append(rd)
            writers.append(wr)
            r2w[rd] = wr
            w2r[wr] = rd

        bufs = []

        while writers:
            ready = s.select()
            ready_writers = find_ready_matching(ready, selectors.EVENT_WRITE)
            if not ready_writers:
                self.fail("no sockets ready for writing")
            wr = random.choice(ready_writers)
            wr.send(MSG)

            # poll up to ~1s for the paired read end to show up as ready
            for i in range(10):
                ready = s.select()
                ready_readers = find_ready_matching(ready,
                                                    selectors.EVENT_READ)
                if ready_readers:
                    break
                # there might be a delay between the write to the write end and
                # the read end is reported ready
                sleep(0.1)
            else:
                self.fail("no sockets ready for reading")
            # exactly the peer of the socket we wrote to must be readable
            self.assertEqual([w2r[wr]], ready_readers)
            rd = ready_readers[0]
            buf = rd.recv(MSG_LEN)
            self.assertEqual(len(buf), MSG_LEN)
            bufs.append(buf)
            s.unregister(r2w[rd])
            s.unregister(rd)
            writers.remove(r2w[rd])

        self.assertEqual(bufs, [MSG] * NUM_SOCKETS)
310 | ||
    def test_timeout(self):
        """select(timeout) semantics: 0/negative never block, positive waits."""
        s = self.SELECTOR()
        self.addCleanup(s.close)

        rd, wr = self.make_socketpair()

        s.register(wr, selectors.EVENT_WRITE)
        t = time()
        # a writable socket is reported immediately regardless of timeout
        self.assertEqual(1, len(s.select(0)))
        self.assertEqual(1, len(s.select(-1)))
        self.assertLess(time() - t, 0.5)

        s.unregister(wr)
        s.register(rd, selectors.EVENT_READ)
        t = time()
        # nothing to read: zero/negative timeouts must return at once, empty
        self.assertFalse(s.select(0))
        self.assertFalse(s.select(-1))
        self.assertLess(time() - t, 0.5)

        t0 = time()
        self.assertFalse(s.select(1))
        t1 = time()
        dt = t1 - t0
        # generous bounds tolerate scheduler jitter on slow/loaded machines
        self.assertTrue(0.8 <= dt <= 1.6, dt)
335 | ||
    @unittest.skipUnless(hasattr(signal, "alarm"),
                         "signal.alarm() required for this test")
    def test_select_interrupt(self):
        """A signal during select() must not make it return early.

        SIGALRM fires after 1s while select() waits 2s on a socket that never
        becomes readable; select() is expected to resume after the interrupt
        and still return an empty list within the overall deadline.
        """
        s = self.SELECTOR()
        self.addCleanup(s.close)

        rd, wr = self.make_socketpair()

        # install a no-op handler so SIGALRM interrupts without killing us,
        # and make sure both the handler and any pending alarm are restored
        orig_alrm_handler = signal.signal(signal.SIGALRM, lambda *args: None)
        self.addCleanup(signal.signal, signal.SIGALRM, orig_alrm_handler)
        self.addCleanup(signal.alarm, 0)

        signal.alarm(1)

        s.register(rd, selectors.EVENT_READ)
        t = time()
        self.assertFalse(s.select(2))
        self.assertLess(time() - t, 2.5)
354 | ||
355 | ||
class ScalableSelectorMixIn:
    """Extra test for selector implementations expected to scale past FD_SETSIZE."""

    # see issue #18963 for why it's skipped on older OS X versions
    @support.requires_mac_ver(10, 5)
    @unittest.skipUnless(resource, "Test needs resource module")
    def test_above_fd_setsize(self):
        """Register far more fds than select() could handle, then select()."""
        # A scalable implementation should have no problem with more than
        # FD_SETSIZE file descriptors. Since we don't know the value, we just
        # try to set the soft RLIMIT_NOFILE to the hard RLIMIT_NOFILE ceiling.
        soft, hard = resource.getrlimit(resource.RLIMIT_NOFILE)
        try:
            resource.setrlimit(resource.RLIMIT_NOFILE, (hard, hard))
            self.addCleanup(resource.setrlimit, resource.RLIMIT_NOFILE,
                            (soft, hard))
            NUM_FDS = min(hard, 2**16)
        except (OSError, ValueError):
            # raising the limit failed: fall back to the current soft limit
            NUM_FDS = soft

        # guard for already allocated FDs (stdin, stdout...)
        NUM_FDS -= 32

        s = self.SELECTOR()
        self.addCleanup(s.close)

        for i in range(NUM_FDS // 2):
            try:
                rd, wr = self.make_socketpair()
            except (IOError, OSError):
                # too many FDs, skip - note that we should only catch EMFILE
                # here, but apparently *BSD and Solaris can fail upon connect()
                # or bind() with EADDRNOTAVAIL, so let's be safe
                self.skipTest("FD limit reached")

            try:
                s.register(rd, selectors.EVENT_READ)
                s.register(wr, selectors.EVENT_WRITE)
            except OSError as e:
                if e.errno == errno.ENOSPC:
                    # this can be raised by epoll if we go over
                    # fs.epoll.max_user_watches sysctl
                    self.skipTest("FD limit reached")
                raise

        # nothing was written, so exactly the NUM_FDS // 2 write ends are ready
        self.assertEqual(NUM_FDS // 2, len(s.select()))
400 | ||
401 | ||
class DefaultSelectorTestCase(BaseSelectorTestCase, unittest.TestCase):
    """Run the selector test suite against the platform's preferred selector."""

    SELECTOR = selectors.DefaultSelector
405 | ||
406 | ||
class SelectSelectorTestCase(BaseSelectorTestCase, unittest.TestCase):
    """Run the selector test suite against the select()-based implementation."""

    SELECTOR = selectors.SelectSelector
410 | ||
411 | ||
@unittest.skipUnless(hasattr(selectors, 'PollSelector'),
                     "Test needs selectors.PollSelector")
class PollSelectorTestCase(BaseSelectorTestCase, ScalableSelectorMixIn, unittest.TestCase):
    """Run the suite (incl. scalability) against the poll()-based selector."""

    # getattr with a default keeps the module importable on platforms
    # where this implementation is missing (the skipUnless then applies)
    SELECTOR = getattr(selectors, 'PollSelector', None)
417 | ||
418 | ||
@unittest.skipUnless(hasattr(selectors, 'EpollSelector'),
                     "Test needs selectors.EpollSelector")
class EpollSelectorTestCase(BaseSelectorTestCase, ScalableSelectorMixIn, unittest.TestCase):
    """Run the suite (incl. scalability) against the epoll-based selector."""

    # getattr with a default keeps the module importable on platforms
    # where this implementation is missing (the skipUnless then applies)
    SELECTOR = getattr(selectors, 'EpollSelector', None)
424 | ||
425 | ||
# NOTE: the skip message previously ended with a stray ')' that was inside
# the string literal; it has been removed.
@unittest.skipUnless(hasattr(selectors, 'KqueueSelector'),
                     "Test needs selectors.KqueueSelector")
class KqueueSelectorTestCase(BaseSelectorTestCase, ScalableSelectorMixIn, unittest.TestCase):
    """Run the suite (incl. scalability) against the kqueue-based selector."""

    # getattr with a default keeps the module importable on platforms
    # where this implementation is missing (the skipUnless then applies)
    SELECTOR = getattr(selectors, 'KqueueSelector', None)
431 | ||
432 | ||
@unittest.skipUnless(hasattr(selectors, 'DevpollSelector'),
                     "Test needs selectors.DevpollSelector")
class DevpollSelectorTestCase(BaseSelectorTestCase, ScalableSelectorMixIn, unittest.TestCase):
    """Run the suite (incl. scalability) against the /dev/poll-based selector."""

    # getattr with a default keeps the module importable on platforms
    # where this implementation is missing (the skipUnless then applies)
    SELECTOR = getattr(selectors, 'DevpollSelector', None)
438 | ||
if __name__ == "__main__":
    # Run the whole module's test suite when executed directly.
    unittest.main()
2 | 2 | # This file is part of gunicorn released under the MIT license. |
3 | 3 | # See the NOTICE for more information. |
4 | 4 | |
5 | try: | |
6 | import unittest.mock as mock | |
7 | except ImportError: | |
8 | import mock | |
5 | import unittest.mock as mock | |
9 | 6 | |
10 | 7 | from gunicorn import sock |
8 | ||
9 | ||
@mock.patch('os.stat')
def test_create_sockets_unix_bytes(stat):
    """A bytes address yields exactly one UnixSocket listener.

    os.stat is patched so the (fake) socket path need not exist, and
    UnixSocket.__init__ is stubbed so no real socket is created.  The
    injected ``stat`` mock is unused on purpose.
    """
    conf = mock.Mock(address=[b'127.0.0.1:8000'])
    log = mock.Mock()
    with mock.patch.object(sock.UnixSocket, '__init__', lambda *args: None):
        listeners = sock.create_sockets(conf, log)
    # fix: removed a leftover debug print(type(listeners[0])) from the test
    assert len(listeners) == 1
    assert isinstance(listeners[0], sock.UnixSocket)
19 | ||
20 | ||
@mock.patch('os.stat')
def test_create_sockets_unix_strings(stat):
    """A str address yields exactly one UnixSocket listener.

    os.stat is patched so the (fake) socket path need not exist, and
    UnixSocket.__init__ is stubbed so no real socket is created.
    """
    def stub_init(*args):
        return None

    conf = mock.Mock(address=['127.0.0.1:8000'])
    log = mock.Mock()
    with mock.patch.object(sock.UnixSocket, '__init__', stub_init):
        listeners = sock.create_sockets(conf, log)
    assert len(listeners) == 1
    assert isinstance(listeners[0], sock.UnixSocket)
11 | 29 | |
12 | 30 | |
13 | 31 | def test_socket_close(): |
4 | 4 | # This file is part of gunicorn released under the MIT license. |
5 | 5 | # See the NOTICE for more information. |
6 | 6 | |
7 | import sys | |
8 | ||
9 | 7 | import pytest |
10 | 8 | |
11 | 9 | from gunicorn.config import ( |
12 | 10 | KeyFile, CertFile, SSLVersion, CACerts, SuppressRaggedEOFs, |
13 | DoHandshakeOnConnect, Setting, | |
11 | DoHandshakeOnConnect, Setting, Ciphers, | |
14 | 12 | ) |
15 | 13 | |
16 | 14 | ssl = pytest.importorskip('ssl') |
68 | 66 | assert DoHandshakeOnConnect.default is False |
69 | 67 | |
70 | 68 | |
71 | @pytest.mark.skipif(sys.version_info < (2, 7), | |
72 | reason="requires Python 2.7+") | |
def test_ciphers():
    """The Ciphers setting exposes the expected metadata and default."""
    assert issubclass(Ciphers, Setting)
    expected = {
        'name': 'ciphers',
        'section': 'SSL',
        'cli': ['--ciphers'],
        'default': None,
    }
    for attr, value in expected.items():
        assert getattr(Ciphers, attr) == value
0 | import io | |
1 | import logging | |
2 | import os | |
3 | import shutil | |
4 | import socket | |
5 | import tempfile | |
0 | 6 | from datetime import timedelta |
1 | import socket | |
2 | import logging | |
3 | import tempfile | |
4 | import shutil | |
5 | import os | |
7 | from types import SimpleNamespace | |
6 | 8 | |
7 | 9 | from gunicorn.config import Config |
8 | 10 | from gunicorn.instrument.statsd import Statsd |
9 | from gunicorn.six import StringIO | |
10 | ||
11 | from support import SimpleNamespace | |
12 | 11 | |
13 | 12 | |
14 | 13 | class StatsdTestException(Exception): |
59 | 58 | logger.exception("No impact on logging") |
60 | 59 | |
61 | 60 | |
def test_dogstatsd_tags():
    """Configured dogstatsd tags are appended to every emitted metric."""
    tags = 'yucatan,libertine:rhubarb'
    cfg = Config()
    cfg.set('dogstatsd_tags', tags)

    statsd = Statsd(cfg)
    statsd.sock = MockSocket(False)
    extra = {"mtype": "gauge", "metric": "barb.westerly", "value": 2}
    statsd.info("Twill", extra=extra)

    expected = b"barb.westerly:2|g|#" + tags.encode('ascii')
    assert statsd.sock.msgs[0] == expected
70 | ||
71 | ||
62 | 72 | def test_instrument(): |
63 | 73 | logger = Statsd(Config()) |
64 | 74 | # Capture logged messages |
65 | sio = StringIO() | |
75 | sio = io.StringIO() | |
66 | 76 | logger.error_log.addHandler(logging.StreamHandler(sio)) |
67 | 77 | logger.sock = MockSocket(False) |
68 | 78 |
4 | 4 | |
5 | 5 | from contextlib import contextmanager |
6 | 6 | import os |
7 | ||
8 | try: | |
9 | import unittest.mock as mock | |
10 | except ImportError: | |
11 | import mock | |
7 | import unittest.mock as mock | |
12 | 8 | |
13 | 9 | import pytest |
14 | 10 |
6 | 6 | |
7 | 7 | from gunicorn import util |
8 | 8 | from gunicorn.errors import AppImportError |
9 | from gunicorn.six.moves.urllib.parse import SplitResult # pylint: disable=no-name-in-module | |
9 | from urllib.parse import SplitResult | |
10 | 10 | |
11 | 11 | |
12 | 12 | @pytest.mark.parametrize('test_input, expected', [ |
13 | 13 | ('unix://var/run/test.sock', 'var/run/test.sock'), |
14 | 14 | ('unix:/var/run/test.sock', '/var/run/test.sock'), |
15 | ('tcp://localhost', ('localhost', 8000)), | |
16 | ('tcp://localhost:5000', ('localhost', 5000)), | |
15 | 17 | ('', ('0.0.0.0', 8000)), |
16 | 18 | ('[::1]:8000', ('::1', 8000)), |
19 | ('[::1]:5000', ('::1', 5000)), | |
20 | ('[::1]', ('::1', 8000)), | |
17 | 21 | ('localhost:8000', ('localhost', 8000)), |
18 | 22 | ('127.0.0.1:8000', ('127.0.0.1', 8000)), |
19 | ('localhost', ('localhost', 8000)) | |
23 | ('localhost', ('localhost', 8000)), | |
24 | ('fd://33', 33), | |
20 | 25 | ]) |
21 | 26 | def test_parse_address(test_input, expected): |
22 | 27 | assert util.parse_address(test_input) == expected |
23 | 28 | |
24 | 29 | |
25 | 30 | def test_parse_address_invalid(): |
26 | with pytest.raises(RuntimeError) as err: | |
31 | with pytest.raises(RuntimeError) as exc_info: | |
27 | 32 | util.parse_address('127.0.0.1:test') |
28 | assert "'test' is not a valid port number." in str(err) | |
33 | assert "'test' is not a valid port number." in str(exc_info.value) | |
34 | ||
35 | ||
36 | def test_parse_fd_invalid(): | |
37 | with pytest.raises(RuntimeError) as exc_info: | |
38 | util.parse_address('fd://asd') | |
39 | assert "'asd' is not a valid file descriptor." in str(exc_info.value) | |
29 | 40 | |
30 | 41 | |
31 | 42 | def test_http_date(): |
51 | 62 | def test_import_app(): |
52 | 63 | assert util.import_app('support:app') |
53 | 64 | |
54 | with pytest.raises(ImportError) as err: | |
65 | with pytest.raises(ImportError) as exc_info: | |
55 | 66 | util.import_app('a:app') |
56 | assert 'No module' in str(err) | |
67 | assert 'No module' in str(exc_info.value) | |
57 | 68 | |
58 | with pytest.raises(AppImportError) as err: | |
69 | with pytest.raises(AppImportError) as exc_info: | |
59 | 70 | util.import_app('support:wrong_app') |
60 | 71 | msg = "Failed to find application object 'wrong_app' in 'support'" |
61 | assert msg in str(err) | |
72 | assert msg in str(exc_info.value) | |
62 | 73 | |
63 | 74 | |
64 | 75 | def test_to_bytestring(): |
65 | 76 | assert util.to_bytestring('test_str', 'ascii') == b'test_str' |
66 | 77 | assert util.to_bytestring('test_str®') == b'test_str\xc2\xae' |
67 | 78 | assert util.to_bytestring(b'byte_test_str') == b'byte_test_str' |
68 | with pytest.raises(TypeError) as err: | |
79 | with pytest.raises(TypeError) as exc_info: | |
69 | 80 | util.to_bytestring(100) |
70 | 81 | msg = '100 is not a string' |
71 | assert msg in str(err) | |
82 | assert msg in str(exc_info.value) | |
72 | 83 | |
73 | 84 | |
74 | 85 | @pytest.mark.parametrize('test_input, expected', [ |
3 | 3 | # under the MIT license. |
4 | 4 | |
5 | 5 | import inspect |
6 | import importlib.machinery | |
6 | 7 | import os |
7 | 8 | import random |
8 | ||
9 | from gunicorn._compat import execfile_ | |
9 | import types | |
10 | ||
10 | 11 | from gunicorn.config import Config |
11 | 12 | from gunicorn.http.parser import RequestParser |
12 | 13 | from gunicorn.util import split_request_uri |
13 | from gunicorn import six | |
14 | 14 | |
15 | 15 | dirname = os.path.dirname(__file__) |
16 | 16 | random.seed() |
29 | 29 | |
30 | 30 | |
def load_py(fname):
    """Execute a Python config file and return its global namespace as a dict.

    The file runs as a throwaway module named '__config__' with ``uri`` and
    a fresh ``Config`` instance pre-seeded into its globals.
    """
    mod = types.ModuleType('__config__')
    mod.uri = uri
    mod.cfg = Config()
    loader = importlib.machinery.SourceFileLoader('__config__', fname)
    loader.exec_module(mod)
    return vars(mod)
37 | 39 | |
38 | 40 | |
39 | 41 | class request(object): |
70 | 72 | |
71 | 73 | def send_bytes(self): |
72 | 74 | for d in self.data: |
73 | if six.PY3: | |
74 | yield bytes([d]) | |
75 | else: | |
76 | yield d | |
75 | yield bytes([d]) | |
77 | 76 | |
78 | 77 | def send_random(self): |
79 | 78 | maxs = round(len(self.data) / 10) |
204 | 203 | if body: |
205 | 204 | raise AssertionError("Failed to read entire body: %r" % body) |
206 | 205 | try: |
207 | data = six.next(iter(req.body)) | |
206 | data = next(iter(req.body)) | |
208 | 207 | raise AssertionError("Read data after body finished: %r" % data) |
209 | 208 | except StopIteration: |
210 | 209 | pass |
283 | 282 | |
284 | 283 | def check(self, cfg): |
285 | 284 | p = RequestParser(cfg, self.send()) |
286 | six.next(p) | |
285 | next(p) |
0 | 0 | [tox] |
1 | envlist = py26, py27, py34, py35, py36, py36-dev, py37, pypy, lint | |
1 | envlist = py34, py35, py36, py37, py38, pypy3, lint | |
2 | 2 | skipsdist = True |
3 | 3 | |
4 | 4 | [testenv] |
6 | 6 | commands = py.test {posargs} |
7 | 7 | deps = |
8 | 8 | -rrequirements_test.txt |
9 | py26: unittest2 | |
10 | py{26,27},pypy: mock | |
11 | py{34,35,36,36-dev,37}: aiohttp | |
12 | 9 | |
13 | 10 | [testenv:lint] |
14 | 11 | commands = |
16 | 13 | gunicorn \ |
17 | 14 | tests/test_arbiter.py \ |
18 | 15 | tests/test_config.py \ |
19 | tests/test_gaiohttp.py \ | |
20 | 16 | tests/test_http.py \ |
21 | 17 | tests/test_invalid_requests.py \ |
22 | 18 | tests/test_logger.py \ |
29 | 25 | tests/test_valid_requests.py |
30 | 26 | deps = |
31 | 27 | pylint |
28 | ||
29 | [testenv:docs-lint] | |
30 | whitelist_externals = | |
31 | rst-lint | |
32 | bash | |
33 | grep | |
34 | deps = | |
35 | restructuredtext_lint | |
36 | pygments | |
37 | commands = | |
38 | rst-lint README.rst docs/README.rst | |
39 | bash -c "(set -o pipefail; rst-lint --encoding utf-8 docs/source/*.rst | grep -v 'Unknown interpreted text role\|Unknown directive type'); test $? == 1" |