import python-mockupdb_1.1.0.orig.tar.gz
Ondřej Nový
7 years ago
0 | *~ | |
1 | *#* | |
2 | .DS* | |
3 | *.class | |
4 | *.pyc | |
5 | *.pyd | |
6 | build/ | |
7 | dist/ | |
8 | .tox | |
9 | *.egg | |
10 | *.egg-info | |
11 | .eggs | |
12 | docs/_build |
0 | ======= | |
1 | Credits | |
2 | ======= | |
3 | ||
4 | Development Lead | |
5 | ---------------- | |
6 | ||
7 | * A. Jesse Jiryu Davis <jesse@mongodb.com> | |
8 | ||
9 | Contributors | |
10 | ------------ | |
11 | ||
12 | None yet. Why not be the first? |
0 | .. :changelog: | |
1 | ||
2 | Changelog | |
3 | ========= | |
4 | ||
5 | 1.1.0 (2016-02-11) | |
6 | ------------------ | |
7 | ||
8 | Add cursor_id property to OpGetMore, and ssl parameter to interactive_server. | |
9 | ||
10 | 1.0.3 (2015-09-12) | |
11 | ------------------ | |
12 | ||
13 | ``MockupDB(auto_ismaster=True)`` had just responded ``{"ok": 1}``, but this | |
14 | isn't enough to convince PyMongo 3 it's talking to a valid standalone, | |
15 | so auto-respond ``{"ok": 1, "ismaster": True}``. | |
16 | ||
17 | 1.0.2 (2015-09-11) | |
18 | ------------------ | |
19 | ||
20 | Restore Request.assert_matches method, used in pymongo-mockup-tests. | |
21 | ||
22 | 1.0.1 (2015-09-11) | |
23 | ------------------ | |
24 | ||
25 | Allow co-installation with PyMongo. | |
26 | ||
27 | 1.0.0 (2015-09-10) | |
28 | ------------------ | |
29 | ||
30 | First release. | |
31 | ||
32 | 0.1.0 (2015-02-25) | |
33 | ------------------ | |
34 | ||
35 | Development begun. |
0 | ============ | |
1 | Contributing | |
2 | ============ | |
3 | ||
4 | Contributions are welcome, and they are greatly appreciated! Every | |
5 | little bit helps, and credit will always be given. | |
6 | ||
7 | You can contribute in many ways: | |
8 | ||
9 | Types of Contributions | |
10 | ---------------------- | |
11 | ||
12 | Report Bugs | |
13 | ~~~~~~~~~~~ | |
14 | ||
15 | Report bugs at https://github.com/ajdavis/mongo-mockup-db/issues. | |
16 | ||
17 | If you are reporting a bug, please include: | |
18 | ||
19 | * Your operating system name and version. | |
20 | * Any details about your local setup that might be helpful in troubleshooting. | |
21 | * Detailed steps to reproduce the bug. | |
22 | ||
23 | Fix Bugs | |
24 | ~~~~~~~~ | |
25 | ||
26 | Look through the GitHub issues for bugs. Anything tagged with "bug" | |
27 | is open to whoever wants to implement it. | |
28 | ||
29 | Implement Features | |
30 | ~~~~~~~~~~~~~~~~~~ | |
31 | ||
32 | Look through the GitHub issues for features. Anything tagged with "feature" | |
33 | is open to whoever wants to implement it. | |
34 | ||
35 | Write Documentation | |
36 | ~~~~~~~~~~~~~~~~~~~ | |
37 | ||
38 | MockupDB could always use more documentation, whether as part of the | |
39 | official MockupDB docs, in docstrings, or even on the web in blog posts, | |
40 | articles, and such. | |
41 | ||
42 | Submit Feedback | |
43 | ~~~~~~~~~~~~~~~ | |
44 | ||
45 | The best way to send feedback is to file an issue at https://github.com/ajdavis/mongo-mockup-db/issues. | |
46 | ||
47 | If you are proposing a feature: | |
48 | ||
49 | * Explain in detail how it would work. | |
50 | * Keep the scope as narrow as possible, to make it easier to implement. | |
51 | * Remember that this is a volunteer-driven project, and that contributions | |
52 | are welcome :) | |
53 | ||
54 | Get Started! | |
55 | ------------ | |
56 | ||
57 | Ready to contribute? Here's how to set up MockupDB for local development. | |
58 | ||
59 | 1. Fork the `mongo-mockup-db` repo on GitHub. | |
60 | 2. Clone your fork locally:: | |
61 | ||
62 | $ git clone git@github.com:your_name_here/mongo-mockup-db.git | |
63 | ||
64 | 3. Install your local copy into a virtualenv. Assuming you have virtualenvwrapper installed, this is how you set up your fork for local development:: | |
65 | ||
66 | $ mkvirtualenv mongo-mockup-db | |
67 | $ cd mongo-mockup-db/ | |
68 | $ python setup.py develop | |
69 | ||
70 | 4. Create a branch for local development:: | |
71 | ||
72 | $ git checkout -b name-of-your-bugfix-or-feature | |
73 | ||
74 | Now you can make your changes locally. | |
75 | ||
76 | 5. When you're done making changes, check that your changes pass flake8 and the tests, including testing other Python versions with tox:: | |
77 | ||
78 | $ flake8 mockupdb tests | |
79 | $ python setup.py test | |
80 | $ tox | |
81 | ||
82 | To get flake8 and tox, just pip install them into your virtualenv. | |
83 | ||
84 | 6. Commit your changes and push your branch to GitHub:: | |
85 | ||
86 | $ git add . | |
87 | $ git commit -m "Your detailed description of your changes." | |
88 | $ git push origin name-of-your-bugfix-or-feature | |
89 | ||
90 | 7. Submit a pull request through the GitHub website. | |
91 | ||
92 | Pull Request Guidelines | |
93 | ----------------------- | |
94 | ||
95 | Before you submit a pull request, check that it meets these guidelines: | |
96 | ||
97 | 1. The pull request should include tests. | |
98 | 2. If the pull request adds functionality, the docs should be updated. Put | |
99 | your new functionality into a function with a docstring, and add the | |
100 | feature to the list in README.rst. | |
101 | 3. The pull request should work for Python 2.6, 2.7, 3.3, and 3.4. Check that | |
102 | tests pass in all versions with `tox`. | |
103 | ||
104 | Tips | |
105 | ---- | |
106 | ||
107 | To run a subset of tests:: | |
108 | ||
109 | $ python setup.py test -s tests.test_mockupdb |
0 | Apache License | |
1 | Version 2.0, January 2004 | |
2 | http://www.apache.org/licenses/ | |
3 | ||
4 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION | |
5 | ||
6 | 1. Definitions. | |
7 | ||
8 | "License" shall mean the terms and conditions for use, reproduction, | |
9 | and distribution as defined by Sections 1 through 9 of this document. | |
10 | ||
11 | "Licensor" shall mean the copyright owner or entity authorized by | |
12 | the copyright owner that is granting the License. | |
13 | ||
14 | "Legal Entity" shall mean the union of the acting entity and all | |
15 | other entities that control, are controlled by, or are under common | |
16 | control with that entity. For the purposes of this definition, | |
17 | "control" means (i) the power, direct or indirect, to cause the | |
18 | direction or management of such entity, whether by contract or | |
19 | otherwise, or (ii) ownership of fifty percent (50%) or more of the | |
20 | outstanding shares, or (iii) beneficial ownership of such entity. | |
21 | ||
22 | "You" (or "Your") shall mean an individual or Legal Entity | |
23 | exercising permissions granted by this License. | |
24 | ||
25 | "Source" form shall mean the preferred form for making modifications, | |
26 | including but not limited to software source code, documentation | |
27 | source, and configuration files. | |
28 | ||
29 | "Object" form shall mean any form resulting from mechanical | |
30 | transformation or translation of a Source form, including but | |
31 | not limited to compiled object code, generated documentation, | |
32 | and conversions to other media types. | |
33 | ||
34 | "Work" shall mean the work of authorship, whether in Source or | |
35 | Object form, made available under the License, as indicated by a | |
36 | copyright notice that is included in or attached to the work | |
37 | (an example is provided in the Appendix below). | |
38 | ||
39 | "Derivative Works" shall mean any work, whether in Source or Object | |
40 | form, that is based on (or derived from) the Work and for which the | |
41 | editorial revisions, annotations, elaborations, or other modifications | |
42 | represent, as a whole, an original work of authorship. For the purposes | |
43 | of this License, Derivative Works shall not include works that remain | |
44 | separable from, or merely link (or bind by name) to the interfaces of, | |
45 | the Work and Derivative Works thereof. | |
46 | ||
47 | "Contribution" shall mean any work of authorship, including | |
48 | the original version of the Work and any modifications or additions | |
49 | to that Work or Derivative Works thereof, that is intentionally | |
50 | submitted to Licensor for inclusion in the Work by the copyright owner | |
51 | or by an individual or Legal Entity authorized to submit on behalf of | |
52 | the copyright owner. For the purposes of this definition, "submitted" | |
53 | means any form of electronic, verbal, or written communication sent | |
54 | to the Licensor or its representatives, including but not limited to | |
55 | communication on electronic mailing lists, source code control systems, | |
56 | and issue tracking systems that are managed by, or on behalf of, the | |
57 | Licensor for the purpose of discussing and improving the Work, but | |
58 | excluding communication that is conspicuously marked or otherwise | |
59 | designated in writing by the copyright owner as "Not a Contribution." | |
60 | ||
61 | "Contributor" shall mean Licensor and any individual or Legal Entity | |
62 | on behalf of whom a Contribution has been received by Licensor and | |
63 | subsequently incorporated within the Work. | |
64 | ||
65 | 2. Grant of Copyright License. Subject to the terms and conditions of | |
66 | this License, each Contributor hereby grants to You a perpetual, | |
67 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable | |
68 | copyright license to reproduce, prepare Derivative Works of, | |
69 | publicly display, publicly perform, sublicense, and distribute the | |
70 | Work and such Derivative Works in Source or Object form. | |
71 | ||
72 | 3. Grant of Patent License. Subject to the terms and conditions of | |
73 | this License, each Contributor hereby grants to You a perpetual, | |
74 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable | |
75 | (except as stated in this section) patent license to make, have made, | |
76 | use, offer to sell, sell, import, and otherwise transfer the Work, | |
77 | where such license applies only to those patent claims licensable | |
78 | by such Contributor that are necessarily infringed by their | |
79 | Contribution(s) alone or by combination of their Contribution(s) | |
80 | with the Work to which such Contribution(s) was submitted. If You | |
81 | institute patent litigation against any entity (including a | |
82 | cross-claim or counterclaim in a lawsuit) alleging that the Work | |
83 | or a Contribution incorporated within the Work constitutes direct | |
84 | or contributory patent infringement, then any patent licenses | |
85 | granted to You under this License for that Work shall terminate | |
86 | as of the date such litigation is filed. | |
87 | ||
88 | 4. Redistribution. You may reproduce and distribute copies of the | |
89 | Work or Derivative Works thereof in any medium, with or without | |
90 | modifications, and in Source or Object form, provided that You | |
91 | meet the following conditions: | |
92 | ||
93 | (a) You must give any other recipients of the Work or | |
94 | Derivative Works a copy of this License; and | |
95 | ||
96 | (b) You must cause any modified files to carry prominent notices | |
97 | stating that You changed the files; and | |
98 | ||
99 | (c) You must retain, in the Source form of any Derivative Works | |
100 | that You distribute, all copyright, patent, trademark, and | |
101 | attribution notices from the Source form of the Work, | |
102 | excluding those notices that do not pertain to any part of | |
103 | the Derivative Works; and | |
104 | ||
105 | (d) If the Work includes a "NOTICE" text file as part of its | |
106 | distribution, then any Derivative Works that You distribute must | |
107 | include a readable copy of the attribution notices contained | |
108 | within such NOTICE file, excluding those notices that do not | |
109 | pertain to any part of the Derivative Works, in at least one | |
110 | of the following places: within a NOTICE text file distributed | |
111 | as part of the Derivative Works; within the Source form or | |
112 | documentation, if provided along with the Derivative Works; or, | |
113 | within a display generated by the Derivative Works, if and | |
114 | wherever such third-party notices normally appear. The contents | |
115 | of the NOTICE file are for informational purposes only and | |
116 | do not modify the License. You may add Your own attribution | |
117 | notices within Derivative Works that You distribute, alongside | |
118 | or as an addendum to the NOTICE text from the Work, provided | |
119 | that such additional attribution notices cannot be construed | |
120 | as modifying the License. | |
121 | ||
122 | You may add Your own copyright statement to Your modifications and | |
123 | may provide additional or different license terms and conditions | |
124 | for use, reproduction, or distribution of Your modifications, or | |
125 | for any such Derivative Works as a whole, provided Your use, | |
126 | reproduction, and distribution of the Work otherwise complies with | |
127 | the conditions stated in this License. | |
128 | ||
129 | 5. Submission of Contributions. Unless You explicitly state otherwise, | |
130 | any Contribution intentionally submitted for inclusion in the Work | |
131 | by You to the Licensor shall be under the terms and conditions of | |
132 | this License, without any additional terms or conditions. | |
133 | Notwithstanding the above, nothing herein shall supersede or modify | |
134 | the terms of any separate license agreement you may have executed | |
135 | with Licensor regarding such Contributions. | |
136 | ||
137 | 6. Trademarks. This License does not grant permission to use the trade | |
138 | names, trademarks, service marks, or product names of the Licensor, | |
139 | except as required for reasonable and customary use in describing the | |
140 | origin of the Work and reproducing the content of the NOTICE file. | |
141 | ||
142 | 7. Disclaimer of Warranty. Unless required by applicable law or | |
143 | agreed to in writing, Licensor provides the Work (and each | |
144 | Contributor provides its Contributions) on an "AS IS" BASIS, | |
145 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or | |
146 | implied, including, without limitation, any warranties or conditions | |
147 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A | |
148 | PARTICULAR PURPOSE. You are solely responsible for determining the | |
149 | appropriateness of using or redistributing the Work and assume any | |
150 | risks associated with Your exercise of permissions under this License. | |
151 | ||
152 | 8. Limitation of Liability. In no event and under no legal theory, | |
153 | whether in tort (including negligence), contract, or otherwise, | |
154 | unless required by applicable law (such as deliberate and grossly | |
155 | negligent acts) or agreed to in writing, shall any Contributor be | |
156 | liable to You for damages, including any direct, indirect, special, | |
157 | incidental, or consequential damages of any character arising as a | |
158 | result of this License or out of the use or inability to use the | |
159 | Work (including but not limited to damages for loss of goodwill, | |
160 | work stoppage, computer failure or malfunction, or any and all | |
161 | other commercial damages or losses), even if such Contributor | |
162 | has been advised of the possibility of such damages. | |
163 | ||
164 | 9. Accepting Warranty or Additional Liability. While redistributing | |
165 | the Work or Derivative Works thereof, You may choose to offer, | |
166 | and charge a fee for, acceptance of support, warranty, indemnity, | |
167 | or other liability obligations and/or rights consistent with this | |
168 | License. However, in accepting such obligations, You may act only | |
169 | on Your own behalf and on Your sole responsibility, not on behalf | |
170 | of any other Contributor, and only if You agree to indemnify, | |
171 | defend, and hold each Contributor harmless for any liability | |
172 | incurred by, or claims asserted against, such Contributor by reason | |
173 | of your accepting any such warranty or additional liability. | |
174 | ||
175 | END OF TERMS AND CONDITIONS | |
176 | ||
177 | APPENDIX: How to apply the Apache License to your work. | |
178 | ||
179 | To apply the Apache License to your work, attach the following | |
180 | boilerplate notice, with the fields enclosed by brackets "[]" | |
181 | replaced with your own identifying information. (Don't include | |
182 | the brackets!) The text should be enclosed in the appropriate | |
183 | comment syntax for the file format. We also recommend that a | |
184 | file or class name and description of purpose be included on the | |
185 | same "printed page" as the copyright notice for easier | |
186 | identification within third-party archives. | |
187 | ||
188 | Copyright [yyyy] [name of copyright owner] | |
189 | ||
190 | Licensed under the Apache License, Version 2.0 (the "License"); | |
191 | you may not use this file except in compliance with the License. | |
192 | You may obtain a copy of the License at | |
193 | ||
194 | http://www.apache.org/licenses/LICENSE-2.0 | |
195 | ||
196 | Unless required by applicable law or agreed to in writing, software | |
197 | distributed under the License is distributed on an "AS IS" BASIS, | |
198 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |
199 | See the License for the specific language governing permissions and | |
200 | limitations under the License. |
0 | include AUTHORS.rst | |
1 | include CONTRIBUTING.rst | |
2 | include CHANGELOG.rst | |
3 | include LICENSE | |
4 | include README.rst | |
5 | ||
6 | recursive-include mockupdb * | |
7 | recursive-include tests * | |
8 | recursive-exclude * __pycache__ | |
9 | recursive-exclude * *.py[co] | |
10 | ||
11 | recursive-include docs *.rst conf.py Makefile make.bat |
0 | .PHONY: clean-pyc clean-build docs clean | |
1 | ||
2 | help: | |
3 | @echo "clean - remove all build, test, coverage and Python artifacts" | |
4 | @echo "clean-build - remove build artifacts" | |
5 | @echo "clean-pyc - remove Python file artifacts" | |
6 | @echo "clean-test - remove test and coverage artifacts" | |
7 | @echo "lint - check style with flake8" | |
8 | @echo "test - run tests quickly with the default Python" | |
9 | @echo "test-all - run tests on every Python version with tox" | |
10 | @echo "coverage - check code coverage quickly with the default Python" | |
11 | @echo "docs - generate Sphinx HTML documentation, including API docs" | |
12 | @echo "release - package and upload a release" | |
13 | @echo "dist - package" | |
14 | @echo "install - install the package to the active Python's site-packages" | |
15 | ||
16 | clean: clean-build clean-pyc clean-test | |
17 | ||
18 | clean-build: | |
19 | rm -fr build/ | |
20 | rm -fr dist/ | |
21 | rm -fr .eggs/ | |
22 | find . -name '*.egg-info' -exec rm -fr {} + | |
23 | find . -name '*.egg' -exec rm -f {} + | |
24 | ||
25 | clean-pyc: | |
26 | find . -name '*.pyc' -exec rm -f {} + | |
27 | find . -name '*.pyo' -exec rm -f {} + | |
28 | find . -name '*~' -exec rm -f {} + | |
29 | find . -name '__pycache__' -exec rm -fr {} + | |
30 | ||
31 | clean-test: | |
32 | rm -fr .tox/ | |
33 | rm -f .coverage | |
34 | rm -fr htmlcov/ | |
35 | ||
36 | lint: | |
37 | flake8 mockupdb tests | |
38 | ||
39 | test: | |
40 | python setup.py test | |
41 | ||
42 | test-all: | |
43 | tox | |
44 | ||
45 | coverage: | |
46 | coverage run --source mockupdb setup.py test | |
47 | coverage report -m | |
48 | coverage html | |
49 | open htmlcov/index.html | |
50 | ||
51 | docs: | |
52 | rm -f docs/mockupdb.rst | |
53 | rm -f docs/modules.rst | |
54 | sphinx-apidoc -o docs/ mockupdb | |
55 | $(MAKE) -C docs clean | |
56 | $(MAKE) -C docs html | |
57 | open docs/_build/html/index.html | |
58 | ||
59 | release: clean | |
60 | python setup.py sdist upload | |
61 | python setup.py bdist_wheel upload | |
62 | ||
63 | dist: clean | |
64 | python setup.py sdist | |
65 | python setup.py bdist_wheel | |
66 | ls -l dist | |
67 | ||
68 | install: clean | |
69 | python setup.py install |
0 | ======== | |
1 | MockupDB | |
2 | ======== | |
3 | ||
4 | Mock server for testing MongoDB clients and creating MongoDB Wire Protocol | |
5 | servers. | |
6 | ||
7 | * Documentation: http://mockupdb.readthedocs.org/ |
0 | # Makefile for Sphinx documentation | |
1 | # | |
2 | ||
3 | # You can set these variables from the command line. | |
4 | SPHINXOPTS = | |
5 | SPHINXBUILD = sphinx-build | |
6 | PAPER = | |
7 | BUILDDIR = _build | |
8 | ||
9 | # User-friendly check for sphinx-build | |
10 | ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) | |
11 | $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) | |
12 | endif | |
13 | ||
14 | # Internal variables. | |
15 | PAPEROPT_a4 = -D latex_paper_size=a4 | |
16 | PAPEROPT_letter = -D latex_paper_size=letter | |
17 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . | |
18 | # the i18n builder cannot share the environment and doctrees with the others | |
19 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . | |
20 | ||
21 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext | |
22 | ||
23 | help: | |
24 | @echo "Please use \`make <target>' where <target> is one of" | |
25 | @echo " html to make standalone HTML files" | |
26 | @echo " dirhtml to make HTML files named index.html in directories" | |
27 | @echo " singlehtml to make a single large HTML file" | |
28 | @echo " pickle to make pickle files" | |
29 | @echo " json to make JSON files" | |
30 | @echo " htmlhelp to make HTML files and a HTML help project" | |
31 | @echo " qthelp to make HTML files and a qthelp project" | |
32 | @echo " devhelp to make HTML files and a Devhelp project" | |
33 | @echo " epub to make an epub" | |
34 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" | |
35 | @echo " latexpdf to make LaTeX files and run them through pdflatex" | |
36 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" | |
37 | @echo " text to make text files" | |
38 | @echo " man to make manual pages" | |
39 | @echo " texinfo to make Texinfo files" | |
40 | @echo " info to make Texinfo files and run them through makeinfo" | |
41 | @echo " gettext to make PO message catalogs" | |
42 | @echo " changes to make an overview of all changed/added/deprecated items" | |
43 | @echo " xml to make Docutils-native XML files" | |
44 | @echo " pseudoxml to make pseudoxml-XML files for display purposes" | |
45 | @echo " linkcheck to check all external links for integrity" | |
46 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" | |
47 | ||
48 | clean: | |
49 | rm -rf $(BUILDDIR)/* | |
50 | ||
51 | html: | |
52 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html | |
53 | @echo | |
54 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." | |
55 | ||
56 | dirhtml: | |
57 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml | |
58 | @echo | |
59 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." | |
60 | ||
61 | singlehtml: | |
62 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml | |
63 | @echo | |
64 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." | |
65 | ||
66 | pickle: | |
67 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle | |
68 | @echo | |
69 | @echo "Build finished; now you can process the pickle files." | |
70 | ||
71 | json: | |
72 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json | |
73 | @echo | |
74 | @echo "Build finished; now you can process the JSON files." | |
75 | ||
76 | htmlhelp: | |
77 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp | |
78 | @echo | |
79 | @echo "Build finished; now you can run HTML Help Workshop with the" \ | |
80 | ".hhp project file in $(BUILDDIR)/htmlhelp." | |
81 | ||
82 | qthelp: | |
83 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp | |
84 | @echo | |
85 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ | |
86 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" | |
87 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/mongo-mockup-db.qhcp" | |
88 | @echo "To view the help file:" | |
89 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/mongo-mockup-db.qhc" | |
90 | ||
91 | devhelp: | |
92 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp | |
93 | @echo | |
94 | @echo "Build finished." | |
95 | @echo "To view the help file:" | |
96 | @echo "# mkdir -p $$HOME/.local/share/devhelp/mongo-mockup-db" | |
97 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/mongo-mockup-db" | |
98 | @echo "# devhelp" | |
99 | ||
100 | epub: | |
101 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub | |
102 | @echo | |
103 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." | |
104 | ||
105 | latex: | |
106 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex | |
107 | @echo | |
108 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." | |
109 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ | |
110 | "(use \`make latexpdf' here to do that automatically)." | |
111 | ||
112 | latexpdf: | |
113 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex | |
114 | @echo "Running LaTeX files through pdflatex..." | |
115 | $(MAKE) -C $(BUILDDIR)/latex all-pdf | |
116 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." | |
117 | ||
118 | latexpdfja: | |
119 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex | |
120 | @echo "Running LaTeX files through platex and dvipdfmx..." | |
121 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja | |
122 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." | |
123 | ||
124 | text: | |
125 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text | |
126 | @echo | |
127 | @echo "Build finished. The text files are in $(BUILDDIR)/text." | |
128 | ||
129 | man: | |
130 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man | |
131 | @echo | |
132 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." | |
133 | ||
134 | texinfo: | |
135 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo | |
136 | @echo | |
137 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." | |
138 | @echo "Run \`make' in that directory to run these through makeinfo" \ | |
139 | "(use \`make info' here to do that automatically)." | |
140 | ||
141 | info: | |
142 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo | |
143 | @echo "Running Texinfo files through makeinfo..." | |
144 | make -C $(BUILDDIR)/texinfo info | |
145 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." | |
146 | ||
147 | gettext: | |
148 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale | |
149 | @echo | |
150 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." | |
151 | ||
152 | changes: | |
153 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes | |
154 | @echo | |
155 | @echo "The overview file is in $(BUILDDIR)/changes." | |
156 | ||
157 | linkcheck: | |
158 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck | |
159 | @echo | |
160 | @echo "Link check complete; look for any errors in the above output " \ | |
161 | "or in $(BUILDDIR)/linkcheck/output.txt." | |
162 | ||
163 | doctest: | |
164 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest | |
165 | @echo "Testing of doctests in the sources finished, look at the " \ | |
166 | "results in $(BUILDDIR)/doctest/output.txt." | |
167 | ||
168 | xml: | |
169 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml | |
170 | @echo | |
171 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml." | |
172 | ||
173 | pseudoxml: | |
174 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml | |
175 | @echo | |
176 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." |
Binary diff not shown
0 | /* | |
1 | * sidebar.js | |
2 | * ~~~~~~~~~~ | |
3 | * | |
4 | * This script makes the Sphinx sidebar collapsible and implements intelligent | |
5 | * scrolling. | |
6 | * | |
7 | * .sphinxsidebar contains .sphinxsidebarwrapper. This script adds in | |
8 | * .sphinxsidebar, after .sphinxsidebarwrapper, the #sidebarbutton used to | |
9 | * collapse and expand the sidebar. | |
10 | * | |
11 | * When the sidebar is collapsed the .sphinxsidebarwrapper is hidden and the | |
12 | * width of the sidebar and the margin-left of the document are decreased. | |
13 | * When the sidebar is expanded the opposite happens. This script saves a | |
14 | * per-browser/per-session cookie used to remember the position of the sidebar | |
15 | * among the pages. Once the browser is closed the cookie is deleted and the | |
16 | * position reset to the default (expanded). | |
17 | * | |
18 | * :copyright: Copyright 2007-2011 by the Sphinx team, see AUTHORS. | |
19 | * :license: BSD, see LICENSE for details. | |
20 | * | |
21 | */ | |
22 | ||
23 | $(function() { | |
24 | // global elements used by the functions. | |
25 | // the 'sidebarbutton' element is defined as global after its | |
26 | // creation, in the add_sidebar_button function | |
27 | var jwindow = $(window); | |
28 | var jdocument = $(document); | |
29 | var bodywrapper = $('.bodywrapper'); | |
30 | var sidebar = $('.sphinxsidebar'); | |
31 | var sidebarwrapper = $('.sphinxsidebarwrapper'); | |
32 | ||
33 | // original margin-left of the bodywrapper and width of the sidebar | |
34 | // with the sidebar expanded | |
35 | var bw_margin_expanded = bodywrapper.css('margin-left'); | |
36 | var ssb_width_expanded = sidebar.width(); | |
37 | ||
38 | // margin-left of the bodywrapper and width of the sidebar | |
39 | // with the sidebar collapsed | |
40 | var bw_margin_collapsed = '.8em'; | |
41 | var ssb_width_collapsed = '.8em'; | |
42 | ||
43 | // colors used by the current theme | |
44 | var dark_color = '#AAAAAA'; | |
45 | var light_color = '#CCCCCC'; | |
46 | ||
47 | function get_viewport_height() { | |
48 | if (window.innerHeight) | |
49 | return window.innerHeight; | |
50 | else | |
51 | return jwindow.height(); | |
52 | } | |
53 | ||
54 | function sidebar_is_collapsed() { | |
55 | return sidebarwrapper.is(':not(:visible)'); | |
56 | } | |
57 | ||
58 | function toggle_sidebar() { | |
59 | if (sidebar_is_collapsed()) | |
60 | expand_sidebar(); | |
61 | else | |
62 | collapse_sidebar(); | |
63 | // adjust the scrolling of the sidebar | |
64 | scroll_sidebar(); | |
65 | } | |
66 | ||
67 | function collapse_sidebar() { | |
68 | sidebarwrapper.hide(); | |
69 | sidebar.css('width', ssb_width_collapsed); | |
70 | bodywrapper.css('margin-left', bw_margin_collapsed); | |
71 | sidebarbutton.css({ | |
72 | 'margin-left': '0', | |
73 | 'height': bodywrapper.height(), | |
74 | 'border-radius': '5px' | |
75 | }); | |
76 | sidebarbutton.find('span').text('»'); | |
77 | sidebarbutton.attr('title', _('Expand sidebar')); | |
78 | document.cookie = 'sidebar=collapsed'; | |
79 | } | |
80 | ||
  // Restore the sidebar to its original width (captured at load time in
  // ssb_width_expanded / bw_margin_expanded), show its contents, and
  // turn the toggle button back into a "«" collapse handle.  Persists
  // the state in the "sidebar" cookie.
  function expand_sidebar() {
    bodywrapper.css('margin-left', bw_margin_expanded);
    sidebar.css('width', ssb_width_expanded);
    sidebarwrapper.show();
    sidebarbutton.css({
        'margin-left': ssb_width_expanded-12,
        'height': bodywrapper.height(),
        'border-radius': '0 5px 5px 0'
    });
    sidebarbutton.find('span').text('«');
    sidebarbutton.attr('title', _('Collapse sidebar'));
    //sidebarwrapper.css({'padding-top':
    //  Math.max(window.pageYOffset - sidebarwrapper.offset().top, 10)});
    document.cookie = 'sidebar=expanded';
  }
96 | ||
  // Create the clickable "«" toggle button, size and position it next to
  // the sidebar, and wire up its click/hover handlers.  Runs once at
  // page load.
  function add_sidebar_button() {
    sidebarwrapper.css({
        'float': 'left',
        'margin-right': '0',
        'width': ssb_width_expanded - 28
    });
    // create the button
    sidebar.append(
        '<div id="sidebarbutton"><span>«</span></div>'
    );
    var sidebarbutton = $('#sidebarbutton');
    // find the height of the viewport to center the '<<' in the page
    var viewport_height = get_viewport_height();
    var sidebar_offset = sidebar.offset().top;
    var sidebar_height = Math.max(bodywrapper.height(), sidebar.height());
    // center the glyph vertically within whichever is shorter: the
    // viewport or the sidebar itself
    sidebarbutton.find('span').css({
        'display': 'block',
        'position': 'fixed',
        'top': Math.min(viewport_height/2, sidebar_height/2 + sidebar_offset) - 10
    });

    sidebarbutton.click(toggle_sidebar);
    sidebarbutton.attr('title', _('Collapse sidebar'));
    sidebarbutton.css({
        'border-radius': '0 5px 5px 0',
        'color': '#444444',
        'background-color': '#CCCCCC',
        'font-size': '1.2em',
        'cursor': 'pointer',
        'height': sidebar_height,
        'padding-top': '1px',
        'padding-left': '1px',
        'margin-left': ssb_width_expanded - 12
    });

    // darken the button while hovered, restore on mouse-out
    sidebarbutton.hover(
      function () {
          $(this).css('background-color', dark_color);
      },
      function () {
          $(this).css('background-color', light_color);
      }
    );
  }
141 | ||
142 | function set_position_from_cookie() { | |
143 | if (!document.cookie) | |
144 | return; | |
145 | var items = document.cookie.split(';'); | |
146 | for(var k=0; k<items.length; k++) { | |
147 | var key_val = items[k].split('='); | |
148 | var key = key_val[0]; | |
149 | if (key == 'sidebar') { | |
150 | var value = key_val[1]; | |
151 | if ((value == 'collapsed') && (!sidebar_is_collapsed())) | |
152 | collapse_sidebar(); | |
153 | else if ((value == 'expanded') && (sidebar_is_collapsed())) | |
154 | expand_sidebar(); | |
155 | } | |
156 | } | |
157 | } | |
158 | ||
  // Wire everything up: create the toggle button, cache a reference to
  // it for the handlers above, then restore the collapsed/expanded
  // state from a previous visit.
  add_sidebar_button();
  var sidebarbutton = $('#sidebarbutton');
  set_position_from_cookie();
162 | ||
163 | ||
164 | /* intelligent scrolling */ | |
  // Keep the sidebar usefully positioned as the page scrolls: if it fits
  // in the viewport it is pinned near the top; otherwise it only moves
  // when one of its edges would leave the window, so the user can still
  // scroll through its contents.
  function scroll_sidebar() {
    var sidebar_height = sidebarwrapper.height();
    var viewport_height = get_viewport_height();
    var offset = sidebar.position()['top'];
    var wintop = jwindow.scrollTop();
    var winbot = wintop + viewport_height;
    var curtop = sidebarwrapper.position()['top'];
    var curbot = curtop + sidebar_height;
    // does sidebar fit in window?
    if (sidebar_height < viewport_height) {
      // yes: easy case -- always keep at the top
      sidebarwrapper.css('top', $u.min([$u.max([0, wintop - offset - 10]),
                            jdocument.height() - sidebar_height - 200]));
    }
    else {
      // no: only scroll if top/bottom edge of sidebar is at
      // top/bottom edge of window
      if (curtop > wintop && curbot > winbot) {
        sidebarwrapper.css('top', $u.max([wintop - offset - 10, 0]));
      }
      else if (curtop < wintop && curbot < winbot) {
        sidebarwrapper.css('top', $u.min([winbot - sidebar_height - offset - 20,
                              jdocument.height() - sidebar_height - 200]));
      }
    }
  }
  // Re-evaluate the sidebar position on every window scroll event.
  jwindow.scroll(scroll_sidebar);
192 | }); |
0 | .. include:: ../AUTHORS.rst |
0 | .. include:: ../CHANGELOG.rst |
0 | #!/usr/bin/env python | |
1 | # -*- coding: utf-8 -*- | |
2 | # | |
3 | # mongo-mockup-db documentation build configuration file, created by | |
4 | # sphinx-quickstart on Tue Jul 9 22:26:36 2013. | |
5 | # | |
6 | # This file is execfile()d with the current directory set to its | |
7 | # containing dir. | |
8 | # | |
9 | # Note that not all possible configuration values are present in this | |
10 | # autogenerated file. | |
11 | # | |
12 | # All configuration values have a default; values that are commented out | |
13 | # serve to show the default. | |
14 | ||
import sys
import os

# If extensions (or modules to document with autodoc) are in another
# directory, add these directories to sys.path here. If the directory is
# relative to the documentation root, use os.path.abspath to make it
# absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))

# Get the project root dir, which is the parent dir of this
# docs directory (Sphinx runs conf.py with the cwd set to docs/).
cwd = os.getcwd()
project_root = os.path.dirname(cwd)

# Insert the project root dir as the first element in the PYTHONPATH.
# This lets us ensure that the source package is imported, and that its
# version is used.
sys.path.insert(0, project_root)

# Imported for its __version__ (used below for `version`/`release`);
# relies on the sys.path insertion above to find the in-tree package.
import mockupdb

# -- General configuration ---------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.doctest',
    'sphinx.ext.coverage',
    'sphinx.ext.todo',
    'sphinx.ext.intersphinx',
]

# Cross-reference targets for links into the Python standard library
# and PyMongo documentation.
intersphinx_mapping = {
    'python': ('http://python.readthedocs.org/', None),
    'pymongo': ('http://api.mongodb.org/python/current/', None),
}

primary_domain = 'py'
default_role = 'py:obj'

# Executed before every doctest in the documentation; the OrderedDict
# fallback keeps the doctests running on Python 2.6.
doctest_global_setup = """
try:
    from collections import OrderedDict
except:
    from ordereddict import OrderedDict # Python 2.6, "pip install ordereddict"
from mockupdb import *
"""
65 | ||
66 | # Add any paths that contain templates here, relative to this directory. | |
67 | templates_path = ['_templates'] | |
68 | ||
69 | # The suffix of source filenames. | |
70 | source_suffix = '.rst' | |
71 | ||
72 | # The encoding of source files. | |
73 | #source_encoding = 'utf-8-sig' | |
74 | ||
75 | # The master toctree document. | |
76 | master_doc = 'index' | |
77 | ||
78 | # General information about the project. | |
79 | project = 'MockupDB' | |
80 | copyright = '2015, MongoDB, Inc.' | |
81 | ||
82 | # The version info for the project you're documenting, acts as replacement | |
83 | # for |version| and |release|, also used in various other places throughout | |
84 | # the built documents. | |
85 | # | |
86 | # The short X.Y version. | |
87 | version = mockupdb.__version__ | |
88 | # The full version, including alpha/beta/rc tags. | |
89 | release = mockupdb.__version__ | |
90 | ||
91 | # The language for content autogenerated by Sphinx. Refer to documentation | |
92 | # for a list of supported languages. | |
93 | #language = None | |
94 | ||
95 | # There are two options for replacing |today|: either, you set today to | |
96 | # some non-false value, then it is used: | |
97 | #today = '' | |
98 | # Else, today_fmt is used as the format for a strftime call. | |
99 | #today_fmt = '%B %d, %Y' | |
100 | ||
101 | # List of patterns, relative to source directory, that match files and | |
102 | # directories to ignore when looking for source files. | |
103 | exclude_patterns = ['_build'] | |
104 | ||
105 | # The reST default role (used for this markup: `text`) to use for all | |
106 | # documents. | |
107 | #default_role = None | |
108 | ||
109 | # If true, '()' will be appended to :func: etc. cross-reference text. | |
110 | #add_function_parentheses = True | |
111 | ||
112 | # If true, the current module name will be prepended to all description | |
113 | # unit titles (such as .. function::). | |
114 | #add_module_names = True | |
115 | ||
116 | # If true, sectionauthor and moduleauthor directives will be shown in the | |
117 | # output. They are ignored by default. | |
118 | #show_authors = False | |
119 | ||
120 | # The name of the Pygments (syntax highlighting) style to use. | |
121 | pygments_style = 'sphinx' | |
122 | ||
123 | # A list of ignored prefixes for module index sorting. | |
124 | #modindex_common_prefix = [] | |
125 | ||
126 | # If true, keep warnings as "system message" paragraphs in the built | |
127 | # documents. | |
128 | #keep_warnings = False | |
129 | ||
130 | ||
131 | # -- Options for HTML output ------------------------------------------- | |
132 | ||
133 | # Theme gratefully vendored from CPython source. | |
134 | html_theme = "pydoctheme" | |
135 | html_theme_path = ["."] | |
136 | html_theme_options = {'collapsiblesidebar': True} | |
137 | ||
138 | # Theme options are theme-specific and customize the look and feel of a | |
139 | # theme further. For a list of options available for each theme, see the | |
140 | # documentation. | |
141 | #html_theme_options = {} | |
142 | ||
143 | # Add any paths that contain custom themes here, relative to this directory. | |
144 | #html_theme_path = [] | |
145 | ||
146 | # The name for this set of Sphinx documents. If None, it defaults to | |
147 | # "<project> v<release> documentation". | |
148 | #html_title = None | |
149 | ||
150 | # A shorter title for the navigation bar. Default is the same as | |
151 | # html_title. | |
152 | #html_short_title = None | |
153 | ||
154 | # The name of an image file (relative to this directory) to place at the | |
155 | # top of the sidebar. | |
156 | #html_logo = None | |
157 | ||
158 | # The name of an image file (within the static path) to use as favicon | |
159 | # of the docs. This file should be a Windows icon file (.ico) being | |
160 | # 16x16 or 32x32 pixels large. | |
161 | #html_favicon = None | |
162 | ||
163 | # Add any paths that contain custom static files (such as style sheets) | |
164 | # here, relative to this directory. They are copied after the builtin | |
165 | # static files, so a file named "default.css" will overwrite the builtin | |
166 | # "default.css". | |
167 | html_static_path = ['_static'] | |
168 | ||
169 | # If not '', a 'Last updated on:' timestamp is inserted at every page | |
170 | # bottom, using the given strftime format. | |
171 | #html_last_updated_fmt = '%b %d, %Y' | |
172 | ||
173 | # If true, SmartyPants will be used to convert quotes and dashes to | |
174 | # typographically correct entities. | |
175 | #html_use_smartypants = True | |
176 | ||
177 | # Custom sidebar templates, maps document names to template names. | |
178 | #html_sidebars = {} | |
179 | ||
180 | # Additional templates that should be rendered to pages, maps page names | |
181 | # to template names. | |
182 | #html_additional_pages = {} | |
183 | ||
184 | # If false, no module index is generated. | |
185 | #html_domain_indices = True | |
186 | ||
187 | # If false, no index is generated. | |
188 | #html_use_index = True | |
189 | ||
190 | # If true, the index is split into individual pages for each letter. | |
191 | #html_split_index = False | |
192 | ||
193 | # If true, links to the reST sources are added to the pages. | |
194 | #html_show_sourcelink = True | |
195 | ||
196 | # If true, "Created using Sphinx" is shown in the HTML footer. | |
197 | # Default is True. | |
198 | #html_show_sphinx = True | |
199 | ||
200 | # If true, "(C) Copyright ..." is shown in the HTML footer. | |
201 | # Default is True. | |
202 | #html_show_copyright = True | |
203 | ||
204 | # If true, an OpenSearch description file will be output, and all pages | |
205 | # will contain a <link> tag referring to it. The value of this option | |
206 | # must be the base URL from which the finished HTML is served. | |
207 | #html_use_opensearch = '' | |
208 | ||
209 | # This is the file name suffix for HTML files (e.g. ".xhtml"). | |
210 | #html_file_suffix = None | |
211 | ||
212 | # Output file base name for HTML help builder. | |
213 | htmlhelp_basename = 'mockupdbdoc' | |
214 | ||
215 | ||
216 | # -- Options for LaTeX output ------------------------------------------ | |
217 | ||
218 | latex_elements = { | |
219 | # The paper size ('letterpaper' or 'a4paper'). | |
220 | #'papersize': 'letterpaper', | |
221 | ||
222 | # The font size ('10pt', '11pt' or '12pt'). | |
223 | #'pointsize': '10pt', | |
224 | ||
225 | # Additional stuff for the LaTeX preamble. | |
226 | #'preamble': '', | |
227 | } | |
228 | ||
229 | # Grouping the document tree into LaTeX files. List of tuples | |
230 | # (source start file, target name, title, author, documentclass | |
231 | # [howto/manual]). | |
232 | latex_documents = [ | |
233 | ('index', 'mockupdb.tex', | |
234 | 'MockupDB Documentation', | |
235 | 'A. Jesse Jiryu Davis', 'manual'), | |
236 | ] | |
237 | ||
238 | # The name of an image file (relative to this directory) to place at | |
239 | # the top of the title page. | |
240 | #latex_logo = None | |
241 | ||
242 | # For "manual" documents, if this is true, then toplevel headings | |
243 | # are parts, not chapters. | |
244 | #latex_use_parts = False | |
245 | ||
246 | # If true, show page references after internal links. | |
247 | #latex_show_pagerefs = False | |
248 | ||
249 | # If true, show URL addresses after external links. | |
250 | #latex_show_urls = False | |
251 | ||
252 | # Documents to append as an appendix to all manuals. | |
253 | #latex_appendices = [] | |
254 | ||
255 | # If false, no module index is generated. | |
256 | #latex_domain_indices = True | |
257 | ||
258 | ||
259 | # -- Options for manual page output ------------------------------------ | |
260 | ||
261 | # One entry per manual page. List of tuples | |
262 | # (source start file, name, description, authors, manual section). | |
263 | man_pages = [ | |
264 | ('index', 'mockupdb', | |
265 | 'MockupDB Documentation', | |
266 | ['A. Jesse Jiryu Davis'], 1) | |
267 | ] | |
268 | ||
269 | # If true, show URL addresses after external links. | |
270 | #man_show_urls = False | |
271 | ||
272 | ||
273 | # -- Options for Texinfo output ---------------------------------------- | |
274 | ||
275 | # Grouping the document tree into Texinfo files. List of tuples | |
276 | # (source start file, target name, title, author, | |
277 | # dir menu entry, description, category) | |
278 | texinfo_documents = [ | |
279 | ('index', 'mockupdb', | |
280 | 'MockupDB Documentation', | |
281 | 'A. Jesse Jiryu Davis', | |
282 | 'mockupdb', | |
283 | ('Mock server for testing MongoDB clients and creating MongoDB Wire Protocol' | |
284 | ' servers.'), | |
285 | 'Miscellaneous'), | |
286 | ] | |
287 | ||
288 | # Documents to append as an appendix to all manuals. | |
289 | #texinfo_appendices = [] | |
290 | ||
291 | # If false, no module index is generated. | |
292 | #texinfo_domain_indices = True | |
293 | ||
294 | # How to display URL addresses: 'footnote', 'no', or 'inline'. | |
295 | #texinfo_show_urls = 'footnote' | |
296 | ||
297 | # If true, do not generate a @detailmenu in the "Top" node's menu. | |
298 | #texinfo_no_detailmenu = False |
0 | .. include:: ../CONTRIBUTING.rst |
0 | .. mongo-mockup-db documentation master file, created by | |
1 | sphinx-quickstart on Tue Jul 9 22:26:36 2013. | |
2 | You can adapt this file completely to your liking, but it should at least | |
3 | contain the root `toctree` directive. | |
4 | ||
5 | MockupDB | |
6 | ======== | |
7 | ||
8 | .. image:: _static/mask.jpg | |
9 | ||
10 | Mock server for testing MongoDB clients and creating `MongoDB Wire Protocol`_ | |
11 | servers. | |
12 | ||
13 | The :doc:`tutorial` is the primary documentation. | |
14 | ||
15 | Contents: | |
16 | ||
17 | .. toctree:: | |
18 | :maxdepth: 1 | |
19 | ||
20 | readme | |
21 | installation | |
22 | tutorial | |
23 | reference | |
24 | contributing | |
25 | authors | |
26 | changelog | |
27 | ||
28 | Indices and tables | |
29 | ================== | |
30 | ||
31 | * :ref:`genindex` | |
32 | * :ref:`modindex` | |
33 | * :ref:`search` | |
34 | ||
35 | Image Credit: `gnuckx <https://www.flickr.com/photos/34409164@N06/4708707234/>`_ | |
36 | ||
37 | .. _MongoDB Wire Protocol: http://docs.mongodb.org/meta-driver/latest/legacy/mongodb-wire-protocol/ |
0 | ============ | |
1 | Installation | |
2 | ============ | |
3 | ||
MockupDB requires PyMongo_. It uses PyMongo's ``bson`` package to encode
5 | and decode MongoDB Wire Protocol message bodies. | |
6 | ||
7 | At the command line:: | |
8 | ||
9 | $ easy_install mongo-mockup-db | |
10 | ||
11 | Or, if you have virtualenvwrapper installed:: | |
12 | ||
13 | $ mkvirtualenv mongo-mockup-db | |
14 | $ pip install mongo-mockup-db | |
15 | ||
16 | .. _PyMongo: https://pypi.python.org/pypi/pymongo/ |
@ECHO OFF

REM Command file for Sphinx documentation

REM Allow the caller to override the sphinx-build executable via the
REM SPHINXBUILD environment variable.
if "%SPHINXBUILD%" == "" (
	set SPHINXBUILD=sphinx-build
)
set BUILDDIR=_build
REM -d places the pickled doctree cache under %BUILDDIR%/doctrees.
set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
set I18NSPHINXOPTS=%SPHINXOPTS% .
if NOT "%PAPER%" == "" (
	set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
	set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
)

REM No target given: show the usage message.
if "%1" == "" goto help
16 | ||
17 | if "%1" == "help" ( | |
18 | :help | |
19 | echo.Please use `make ^<target^>` where ^<target^> is one of | |
20 | echo. html to make standalone HTML files | |
21 | echo. dirhtml to make HTML files named index.html in directories | |
22 | echo. singlehtml to make a single large HTML file | |
23 | echo. pickle to make pickle files | |
24 | echo. json to make JSON files | |
25 | echo. htmlhelp to make HTML files and a HTML help project | |
26 | echo. qthelp to make HTML files and a qthelp project | |
27 | echo. devhelp to make HTML files and a Devhelp project | |
28 | echo. epub to make an epub | |
29 | echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter | |
30 | echo. text to make text files | |
31 | echo. man to make manual pages | |
32 | echo. texinfo to make Texinfo files | |
33 | echo. gettext to make PO message catalogs | |
34 | echo. changes to make an overview over all changed/added/deprecated items | |
35 | echo. xml to make Docutils-native XML files | |
36 | echo. pseudoxml to make pseudoxml-XML files for display purposes | |
37 | echo. linkcheck to check all external links for integrity | |
38 | echo. doctest to run all doctests embedded in the documentation if enabled | |
39 | goto end | |
40 | ) | |
41 | ||
42 | if "%1" == "clean" ( | |
43 | for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i | |
44 | del /q /s %BUILDDIR%\* | |
45 | goto end | |
46 | ) | |
47 | ||
48 | ||
49 | %SPHINXBUILD% 2> nul | |
50 | if errorlevel 9009 ( | |
51 | echo. | |
52 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx | |
53 | echo.installed, then set the SPHINXBUILD environment variable to point | |
54 | echo.to the full path of the 'sphinx-build' executable. Alternatively you | |
55 | echo.may add the Sphinx directory to PATH. | |
56 | echo. | |
57 | echo.If you don't have Sphinx installed, grab it from | |
58 | echo.http://sphinx-doc.org/ | |
59 | exit /b 1 | |
60 | ) | |
61 | ||
62 | if "%1" == "html" ( | |
63 | %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html | |
64 | if errorlevel 1 exit /b 1 | |
65 | echo. | |
66 | echo.Build finished. The HTML pages are in %BUILDDIR%/html. | |
67 | goto end | |
68 | ) | |
69 | ||
70 | if "%1" == "dirhtml" ( | |
71 | %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml | |
72 | if errorlevel 1 exit /b 1 | |
73 | echo. | |
74 | echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. | |
75 | goto end | |
76 | ) | |
77 | ||
78 | if "%1" == "singlehtml" ( | |
79 | %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml | |
80 | if errorlevel 1 exit /b 1 | |
81 | echo. | |
82 | echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. | |
83 | goto end | |
84 | ) | |
85 | ||
86 | if "%1" == "pickle" ( | |
87 | %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle | |
88 | if errorlevel 1 exit /b 1 | |
89 | echo. | |
90 | echo.Build finished; now you can process the pickle files. | |
91 | goto end | |
92 | ) | |
93 | ||
94 | if "%1" == "json" ( | |
95 | %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json | |
96 | if errorlevel 1 exit /b 1 | |
97 | echo. | |
98 | echo.Build finished; now you can process the JSON files. | |
99 | goto end | |
100 | ) | |
101 | ||
102 | if "%1" == "htmlhelp" ( | |
103 | %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp | |
104 | if errorlevel 1 exit /b 1 | |
105 | echo. | |
106 | echo.Build finished; now you can run HTML Help Workshop with the ^ | |
107 | .hhp project file in %BUILDDIR%/htmlhelp. | |
108 | goto end | |
109 | ) | |
110 | ||
if "%1" == "qthelp" (
	%SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished; now you can run "qcollectiongenerator" with the ^
.qhcp project file in %BUILDDIR%/qthelp, like this:
	echo.^> qcollectiongenerator %BUILDDIR%\qthelp\mongo-mockup-db.qhcp
	echo.To view the help file:
	REM qcollectiongenerator compiles the .qhcp project into a .qhc
	REM collection file; the message previously pointed at a nonexistent
	REM ".ghc" extension.
	echo.^> assistant -collectionFile %BUILDDIR%\qthelp\mongo-mockup-db.qhc
	goto end
)
122 | ||
123 | if "%1" == "devhelp" ( | |
124 | %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp | |
125 | if errorlevel 1 exit /b 1 | |
126 | echo. | |
127 | echo.Build finished. | |
128 | goto end | |
129 | ) | |
130 | ||
131 | if "%1" == "epub" ( | |
132 | %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub | |
133 | if errorlevel 1 exit /b 1 | |
134 | echo. | |
135 | echo.Build finished. The epub file is in %BUILDDIR%/epub. | |
136 | goto end | |
137 | ) | |
138 | ||
139 | if "%1" == "latex" ( | |
140 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex | |
141 | if errorlevel 1 exit /b 1 | |
142 | echo. | |
143 | echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. | |
144 | goto end | |
145 | ) | |
146 | ||
147 | if "%1" == "latexpdf" ( | |
148 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex | |
149 | cd %BUILDDIR%/latex | |
150 | make all-pdf | |
151 | cd %BUILDDIR%/.. | |
152 | echo. | |
153 | echo.Build finished; the PDF files are in %BUILDDIR%/latex. | |
154 | goto end | |
155 | ) | |
156 | ||
157 | if "%1" == "latexpdfja" ( | |
158 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex | |
159 | cd %BUILDDIR%/latex | |
160 | make all-pdf-ja | |
161 | cd %BUILDDIR%/.. | |
162 | echo. | |
163 | echo.Build finished; the PDF files are in %BUILDDIR%/latex. | |
164 | goto end | |
165 | ) | |
166 | ||
167 | if "%1" == "text" ( | |
168 | %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text | |
169 | if errorlevel 1 exit /b 1 | |
170 | echo. | |
171 | echo.Build finished. The text files are in %BUILDDIR%/text. | |
172 | goto end | |
173 | ) | |
174 | ||
175 | if "%1" == "man" ( | |
176 | %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man | |
177 | if errorlevel 1 exit /b 1 | |
178 | echo. | |
179 | echo.Build finished. The manual pages are in %BUILDDIR%/man. | |
180 | goto end | |
181 | ) | |
182 | ||
183 | if "%1" == "texinfo" ( | |
184 | %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo | |
185 | if errorlevel 1 exit /b 1 | |
186 | echo. | |
187 | echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. | |
188 | goto end | |
189 | ) | |
190 | ||
191 | if "%1" == "gettext" ( | |
192 | %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale | |
193 | if errorlevel 1 exit /b 1 | |
194 | echo. | |
195 | echo.Build finished. The message catalogs are in %BUILDDIR%/locale. | |
196 | goto end | |
197 | ) | |
198 | ||
199 | if "%1" == "changes" ( | |
200 | %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes | |
201 | if errorlevel 1 exit /b 1 | |
202 | echo. | |
203 | echo.The overview file is in %BUILDDIR%/changes. | |
204 | goto end | |
205 | ) | |
206 | ||
207 | if "%1" == "linkcheck" ( | |
208 | %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck | |
209 | if errorlevel 1 exit /b 1 | |
210 | echo. | |
211 | echo.Link check complete; look for any errors in the above output ^ | |
212 | or in %BUILDDIR%/linkcheck/output.txt. | |
213 | goto end | |
214 | ) | |
215 | ||
216 | if "%1" == "doctest" ( | |
217 | %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest | |
218 | if errorlevel 1 exit /b 1 | |
219 | echo. | |
220 | echo.Testing of doctests in the sources finished, look at the ^ | |
221 | results in %BUILDDIR%/doctest/output.txt. | |
222 | goto end | |
223 | ) | |
224 | ||
225 | if "%1" == "xml" ( | |
226 | %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml | |
227 | if errorlevel 1 exit /b 1 | |
228 | echo. | |
229 | echo.Build finished. The XML files are in %BUILDDIR%/xml. | |
230 | goto end | |
231 | ) | |
232 | ||
233 | if "%1" == "pseudoxml" ( | |
234 | %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml | |
235 | if errorlevel 1 exit /b 1 | |
236 | echo. | |
237 | echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. | |
238 | goto end | |
239 | ) | |
240 | ||
241 | :end |
/* MockupDB documentation theme: overrides layered on top of the
   stylesheet of Sphinx's "default" theme. */
@import url("default.css");

body {
        background-color: white;
        margin-left: 1em;
        margin-right: 1em;
}
7 | ||
8 | div.related { | |
9 | margin-bottom: 1.2em; | |
10 | padding: 0.5em 0; | |
11 | border-top: 1px solid #ccc; | |
12 | margin-top: 0.5em; | |
13 | } | |
14 | ||
15 | div.related a:hover { | |
16 | color: #0095C4; | |
17 | } | |
18 | ||
19 | div.related:first-child { | |
20 | border-top: 0; | |
21 | border-bottom: 1px solid #ccc; | |
22 | } | |
23 | ||
24 | div.sphinxsidebar { | |
25 | background-color: #eeeeee; | |
26 | border-radius: 5px; | |
27 | line-height: 130%; | |
28 | font-size: smaller; | |
29 | } | |
30 | ||
31 | div.sphinxsidebar h3, div.sphinxsidebar h4 { | |
32 | margin-top: 1.5em; | |
33 | } | |
34 | ||
35 | div.sphinxsidebarwrapper > h3:first-child { | |
36 | margin-top: 0.2em; | |
37 | } | |
38 | ||
39 | div.sphinxsidebarwrapper > ul > li > ul > li { | |
40 | margin-bottom: 0.4em; | |
41 | } | |
42 | ||
43 | div.sphinxsidebar a:hover { | |
44 | color: #0095C4; | |
45 | } | |
46 | ||
47 | div.sphinxsidebar input { | |
48 | font-family: 'Lucida Grande',Arial,sans-serif; | |
49 | border: 1px solid #999999; | |
50 | font-size: smaller; | |
51 | border-radius: 3px; | |
52 | } | |
53 | ||
54 | div.sphinxsidebar input[type=text] { | |
55 | max-width: 150px; | |
56 | } | |
57 | ||
58 | div.body { | |
59 | padding: 0 0 0 1.2em; | |
60 | } | |
61 | ||
62 | div.body p { | |
63 | line-height: 140%; | |
64 | } | |
65 | ||
66 | div.body h1, div.body h2, div.body h3, div.body h4, div.body h5, div.body h6 { | |
67 | margin: 0; | |
68 | border: 0; | |
69 | padding: 0.3em 0; | |
70 | } | |
71 | ||
72 | div.body hr { | |
73 | border: 0; | |
74 | background-color: #ccc; | |
75 | height: 1px; | |
76 | } | |
77 | ||
78 | div.body pre { | |
79 | border-radius: 3px; | |
80 | border: 1px solid #ac9; | |
81 | } | |
82 | ||
83 | div.body div.admonition, div.body div.impl-detail { | |
84 | border-radius: 3px; | |
85 | } | |
86 | ||
87 | div.body div.impl-detail > p { | |
88 | margin: 0; | |
89 | } | |
90 | ||
91 | div.body div.seealso { | |
92 | border: 1px solid #dddd66; | |
93 | } | |
94 | ||
95 | div.body a { | |
96 | color: #0072aa; | |
97 | } | |
98 | ||
99 | div.body a:visited { | |
100 | color: #6363bb; | |
101 | } | |
102 | ||
103 | div.body a:hover { | |
104 | color: #00B0E4; | |
105 | } | |
106 | ||
107 | tt, code, pre { | |
108 | font-family: monospace, sans-serif; | |
109 | font-size: 96.5%; | |
110 | } | |
111 | ||
112 | div.body tt, div.body code { | |
113 | border-radius: 3px; | |
114 | } | |
115 | ||
116 | div.body tt.descname, div.body code.descname { | |
117 | font-size: 120%; | |
118 | } | |
119 | ||
120 | div.body tt.xref, div.body a tt, div.body code.xref, div.body a code { | |
121 | font-weight: normal; | |
122 | } | |
123 | ||
124 | .deprecated { | |
125 | border-radius: 3px; | |
126 | } | |
127 | ||
128 | table.docutils { | |
129 | border: 1px solid #ddd; | |
130 | min-width: 20%; | |
131 | border-radius: 3px; | |
132 | margin-top: 10px; | |
133 | margin-bottom: 10px; | |
134 | } | |
135 | ||
136 | table.docutils td, table.docutils th { | |
137 | border: 1px solid #ddd !important; | |
138 | border-radius: 3px; | |
139 | } | |
140 | ||
141 | table p, table li { | |
142 | text-align: left !important; | |
143 | } | |
144 | ||
145 | table.docutils th { | |
146 | background-color: #eee; | |
147 | padding: 0.3em 0.5em; | |
148 | } | |
149 | ||
150 | table.docutils td { | |
151 | background-color: white; | |
152 | padding: 0.3em 0.5em; | |
153 | } | |
154 | ||
155 | table.footnote, table.footnote td { | |
156 | border: 0 !important; | |
157 | } | |
158 | ||
159 | div.footer { | |
160 | line-height: 150%; | |
161 | margin-top: -2em; | |
162 | text-align: right; | |
163 | width: auto; | |
164 | margin-right: 10px; | |
165 | } | |
166 | ||
167 | div.footer a:hover { | |
168 | color: #0095C4; | |
169 | } | |
170 | ||
171 | .refcount { | |
172 | color: #060; | |
173 | } | |
174 | ||
175 | .stableabi { | |
176 | color: #229; | |
177 | } |
0 | [theme] | |
1 | inherit = default | |
2 | stylesheet = pydoctheme.css | |
3 | pygments_style = sphinx | |
4 | ||
5 | [options] | |
6 | bodyfont = 'Lucida Grande', Arial, sans-serif | |
7 | headfont = 'Lucida Grande', Arial, sans-serif | |
8 | footerbgcolor = white | |
9 | footertextcolor = #555555 | |
10 | relbarbgcolor = white | |
11 | relbartextcolor = #666666 | |
12 | relbarlinkcolor = #444444 | |
13 | sidebarbgcolor = white | |
14 | sidebartextcolor = #444444 | |
15 | sidebarlinkcolor = #444444 | |
16 | bgcolor = white | |
17 | textcolor = #222222 | |
18 | linkcolor = #0090c0 | |
19 | visitedlinkcolor = #00608f | |
20 | headtextcolor = #1a1a1a | |
21 | headbgcolor = white | |
22 | headlinkcolor = #aaaaaa |
0 | .. include:: ../README.rst |
0 | ======== | |
1 | Tutorial | |
2 | ======== | |
3 | ||
4 | .. currentmodule:: mockupdb | |
5 | ||
6 | This is the primary documentation for the MockupDB project, and its primary | |
7 | test. | |
8 | ||
9 | We assume some familiarity with PyMongo_ and the `MongoDB Wire Protocol`_. | |
10 | ||
11 | .. contents:: | |
12 | ||
13 | Introduction | |
14 | ------------ | |
15 | ||
16 | You can play with the mock server via ``python -m mockupdb`` and connect from | |
17 | the shell, but that is not tremendously interesting. Better to use it in tests. | |
18 | ||
19 | We begin by running a :class:`.MockupDB` and connecting to it with PyMongo's | |
20 | `~pymongo.mongo_client.MongoClient`: | |
21 | ||
22 | >>> from mockupdb import * | |
23 | >>> server = MockupDB() | |
24 | >>> port = server.run() # Returns the TCP port number it listens on. | |
25 | >>> from pymongo import MongoClient | |
26 | >>> client = MongoClient(server.uri) | |
27 | ||
28 | When the client connects it calls the "ismaster" command, then blocks until | |
29 | the server responds. MockupDB receives the "ismaster" command but does not | |
30 | respond until you tell it to: | |
31 | ||
32 | >>> request = server.receives() | |
33 | >>> request | |
34 | Command({"ismaster": 1}, namespace="admin") | |
35 | ||
36 | We respond: | |
37 | ||
38 | >>> request.replies({'ok': 1}) | |
39 | True | |
40 | ||
41 | In fact this is the default response, so the next time the client calls | |
42 | "ismaster" you could just say ``server.receives().replies()``. | |
43 | ||
44 | The `~MockupDB.receives` call blocks until it receives a request from the | |
client. Responding to each "ismaster" call is tiresome, so tell the server
to send the default response to all ismaster calls:
47 | ||
48 | >>> server.autoresponds('ismaster') | |
49 | _AutoResponder(Matcher(Request({"ismaster": 1})), (), {}) | |
50 | >>> client.admin.command('ismaster') == {'ok': 1} | |
51 | True | |
52 | ||
53 | A call to `~MockupDB.receives` now blocks waiting for some request that | |
54 | does *not* match "ismaster". | |
55 | ||
56 | (Notice that `~Request.replies` returns True. This makes more advanced uses of | |
57 | `~MockupDB.autoresponds` easier, see the reference document.) | |
58 | ||
59 | Reply To Legacy Writes | |
60 | ---------------------- | |
61 | ||
62 | Send an unacknowledged OP_INSERT: | |
63 | ||
64 | >>> from pymongo.write_concern import WriteConcern | |
65 | >>> w0 = WriteConcern(w=0) | |
66 | >>> collection = client.db.coll.with_options(write_concern=w0) | |
67 | >>> collection.insert_one({'_id': 1}) # doctest: +ELLIPSIS | |
68 | <pymongo.results.InsertOneResult object at ...> | |
69 | >>> server.receives() | |
70 | OpInsert({"_id": 1}, namespace="db.coll") | |
71 | ||
72 | If PyMongo sends an unacknowledged OP_INSERT it does not block | |
73 | waiting for you to call `~Request.replies`, but for all other operations it | |
74 | does. Use `go` to defer PyMongo to a background thread so you | |
75 | can respond from the main thread: | |
76 | ||
77 | >>> # Default write concern is acknowledged. | |
78 | >>> collection = client.db.coll | |
79 | >>> from mockupdb import go | |
80 | >>> future = go(collection.insert_one, {'_id': 2}) | |
81 | ||
82 | Pass a method and its arguments to the `go` function, the same as to | |
83 | `functools.partial`. It launches `~pymongo.collection.Collection.insert_one` | |
84 | on a thread and returns a handle to its future outcome. Meanwhile, wait for the | |
85 | client's request to arrive on the main thread: | |
86 | ||
87 | >>> server.receives() | |
88 | OpInsert({"_id": 2}, namespace="db.coll") | |
89 | >>> gle = server.receives() | |
90 | >>> gle | |
91 | Command({"getlasterror": 1}, namespace="db") | |
92 | ||
93 | You could respond with ``{'ok': 1, 'err': None}``, or for convenience: | |
94 | ||
95 | >>> gle.replies_to_gle() | |
96 | True | |
97 | ||
98 | The server's getlasterror response unblocks the client, so its future | |
99 | contains the return value of `~pymongo.collection.Collection.insert_one`, | |
100 | which is an `~pymongo.results.InsertOneResult`: | |
101 | ||
102 | >>> write_result = future() | |
103 | >>> write_result # doctest: +ELLIPSIS | |
104 | <pymongo.results.InsertOneResult object at ...> | |
105 | >>> write_result.inserted_id | |
106 | 2 | |
107 | ||
108 | If you don't need the future's return value, you can express this more tersely | |
109 | with `going`: | |
110 | ||
111 | >>> with going(collection.insert_one, {'_id': 3}): | |
112 | ... server.receives() | |
113 | ... server.receives().replies_to_gle() | |
114 | OpInsert({"_id": 3}, namespace="db.coll") | |
115 | True | |
116 | ||
117 | Reply To Write Commands | |
118 | ----------------------- | |
119 | ||
120 | MockupDB runs the most recently added autoresponders first, and uses the | |
121 | first that matches. Override the previous "ismaster" responder to upgrade | |
122 | the wire protocol: | |
123 | ||
124 | >>> responder = server.autoresponds('ismaster', maxWireVersion=3) | |
125 | ||
126 | Test that PyMongo now uses a write command instead of a legacy insert: | |
127 | ||
128 | >>> client.close() | |
129 | >>> future = go(collection.insert_one, {'_id': 1}) | |
130 | >>> request = server.receives() | |
131 | >>> request | |
132 | Command({"insert": "coll", "ordered": true, "documents": [{"_id": 1}]}, namespace="db") | |
133 | ||
134 | (Note how MockupDB requests and replies are rendered as JSON, not Python. | |
135 | This is mainly to show you the *order* of keys and values, which is sometimes | |
136 | important when testing a driver.) | |
137 | ||
138 | To unblock the background thread, send the default reply of ``{ok: 1}``: | |
139 | ||
140 | >>> request.reply() | |
141 | True | |
142 | >>> assert 1 == future().inserted_id | |
143 | ||
144 | Simulate a command error: | |
145 | ||
146 | >>> future = go(collection.insert_one, {'_id': 1}) | |
147 | >>> server.receives(insert='coll').command_err(11000, 'eek!') | |
148 | True | |
149 | >>> future() | |
150 | Traceback (most recent call last): | |
151 | ... | |
152 | DuplicateKeyError: eek! | |
153 | ||
154 | Or a network error: | |
155 | ||
156 | >>> future = go(collection.insert_one, {'_id': 1}) | |
157 | >>> server.receives(insert='coll').hangup() | |
158 | True | |
159 | >>> future() | |
160 | Traceback (most recent call last): | |
161 | ... | |
162 | AutoReconnect: connection closed | |
163 | ||
164 | Pattern-Match Requests | |
165 | ---------------------- | |
166 | ||
167 | MockupDB's pattern-matching is useful for testing: you can tell the server | |
168 | to verify any aspect of the expected client request. | |
169 | ||
170 | Pass a pattern to `~.MockupDB.receives` to test that the next request | |
171 | matches the pattern: | |
172 | ||
173 | >>> future = go(client.db.command, 'commandFoo') | |
174 | >>> request = server.receives('commandBar') # doctest: +NORMALIZE_WHITESPACE | |
175 | Traceback (most recent call last): | |
176 | ... | |
177 | AssertionError: expected to receive Command({"commandBar": 1}), | |
178 | got Command({"commandFoo": 1}) | |
179 | ||
180 | Even if the pattern does not match, the request is still popped from the | |
181 | queue. | |
182 | ||
183 | If you do not know what order you need to accept requests, you can make a | |
184 | little loop: | |
185 | ||
186 | >>> import traceback | |
187 | >>> def loop(): | |
188 | ... try: | |
189 | ... while server.running: | |
190 | ... # Match queries most restrictive first. | |
191 | ... if server.got(OpQuery, {'a': {'$gt': 1}}): | |
192 | ... server.reply({'a': 2}) | |
193 | ... elif server.got('break'): | |
194 | ... server.ok() | |
195 | ... break | |
196 | ... elif server.got(OpQuery): | |
197 | ... server.reply({'a': 1}, {'a': 2}) | |
198 | ... else: | |
199 | ... server.command_err('unrecognized request') | |
200 | ... except: | |
201 | ... traceback.print_exc() | |
202 | ... raise | |
203 | ... | |
204 | >>> future = go(loop) | |
205 | >>> | |
206 | >>> list(client.db.coll.find()) == [{'a': 1}, {'a': 2}] | |
207 | True | |
208 | >>> list(client.db.coll.find({'a': {'$gt': 1}})) == [{'a': 2}] | |
209 | True | |
210 | >>> client.db.command('break') == {'ok': 1} | |
211 | True | |
212 | >>> future() | |
213 | ||
214 | You can even implement the "shutdown" command: | |
215 | ||
216 | >>> def loop(): | |
217 | ... try: | |
218 | ... while server.running: | |
219 | ... if server.got('shutdown'): | |
220 | ... server.stop() # Hangs up. | |
221 | ... else: | |
222 | ... server.command_err('unrecognized request') | |
223 | ... except: | |
224 | ... traceback.print_exc() | |
225 | ... raise | |
226 | ... | |
227 | >>> future = go(loop) | |
228 | >>> client.db.command('shutdown') | |
229 | Traceback (most recent call last): | |
230 | ... | |
231 | AutoReconnect: connection closed | |
232 | >>> future() | |
233 | >>> server.running | |
234 | False | |
235 | >>> client.close() | |
236 | ||
237 | To show off a difficult test that MockupDB makes easy, assert that | |
238 | PyMongo sends a ``writeConcern`` argument if you specify ``w=1``: | |
239 | ||
240 | >>> server = MockupDB() | |
241 | >>> responder = server.autoresponds('ismaster', maxWireVersion=3) | |
242 | >>> port = server.run() | |
243 | >>> | |
244 | >>> # Specify w=1. This is distinct from the default write concern. | |
245 | >>> client = MongoClient(server.uri, w=1) | |
246 | >>> collection = client.db.coll | |
247 | >>> future = go(collection.insert_one, {'_id': 4}) | |
248 | >>> server.receives({'writeConcern': {'w': 1}}).sends() | |
249 | True | |
250 | >>> client.close() | |
251 | ||
252 | ... but not by default: | |
253 | ||
254 | >>> # Accept the default write concern. | |
255 | >>> client = MongoClient(server.uri) | |
256 | >>> collection = client.db.coll | |
257 | >>> future = go(collection.insert_one, {'_id': 5}) | |
258 | >>> assert 'writeConcern' not in server.receives() | |
259 | >>> client.close() | |
260 | ||
261 | Wait For A Request Impatiently | |
262 | ------------------------------ | |
263 | ||
264 | If your test waits for PyMongo to send a request but receives none, it times out | |
265 | after 10 seconds by default. This way MockupDB ensures that even failing tests | |
266 | all take finite time. | |
267 | ||
268 | To abbreviate the wait, pass a timeout in seconds to `~MockupDB.receives`: | |
269 | ||
270 | >>> server.receives(timeout=0.1) | |
271 | Traceback (most recent call last): | |
272 | ... | |
273 | AssertionError: expected to receive Request(), got nothing | |
274 | ||
275 | Test Cursor Behavior | |
276 | -------------------- | |
277 | ||
278 | Test what happens when a query fails: | |
279 | ||
280 | >>> cursor = collection.find().batch_size(1) | |
281 | >>> future = go(next, cursor) | |
282 | >>> server.receives(OpQuery).fail() | |
283 | True | |
284 | >>> future() | |
285 | Traceback (most recent call last): | |
286 | ... | |
287 | OperationFailure: database error: MockupDB query failure | |
288 | ||
289 | OP_KILL_CURSORS has historically been hard to test. On a single | |
290 | server you could count cursors in `serverStatus`_ to know when one dies. | |
291 | But in a replica set, the count is confounded by replication cursors coming | |
292 | and going, and it is precisely in replica sets that it is crucial to verify | |
293 | PyMongo sends OP_KILL_CURSORS to the right server. | |
294 | ||
295 | You can check the cursor is closed by trying getMores on it until the | |
296 | server returns CursorNotFound. However, if you are in the midst of a | |
297 | getMore when the asynchronous OP_KILL_CURSORS arrives, the server | |
298 | logs "Assertion: 16089:Cannot kill active cursor" and leaves it alive. By | |
299 | sleeping a long time between getMores the test reduces races, but does not | |
300 | eliminate them, and at the cost of sluggishness. | |
301 | ||
302 | But with MockupDB you can test OP_KILL_CURSORS easily and reliably. | |
303 | We start a cursor with its first batch: | |
304 | ||
305 | >>> cursor = collection.find().batch_size(1) | |
306 | >>> future = go(next, cursor) | |
307 | >>> reply = OpReply({'first': 'doc'}, cursor_id=123) | |
308 | >>> server.receives(OpQuery).replies(reply) | |
309 | True | |
310 | >>> future() == {'first': 'doc'} | |
311 | True | |
312 | >>> cursor.alive | |
313 | True | |
314 | ||
315 | The cursor should send OP_KILL_CURSORS if it is garbage-collected: | |
316 | ||
317 | >>> del cursor | |
318 | >>> import gc | |
319 | >>> _ = gc.collect() | |
320 | >>> server.receives(OpKillCursors, cursor_ids=[123]) | |
321 | OpKillCursors([123]) | |
322 | ||
323 | You can simulate normal querying, too: | |
324 | ||
325 | >>> cursor = collection.find().batch_size(1) | |
326 | >>> future = go(list, cursor) | |
327 | >>> documents = [{'_id': 1}, {'foo': 'bar'}, {'beauty': True}] | |
328 | >>> server.receives(OpQuery).replies(OpReply(documents[0], cursor_id=123)) | |
329 | True | |
330 | >>> del documents[0] | |
331 | >>> num_sent = 1 | |
332 | >>> while documents: | |
333 | ... getmore = server.receives(OpGetMore) | |
334 | ... num_to_return = getmore.num_to_return | |
335 | ... print('num_to_return %s' % num_to_return) | |
336 | ... batch = documents[:num_to_return] | |
337 | ... del documents[:num_to_return] | |
338 | ... if documents: | |
339 | ... cursor_id = 123 | |
340 | ... else: | |
341 | ... cursor_id = 0 | |
342 | ... reply = OpReply(batch, | |
343 | ... cursor_id=cursor_id, | |
344 | ... starting_from=num_sent) | |
345 | ... getmore.replies(reply) | |
346 | ... num_sent += len(batch) | |
347 | ... | |
348 | num_to_return 2 | |
349 | True | |
350 | ||
351 | Observe a quirk in the wire protocol: MongoDB treats an initial query | |
352 | with nToReturn of 1 the same as -1 and closes the cursor after the first | |
353 | batch. To work around this, PyMongo overrides a batch size of 1 and asks | |
354 | for 2. | |
355 | ||
356 | At any rate, the loop completes and the cursor receives all documents: | |
357 | ||
358 | >>> future() == [{'_id': 1}, {'foo': 'bar'}, {'beauty': True}] | |
359 | True | |
360 | ||
361 | But this is just a parlor trick. Let us test something serious. | |
362 | ||
363 | Test Server Discovery And Monitoring | |
364 | ------------------------------------ | |
365 | ||
366 | To test PyMongo's server monitor, make the server a secondary: | |
367 | ||
368 | >>> hosts = [server.address_string] | |
369 | >>> secondary_reply = OpReply({ | |
370 | ... 'ismaster': False, | |
371 | ... 'secondary': True, | |
372 | ... 'setName': 'rs', | |
373 | ... 'hosts': hosts}) | |
374 | >>> responder = server.autoresponds('ismaster', secondary_reply) | |
375 | ||
376 | Connect to the replica set: | |
377 | ||
378 | >>> client = MongoClient(server.uri, replicaSet='rs') | |
379 | >>> from mockupdb import wait_until | |
380 | >>> wait_until(lambda: server.address in client.secondaries, | |
381 | ... 'discover secondary') | |
382 | True | |
383 | ||
384 | Add a primary to the host list: | |
385 | ||
386 | >>> primary = MockupDB() | |
387 | >>> port = primary.run() | |
388 | >>> hosts.append(primary.address_string) | |
389 | >>> primary_reply = OpReply({ | |
390 | ... 'ismaster': True, | |
391 | ... 'secondary': False, | |
392 | ... 'setName': 'rs', | |
393 | ... 'hosts': hosts}) | |
394 | >>> responder = primary.autoresponds('ismaster', primary_reply) | |
395 | ||
396 | Client discovers it quickly if there's a pending operation: | |
397 | ||
398 | >>> with going(client.db.command, 'buildinfo'): | |
399 | ... wait_until(lambda: primary.address == client.primary, | |
400 | ... 'discovery primary') | |
401 | ... primary.pop('buildinfo').ok() | |
402 | True | |
403 | True | |
404 | ||
405 | Test Server Selection | |
406 | --------------------- | |
407 | ||
408 | TODO. | |
409 | ||
410 | .. _PyMongo: https://pypi.python.org/pypi/pymongo/ | |
411 | ||
412 | .. _MongoDB Wire Protocol: http://docs.mongodb.org/meta-driver/latest/legacy/mongodb-wire-protocol/ | |
413 | ||
414 | .. _serverStatus: http://docs.mongodb.org/manual/reference/server-status/ | |
415 | ||
416 | .. _collect: https://docs.python.org/2/library/gc.html#gc.collect |
0 | # -*- coding: utf-8 -*- | |
1 | # Copyright 2015 MongoDB, Inc. | |
2 | # | |
3 | # Licensed under the Apache License, Version 2.0 (the "License"); | |
4 | # you may not use this file except in compliance with the License. | |
5 | # You may obtain a copy of the License at | |
6 | # | |
7 | # http://www.apache.org/licenses/LICENSE-2.0 | |
8 | # | |
9 | # Unless required by applicable law or agreed to in writing, software | |
10 | # distributed under the License is distributed on an "AS IS" BASIS, | |
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |
12 | # See the License for the specific language governing permissions and | |
13 | # limitations under the License. | |
14 | ||
15 | """Simulate a MongoDB server. | |
16 | ||
17 | Request Spec | |
18 | ------------ | |
19 | ||
20 | TODO | |
21 | ||
22 | Matcher Spec | |
23 | ------------ | |
24 | ||
25 | TODO | |
26 | ||
27 | Reply Spec | |
28 | ---------- | |
29 | ||
30 | TODO | |
31 | ||
32 | """ | |
33 | ||
34 | from __future__ import print_function | |
35 | ||
36 | __author__ = 'A. Jesse Jiryu Davis' | |
37 | __email__ = 'jesse@mongodb.com' | |
38 | __version__ = '1.1' | |
39 | ||
40 | import collections | |
41 | import contextlib | |
42 | import errno | |
43 | import functools | |
44 | import inspect | |
45 | import os | |
46 | import random | |
47 | import select | |
48 | import ssl as _ssl | |
49 | import socket | |
50 | import struct | |
51 | import traceback | |
52 | import threading | |
53 | import time | |
54 | import weakref | |
55 | import sys | |
56 | from codecs import utf_8_decode as _utf_8_decode | |
57 | ||
58 | try: | |
59 | from queue import Queue, Empty | |
60 | except ImportError: | |
61 | from Queue import Queue, Empty | |
62 | ||
63 | try: | |
64 | from collections import OrderedDict | |
65 | except: | |
66 | from ordereddict import OrderedDict # Python 2.6, "pip install ordereddict" | |
67 | ||
68 | try: | |
69 | from io import StringIO | |
70 | except ImportError: | |
71 | from cStringIO import StringIO | |
72 | ||
73 | # Pure-Python bson lib vendored in from PyMongo 3.0.3. | |
74 | from mockupdb import _bson | |
75 | import mockupdb._bson.codec_options as _codec_options | |
76 | import mockupdb._bson.json_util as _json_util | |
77 | ||
78 | CODEC_OPTIONS = _codec_options.CodecOptions(document_class=OrderedDict) | |
79 | ||
80 | PY3 = sys.version_info[0] == 3 | |
81 | if PY3: | |
82 | string_type = str | |
83 | text_type = str | |
84 | ||
85 | def reraise(exctype, value, trace=None): | |
86 | raise exctype(str(value)).with_traceback(trace) | |
87 | else: | |
88 | string_type = basestring | |
89 | text_type = unicode | |
90 | ||
91 | # "raise x, y, z" raises SyntaxError in Python 3. | |
92 | exec("""def reraise(exctype, value, trace=None): | |
93 | raise exctype, str(value), trace | |
94 | """) | |
95 | ||
96 | ||
97 | __all__ = [ | |
98 | 'MockupDB', 'go', 'going', 'Future', 'wait_until', 'interactive_server', | |
99 | ||
100 | 'OP_REPLY', 'OP_UPDATE', 'OP_INSERT', 'OP_QUERY', 'OP_GET_MORE', | |
101 | 'OP_DELETE', 'OP_KILL_CURSORS', | |
102 | ||
103 | 'QUERY_FLAGS', 'UPDATE_FLAGS', 'INSERT_FLAGS', 'DELETE_FLAGS', | |
104 | 'REPLY_FLAGS', | |
105 | ||
106 | 'Request', 'Command', 'OpQuery', 'OpGetMore', 'OpKillCursors', 'OpInsert', | |
107 | 'OpUpdate', 'OpDelete', 'OpReply', | |
108 | ||
109 | 'Matcher', 'absent', | |
110 | ] | |
111 | ||
112 | ||
def go(fn, *args, **kwargs):
    """Run ``fn(*args, **kwargs)`` on a daemon thread; return a result handle.

    The handle is a function: calling it joins the worker thread (default
    timeout 10 seconds) and either returns ``fn``'s return value or re-raises
    the exception ``fn`` raised. If the worker is still running when the
    timeout expires, calling the handle raises AssertionError.
    """
    if not callable(fn):
        raise TypeError('go() requires a function, not %r' % (fn, ))

    # One-slot holders the worker thread writes into; read after join().
    outcome = [None]
    failure = []

    def worker():
        try:
            outcome[0] = fn(*args, **kwargs)
        except Exception:
            # "sys" can be None during interpreter shutdown; guard the access.
            if sys:
                failure.extend(sys.exc_info())

    thread = threading.Thread(target=worker)
    thread.daemon = True
    thread.start()

    def wait(timeout=10):
        thread.join(timeout)
        if thread.is_alive():
            raise AssertionError('timed out waiting for %r' % fn)
        if failure:
            reraise(*failure)
        return outcome[0]

    return wait
160 | ||
161 | ||
@contextlib.contextmanager
def going(fn, *args, **kwargs):
    """Launch a thread and wait for its result before exiting the code block.

    >>> with going(lambda: 'return value') as future:
    ...     pass
    >>> future() # Won't block, the future is ready by now.
    'return value'

    Or discard the result:

    >>> with going(lambda: "don't care"):
    ...     pass


    If an exception is raised within the context, the result is lost:

    >>> with going(lambda: 'return value') as future:
    ...     assert 1 == 0
    Traceback (most recent call last):
    ...
    AssertionError
    """
    future = go(fn, *args, **kwargs)
    try:
        yield future
    except:
        # We are raising an exception, just try to clean up the future.
        # Save exc_info first: the nested try below may overwrite it.
        exc_info = sys.exc_info()
        try:
            # Shorter than normal timeout.
            future(timeout=1)
        except:
            # The background task failed too. Log its traceback, but re-raise
            # the block's original exception below since that one came first.
            log_message = ('\nerror in %s:\n'
                           % format_call(inspect.currentframe()))
            sys.stderr.write(log_message)
            traceback.print_exc()
        # sys.stderr.write('exc in %s' % format_call(inspect.currentframe()))
        reraise(*exc_info)
    else:
        # Normal exit: block on the background task, raising its exception
        # here if it failed, otherwise discarding its result.
        future(timeout=10)
204 | ||
205 | ||
class Future(object):
    """A thread-safe, one-shot result slot.

    One thread blocks in `result` until another thread calls `set_result`
    exactly once.
    """
    def __init__(self):
        self._value = None
        self._resolved = threading.Event()

    def result(self, timeout=None):
        """Block until the result is set, then return it.

        Raises AssertionError if *timeout* seconds elapse first.
        """
        self._resolved.wait(timeout)
        # Event.wait() always returns None on Python 2.6, so check is_set()
        # rather than relying on wait()'s return value.
        if not self._resolved.is_set():
            raise AssertionError('timed out waiting for Future')
        return self._value

    def set_result(self, result):
        """Resolve the future. May only be called once."""
        if self._resolved.is_set():
            raise RuntimeError("Future is already resolved")
        self._value = result
        self._resolved.set()
223 | ||
224 | ||
def wait_until(predicate, success_description, timeout=10):
    """Poll *predicate* every 100 ms until it returns a truthy value.

    Returns the predicate's first truthy value. If *timeout* seconds
    (10 by default) pass first, raises
    ``AssertionError("Didn't ever <success_description>")``.

    E.g.:

       wait_until(lambda: client.primary == ('a', 1),
                  'connect to the primary')
    """
    deadline = time.time() + timeout
    while True:
        value = predicate()
        if value:
            return value

        if time.time() > deadline:
            raise AssertionError("Didn't ever %s" % success_description)

        time.sleep(0.1)
248 | ||
249 | ||
# MongoDB wire-protocol opcodes (message type codes).
OP_REPLY = 1
OP_UPDATE = 2001
OP_INSERT = 2002
OP_QUERY = 2004
OP_GET_MORE = 2005
OP_DELETE = 2006
OP_KILL_CURSORS = 2007

# Bit flags for each message type. OrderedDicts so flag names appear in a
# stable order when rendered by Request.__repr__.
QUERY_FLAGS = OrderedDict([
    ('TailableCursor', 2),
    ('SlaveOkay', 4),
    ('OplogReplay', 8),
    ('NoTimeout', 16),
    ('AwaitData', 32),
    ('Exhaust', 64),
    ('Partial', 128)])

UPDATE_FLAGS = OrderedDict([
    ('Upsert', 1),
    ('MultiUpdate', 2)])

INSERT_FLAGS = OrderedDict([
    ('ContinueOnError', 1)])

DELETE_FLAGS = OrderedDict([
    ('SingleRemove', 1)])

REPLY_FLAGS = OrderedDict([
    ('CursorNotFound', 1),
    ('QueryFailure', 2)])

# Little-endian int32 / int64 decoders for wire-protocol header fields.
_UNPACK_INT = struct.Struct("<i").unpack
_UNPACK_LONG = struct.Struct("<q").unpack
283 | ||
284 | ||
285 | def _get_c_string(data, position): | |
286 | """Decode a BSON 'C' string to python unicode string.""" | |
287 | end = data.index(b"\x00", position) | |
288 | return _utf_8_decode(data[position:end], None, True)[0], end + 1 | |
289 | ||
290 | ||
291 | class _PeekableQueue(Queue): | |
292 | """Only safe from one consumer thread at a time.""" | |
293 | _NO_ITEM = object() | |
294 | ||
295 | def __init__(self, *args, **kwargs): | |
296 | Queue.__init__(self, *args, **kwargs) | |
297 | self._item = _PeekableQueue._NO_ITEM | |
298 | ||
299 | def peek(self, block=True, timeout=None): | |
300 | if self._item is not _PeekableQueue._NO_ITEM: | |
301 | return self._item | |
302 | else: | |
303 | self._item = self.get(block, timeout) | |
304 | return self._item | |
305 | ||
306 | def get(self, block=True, timeout=None): | |
307 | if self._item is not _PeekableQueue._NO_ITEM: | |
308 | item = self._item | |
309 | self._item = _PeekableQueue._NO_ITEM | |
310 | return item | |
311 | else: | |
312 | return Queue.get(self, block, timeout) | |
313 | ||
314 | ||
class Request(object):
    """Base class for `Command`, `OpInsert`, and so on.

    Some useful asserts you can do in tests:

    >>> {'_id': 0} in OpInsert({'_id': 0})
    True
    >>> {'_id': 1} in OpInsert({'_id': 0})
    False
    >>> {'_id': 1} in OpInsert([{'_id': 0}, {'_id': 1}])
    True
    >>> {'_id': 1} == OpInsert([{'_id': 0}, {'_id': 1}])[1]
    True
    >>> 'field' in Command(field=1)
    True
    >>> 'field' in Command()
    False
    >>> 'field' in Command('ismaster')
    False
    >>> Command(ismaster=False)['ismaster'] is False
    True
    """
    # Wire-protocol opcode for this request type; set by concrete subclasses.
    opcode = None
    # Whether this request is a database command; set by concrete subclasses.
    is_command = None
    # Attribute names excluded from attribute-based matching.
    _non_matched_attrs = 'doc', 'docs'
    # Maps flag names to bit values for this opcode; used by __repr__.
    _flags_map = None

    def __init__(self, *args, **kwargs):
        # Pop mockupdb-internal kwargs; whatever remains (plus positional
        # args) becomes the request document(s).
        self._flags = kwargs.pop('flags', None)
        self._namespace = kwargs.pop('namespace', None)
        self._client = kwargs.pop('client', None)
        self._request_id = kwargs.pop('request_id', None)
        self._server = kwargs.pop('server', None)
        self._verbose = self._server and self._server.verbose
        self._server_port = kwargs.pop('server_port', None)
        self._docs = make_docs(*args, **kwargs)
        if not all(isinstance(doc, collections.Mapping) for doc in self._docs):
            raise_args_err()

    @property
    def doc(self):
        """The request document, if there is exactly one.

        Use this for queries, commands, and legacy deletes. Legacy writes may
        have many documents, OP_GET_MORE and OP_KILL_CURSORS have none.
        """
        assert len(self.docs) == 1, '%r has more than one document' % self
        return self.docs[0]

    @property
    def docs(self):
        """The request documents, if any."""
        return self._docs

    @property
    def namespace(self):
        """The operation namespace or None."""
        return self._namespace

    @property
    def flags(self):
        """The request flags or None."""
        return self._flags

    @property
    def slave_ok(self):
        """True if the SlaveOkay wire protocol flag is set."""
        return self._flags and bool(
            self._flags & QUERY_FLAGS['SlaveOkay'])

    slave_okay = slave_ok
    """Synonym for `.slave_ok`."""

    @property
    def request_id(self):
        """The request id or None."""
        return self._request_id

    @property
    def client_port(self):
        """Client connection's TCP port."""
        return self._client.getpeername()[1]

    @property
    def server(self):
        """The `.MockupDB` server."""
        return self._server

    def assert_matches(self, *args, **kwargs):
        """Assert this matches a `matcher spec`_ and return self."""
        matcher = make_matcher(*args, **kwargs)
        if not matcher.matches(self):
            raise AssertionError('%r does not match %r' % (self, matcher))
        return self

    def matches(self, *args, **kwargs):
        """True if this matches a `matcher spec`_."""
        return make_matcher(*args, **kwargs).matches(self)

    def replies(self, *args, **kwargs):
        """Send an `OpReply` to the client.

        The default reply to a command is ``{'ok': 1}``, otherwise the default
        is empty (no documents).

        Returns True so it is suitable as an `~MockupDB.autoresponds` handler.
        """
        self._replies(*args, **kwargs)
        return True

    ok = send = sends = reply = replies
    """Synonym for `.replies`."""

    def fail(self, err='MockupDB query failure', *args, **kwargs):
        """Reply to a query with the QueryFailure flag and an '$err' key.

        Returns True so it is suitable as an `~MockupDB.autoresponds` handler.
        """
        kwargs.setdefault('flags', 0)
        kwargs['flags'] |= REPLY_FLAGS['QueryFailure']
        kwargs['$err'] = err
        self.replies(*args, **kwargs)
        return True

    def command_err(self, code=1, errmsg='MockupDB command failure',
                    *args, **kwargs):
        """Error reply to a command.

        Returns True so it is suitable as an `~MockupDB.autoresponds` handler.
        """
        kwargs.setdefault('ok', 0)
        kwargs['code'] = code
        kwargs['errmsg'] = errmsg
        self.replies(*args, **kwargs)
        return True

    def hangup(self):
        """Close the connection.

        Returns True so it is suitable as an `~MockupDB.autoresponds` handler.
        """
        if self._server:
            self._server._log('\t%d\thangup' % self.client_port)
        self._client.shutdown(socket.SHUT_RDWR)
        return True

    hangs_up = hangup
    """Synonym for `.hangup`."""

    def _matches_docs(self, docs, other_docs):
        """Overridable method."""
        for i, doc in enumerate(docs):
            other_doc = other_docs[i]
            for key, value in doc.items():
                if value is absent:
                    # The matcher used the "absent" sentinel: the key must be
                    # missing from the actual document.
                    if key in other_doc:
                        return False
                elif other_doc.get(key, None) != value:
                    return False
            if isinstance(doc, (OrderedDict, _bson.SON)):
                # An ordered matcher document also constrains key order.
                if not isinstance(other_doc, (OrderedDict, _bson.SON)):
                    raise TypeError(
                        "Can't compare ordered and unordered document types:"
                        " %r, %r" % (doc, other_doc))
                keys = [key for key, value in doc.items()
                        if value is not absent]
                if not seq_match(keys, list(other_doc.keys())):
                    return False
        return True

    def _replies(self, *args, **kwargs):
        """Overridable method."""
        reply_msg = make_reply(*args, **kwargs)
        if self._server:
            self._server._log('\t%d\t<-- %r' % (self.client_port, reply_msg))
        reply_bytes = reply_msg.reply_bytes(self)
        self._client.sendall(reply_bytes)

    def __contains__(self, item):
        # Membership tests work for whole documents, or (for single-document
        # requests) for field names -- see the class docstring's examples.
        if item in self.docs:
            return True
        if len(self.docs) == 1 and isinstance(item, (string_type, text_type)):
            return item in self.doc
        return False

    def __getitem__(self, item):
        # Single-document requests index by field name; otherwise index into
        # the document list.
        return self.doc[item] if len(self.docs) == 1 else self.docs[item]

    def __str__(self):
        return docs_repr(*self.docs)

    def __repr__(self):
        # Render as JSON-ish text, e.g. Command({"ismaster": 1},
        # namespace="admin"), to show key order -- see tutorial.rst.
        name = self.__class__.__name__
        parts = []
        if self.docs:
            parts.append(docs_repr(*self.docs))

        if self._flags:
            if self._flags_map:
                parts.append('flags=%s' % (
                    '|'.join(name for name, value in self._flags_map.items()
                             if self._flags & value)))
            else:
                parts.append('flags=%d' % self._flags)

        if self._namespace:
            parts.append('namespace="%s"' % self._namespace)

        return '%s(%s)' % (name, ', '.join(str(part) for part in parts))
524 | ||
525 | ||
class OpQuery(Request):
    """A query (besides a command) the client executes on the server.

    >>> OpQuery({'i': {'$gt': 2}}, fields={'j': False})
    OpQuery({"i": {"$gt": 2}}, fields={"j": false})
    """
    opcode = OP_QUERY
    is_command = False
    _flags_map = QUERY_FLAGS

    @classmethod
    def unpack(cls, msg, client, server, request_id):
        """Parse message and return an `OpQuery` or `Command`.

        Takes the client message as bytes, the client and server socket objects,
        and the client request id.
        """
        flags, = _UNPACK_INT(msg[:4])
        namespace, pos = _get_c_string(msg, 4)
        # Commands travel as queries on the "db.$cmd" namespace.
        is_command = namespace.endswith('.$cmd')
        num_to_skip, = _UNPACK_INT(msg[pos:pos + 4])
        pos += 4
        num_to_return, = _UNPACK_INT(msg[pos:pos + 4])
        pos += 4
        docs = _bson.decode_all(msg[pos:], CODEC_OPTIONS)
        if is_command:
            # A command carries exactly one document.
            assert len(docs) == 1
            command_ns = namespace[:-len('.$cmd')]
            return Command(docs, namespace=command_ns, flags=flags,
                           client=client, request_id=request_id, server=server)
        else:
            if len(docs) == 1:
                fields = None
            else:
                # An optional second document is the projection ("fields").
                assert len(docs) == 2
                fields = docs[1]
            return OpQuery(docs[0], fields=fields, namespace=namespace,
                           flags=flags, num_to_skip=num_to_skip,
                           num_to_return=num_to_return, client=client,
                           request_id=request_id, server=server)

    def __init__(self, *args, **kwargs):
        fields = kwargs.pop('fields', None)
        # collections.Mapping was deprecated in Python 3.3 and removed in
        # 3.10; use collections.abc.Mapping, falling back for Python 2.
        try:
            from collections.abc import Mapping
        except ImportError:  # Python 2.
            from collections import Mapping
        if fields is not None and not isinstance(fields, Mapping):
            raise_args_err()
        self._fields = fields
        self._num_to_skip = kwargs.pop('num_to_skip', None)
        self._num_to_return = kwargs.pop('num_to_return', None)
        super(OpQuery, self).__init__(*args, **kwargs)
        if not self._docs:
            self._docs = [{}]  # Default query filter.
        elif len(self._docs) > 1:
            raise_args_err('OpQuery too many documents', ValueError)

    @property
    def num_to_skip(self):
        """Client query's numToSkip or None."""
        return self._num_to_skip

    @property
    def num_to_return(self):
        """Client query's numToReturn or None."""
        return self._num_to_return

    @property
    def fields(self):
        """Client query's fields selector or None."""
        return self._fields

    def __repr__(self):
        # Extend the base repr with query-specific attributes.
        rep = super(OpQuery, self).__repr__().rstrip(')')
        if self._fields:
            rep += ', fields=%s' % docs_repr(self._fields)
        if self._num_to_skip is not None:
            rep += ', numToSkip=%d' % self._num_to_skip
        if self._num_to_return is not None:
            rep += ', numToReturn=%d' % self._num_to_return
        return rep + ')'
604 | ||
605 | ||
class Command(OpQuery):
    """A command the client executes on the server."""
    is_command = True

    # Check command name case-insensitively.
    _non_matched_attrs = OpQuery._non_matched_attrs + ('command_name', )

    @property
    def command_name(self):
        """The command name or None.

        >>> Command({'count': 'collection'}).command_name
        'count'
        >>> Command('aggregate', 'collection', cursor=absent).command_name
        'aggregate'
        """
        if self.docs and self.docs[0]:
            return next(iter(self.docs[0]))

    def _matches_docs(self, docs, other_docs):
        """Match the command name case-insensitively, the rest normally."""
        assert len(docs) == len(other_docs) == 1
        my_items = list(docs[0].items())
        their_items = list(other_docs[0].items())

        if my_items and their_items:
            my_first, their_first = my_items[0], their_items[0]
            # First key is the command name: compare ignoring case.
            if my_first[0].lower() != their_first[0].lower():
                return False
            if my_first[1] != their_first[1]:
                return False
        # Remaining key-value pairs match per the base class's rules.
        return super(Command, self)._matches_docs(
            [OrderedDict(my_items[1:])],
            [OrderedDict(their_items[1:])])

    def _replies(self, *args, **kwargs):
        """Send a single reply document, defaulting "ok" to 1."""
        reply = make_reply(*args, **kwargs)
        if reply.docs:
            if len(reply.docs) > 1:
                raise ValueError('Command reply with multiple documents: %s'
                                 % (reply.docs, ))
            reply.doc.setdefault('ok', 1)
        else:
            reply.docs = [{'ok': 1}]
        super(Command, self)._replies(reply)

    def replies_to_gle(self, **kwargs):
        """Send a getlasterror response.

        Defaults to ``{ok: 1, err: null}``. Add or override values by passing
        keyword arguments.

        Returns True so it is suitable as an `~MockupDB.autoresponds` handler.
        """
        if 'err' not in kwargs:
            kwargs['err'] = None
        return self.replies(**kwargs)
663 | ||
664 | ||
class OpGetMore(Request):
    """An OP_GET_MORE the client executes on the server."""
    @classmethod
    def unpack(cls, msg, client, server, request_id):
        """Parse message and return an `OpGetMore`.

        Takes the client message as bytes, the client and server socket objects,
        and the client request id.
        """
        # Layout parsed here: int32 (stored as flags), cstring namespace,
        # int32 numberToReturn, int64 cursorID.
        flags, = _UNPACK_INT(msg[:4])
        namespace, pos = _get_c_string(msg, 4)
        num_to_return, = _UNPACK_INT(msg[pos:pos + 4])
        pos += 4
        cursor_id, = _UNPACK_LONG(msg[pos:pos + 8])
        return OpGetMore(namespace=namespace, flags=flags, client=client,
                         num_to_return=num_to_return, cursor_id=cursor_id,
                         request_id=request_id, server=server)

    def __init__(self, **kwargs):
        # Both default to None so a prototype OpGetMore() matches any value
        # (Matcher treats None as "match anything").
        self._num_to_return = kwargs.pop('num_to_return', None)
        self._cursor_id = kwargs.pop('cursor_id', None)
        super(OpGetMore, self).__init__(**kwargs)

    @property
    def num_to_return(self):
        """The client message's numToReturn field."""
        return self._num_to_return

    @property
    def cursor_id(self):
        """The client message's cursorId field."""
        return self._cursor_id
697 | ||
698 | ||
class OpKillCursors(Request):
    """An OP_KILL_CURSORS the client executes on the server."""
    @classmethod
    def unpack(cls, msg, client, server, _):
        """Parse message and return an `OpKillCursors`.

        Takes the client message as bytes, the client and server socket objects,
        and the client request id.
        """
        # Leading 4 bytes are reserved.
        num_of_cursor_ids, = _UNPACK_INT(msg[4:8])
        cursor_ids = []
        pos = 8
        for _ in range(num_of_cursor_ids):
            # Cursor ids are int64 on the wire; previously read as int32,
            # which yielded wrong ids and misaligned subsequent reads.
            cursor_ids.append(_UNPACK_LONG(msg[pos:pos + 8])[0])
            pos += 8
        return OpKillCursors(client=client, cursor_ids=cursor_ids,
                             server=server)

    def __init__(self, **kwargs):
        self._cursor_ids = kwargs.pop('cursor_ids', None)
        super(OpKillCursors, self).__init__(**kwargs)

    @property
    def cursor_ids(self):
        """List of cursor ids the client wants to kill."""
        return self._cursor_ids

    def __repr__(self):
        return '%s(%s)' % (self.__class__.__name__, self._cursor_ids)
729 | ||
730 | ||
class _LegacyWrite(Request):
    """Base for legacy write ops: OP_INSERT, OP_UPDATE, OP_DELETE."""
    is_command = False
733 | ||
734 | ||
class OpInsert(_LegacyWrite):
    """A legacy OP_INSERT the client executes on the server."""
    opcode = OP_INSERT
    _flags_map = INSERT_FLAGS

    @classmethod
    def unpack(cls, msg, client, server, request_id):
        """Parse message and return an `OpInsert`.

        Takes the client message as bytes, the client and server socket objects,
        and the client request id.
        """
        flags, = _UNPACK_INT(msg[:4])
        namespace, offset = _get_c_string(msg, 4)
        documents = _bson.decode_all(msg[offset:], CODEC_OPTIONS)
        return cls(*documents, namespace=namespace, flags=flags,
                   client=client, request_id=request_id, server=server)
752 | ||
753 | ||
class OpUpdate(_LegacyWrite):
    """A legacy OP_UPDATE the client executes on the server."""
    opcode = OP_UPDATE
    _flags_map = UPDATE_FLAGS

    @classmethod
    def unpack(cls, msg, client, server, request_id):
        """Parse message and return an `OpUpdate`.

        Takes the client message as bytes, the client and server socket objects,
        and the client request id.
        """
        # The leading 4 bytes of OP_UPDATE are reserved.
        namespace, offset = _get_c_string(msg, 4)
        flags, = _UNPACK_INT(msg[offset:offset + 4])
        offset += 4
        documents = _bson.decode_all(msg[offset:], CODEC_OPTIONS)
        return cls(*documents, namespace=namespace, flags=flags,
                   client=client, request_id=request_id, server=server)
772 | ||
773 | ||
class OpDelete(_LegacyWrite):
    """A legacy OP_DELETE the client executes on the server."""
    opcode = OP_DELETE
    _flags_map = DELETE_FLAGS

    @classmethod
    def unpack(cls, msg, client, server, request_id):
        """Parse message and return an `OpDelete`.

        Takes the client message as bytes, the client and server socket objects,
        and the client request id.
        """
        # The leading 4 bytes of OP_DELETE are reserved.
        namespace, offset = _get_c_string(msg, 4)
        flags, = _UNPACK_INT(msg[offset:offset + 4])
        offset += 4
        documents = _bson.decode_all(msg[offset:], CODEC_OPTIONS)
        return cls(*documents, namespace=namespace, flags=flags,
                   client=client, request_id=request_id, server=server)
792 | ||
793 | ||
class OpReply(object):
    """A reply from `MockupDB` to the client."""
    def __init__(self, *args, **kwargs):
        self._flags = kwargs.pop('flags', 0)
        self._cursor_id = kwargs.pop('cursor_id', 0)
        self._starting_from = kwargs.pop('starting_from', 0)
        self._docs = make_docs(*args, **kwargs)

    @property
    def docs(self):
        """The reply documents, if any."""
        return self._docs

    @docs.setter
    def docs(self, docs):
        # Normalize through make_docs, same as __init__.
        self._docs = make_docs(docs)

    @property
    def doc(self):
        """Contents of reply.

        Useful for replies to commands; replies to other messages may have no
        documents or multiple documents.
        """
        assert len(self._docs) == 1, '%s has more than one document' % self
        return self._docs[0]

    def update(self, *args, **kwargs):
        """Update the document. Same as ``dict().update()``.

        >>> reply = OpReply({'ismaster': True})
        >>> reply.update(maxWireVersion=3)
        >>> reply.doc['maxWireVersion']
        3
        >>> reply.update({'maxWriteBatchSize': 10, 'msg': 'isdbgrid'})
        """
        self.doc.update(*args, **kwargs)

    def reply_bytes(self, request):
        """Take a `Request` and return an OP_REPLY message as bytes."""
        # OP_REPLY body: flags, cursorId, startingFrom, numberReturned,
        # then the BSON documents.
        body = struct.pack('<iqii', self._flags, self._cursor_id,
                           self._starting_from, len(self._docs))
        body += b''.join(_bson.BSON.encode(doc) for doc in self._docs)

        # Standard message header: messageLength, requestID, responseTo,
        # opCode.
        header = struct.pack('<iiii',
                             16 + len(body),
                             random.randint(0, 1000000),
                             request.request_id,
                             OP_REPLY)
        return header + body

    def __str__(self):
        return docs_repr(*self._docs)

    def __repr__(self):
        pieces = [str(self)]
        if self._starting_from:
            pieces.append('starting_from=%d' % self._starting_from)
        return '%s(%s)' % (self.__class__.__name__, ', '.join(pieces))
858 | ||
859 | ||
# Sentinel for matchers: Matcher({'field': absent}) matches only requests
# that do NOT contain 'field'. See the doctests on Matcher.matches.
absent = {'absent': 1}
861 | ||
862 | ||
class Matcher(object):
    """Matches a subset of `.Request` objects.

    Initialized with a `request spec`_.

    Used by `~MockupDB.receives` to assert the client sent the expected request,
    and by `~MockupDB.got` to test if it did and return ``True`` or ``False``.
    Used by `.autoresponds` to match requests with autoresponses.
    """
    def __init__(self, *args, **kwargs):
        self._kwargs = kwargs
        # Normalize the spec into a prototype Request to compare against.
        self._prototype = make_prototype_request(*args, **kwargs)

    def matches(self, *args, **kwargs):
        """Take a `request spec`_ and return ``True`` or ``False``.

        .. request-matching rules::

        The empty matcher matches anything:

        >>> Matcher().matches({'a': 1})
        True
        >>> Matcher().matches({'a': 1}, {'a': 1})
        True
        >>> Matcher().matches('ismaster')
        True

        A matcher's document matches if its key-value pairs are a subset of the
        request's:

        >>> Matcher({'a': 1}).matches({'a': 1})
        True
        >>> Matcher({'a': 2}).matches({'a': 1})
        False
        >>> Matcher({'a': 1}).matches({'a': 1, 'b': 1})
        True

        Prohibit a field:

        >>> Matcher({'field': absent})
        Matcher(Request({"field": {"absent": 1}}))
        >>> Matcher({'field': absent}).matches({'field': 1})
        False
        >>> Matcher({'field': absent}).matches({'otherField': 1})
        True

        Order matters if you use an OrderedDict:

        >>> doc0 = OrderedDict([('a', 1), ('b', 1)])
        >>> doc1 = OrderedDict([('b', 1), ('a', 1)])
        >>> Matcher(doc0).matches(doc0)
        True
        >>> Matcher(doc0).matches(doc1)
        False

        The matcher must have the same number of documents as the request:

        >>> Matcher().matches()
        True
        >>> Matcher([]).matches([])
        True
        >>> Matcher({'a': 2}).matches({'a': 1}, {'a': 1})
        False

        By default, it matches any opcode:

        >>> m = Matcher()
        >>> m.matches(OpQuery)
        True
        >>> m.matches(OpInsert)
        True

        You can specify what request opcode to match:

        >>> m = Matcher(OpQuery)
        >>> m.matches(OpInsert, {'_id': 1})
        False
        >>> m.matches(OpQuery, {'_id': 1})
        True

        Commands are queries on some database's "database.$cmd" namespace.
        They are specially prohibited from matching regular queries:

        >>> Matcher(OpQuery).matches(Command)
        False
        >>> Matcher(Command).matches(Command)
        True
        >>> Matcher(OpQuery).matches(OpQuery)
        True
        >>> Matcher(Command).matches(OpQuery)
        False

        The command name is matched case-insensitively:

        >>> Matcher(Command('ismaster')).matches(Command('IsMaster'))
        True

        You can match properties specific to certain opcodes:

        >>> m = Matcher(OpGetMore, num_to_return=3)
        >>> m.matches(OpGetMore())
        False
        >>> m.matches(OpGetMore(num_to_return=2))
        False
        >>> m.matches(OpGetMore(num_to_return=3))
        True
        >>> m = Matcher(OpQuery(namespace='db.collection'))
        >>> m.matches(OpQuery)
        False
        >>> m.matches(OpQuery(namespace='db.collection'))
        True

        It matches any wire protocol header bits you specify:

        >>> m = Matcher(flags=QUERY_FLAGS['SlaveOkay'])
        >>> m.matches(OpQuery({'_id': 1}))
        False
        >>> m.matches(OpQuery({'_id': 1}, flags=QUERY_FLAGS['SlaveOkay']))
        True

        If you match on flags, be careful to also match on opcode. For example,
        if you simply check that the flag in bit position 0 is set:

        >>> m = Matcher(flags=INSERT_FLAGS['ContinueOnError'])

        ... you will match any request with that flag:

        >>> m.matches(OpDelete, flags=DELETE_FLAGS['SingleRemove'])
        True

        So specify the opcode, too:

        >>> m = Matcher(OpInsert, flags=INSERT_FLAGS['ContinueOnError'])
        >>> m.matches(OpDelete, flags=DELETE_FLAGS['SingleRemove'])
        False
        """
        request = make_prototype_request(*args, **kwargs)
        if self._prototype.opcode not in (None, request.opcode):
            return False
        if self._prototype.is_command not in (None, request.is_command):
            return False
        for name in dir(self._prototype):
            if name.startswith('_') or name in request._non_matched_attrs:
                # Ignore privates, and handle documents specially.
                continue
            prototype_value = getattr(self._prototype, name, None)
            if inspect.ismethod(prototype_value):
                continue
            actual_value = getattr(request, name, None)
            # A prototype value of None means "match any value".
            if prototype_value not in (None, actual_value):
                return False
        # A prototype with zero docs matches any number of request docs.
        if len(self._prototype.docs) not in (0, len(request.docs)):
            return False

        return self._prototype._matches_docs(self._prototype.docs, request.docs)

    @property
    def prototype(self):
        """The prototype `.Request` used to match actual requests with."""
        return self._prototype

    def __repr__(self):
        return '%s(%r)' % (self.__class__.__name__, self._prototype)
1026 | ||
1027 | ||
1028 | def _synchronized(meth): | |
1029 | """Call method while holding a lock.""" | |
1030 | @functools.wraps(meth) | |
1031 | def wrapper(self, *args, **kwargs): | |
1032 | with self._lock: | |
1033 | return meth(self, *args, **kwargs) | |
1034 | ||
1035 | return wrapper | |
1036 | ||
1037 | ||
class _AutoResponder(object):
    """Pairs a `Matcher` with either a canned reply spec or a handler."""
    def __init__(self, server, matcher, *args, **kwargs):
        self._server = server
        if inspect.isfunction(matcher) or inspect.ismethod(matcher):
            # A bare handler function: match every request.
            if args or kwargs:
                raise_args_err()
            self._matcher = Matcher()
            self._handler = matcher
            self._args, self._kwargs = (), {}
        else:
            self._matcher = make_matcher(matcher)
            if args and callable(args[0]):
                # A matcher plus a handler function.
                if args[1:] or kwargs:
                    raise_args_err()
                self._handler = args[0]
                self._args, self._kwargs = (), {}
            else:
                # A matcher plus a reply spec.
                self._handler = None
                self._args, self._kwargs = args, kwargs

    def handle(self, request):
        """Reply to a matching *request*; return truthy if handled."""
        if not self._matcher.matches(request):
            return None
        if self._handler:
            return self._handler(request)
        # Command.replies() overrides Request.replies() with special
        # logic, which is why we saved args and kwargs until now to
        # pass it into request.replies, instead of making an OpReply
        # ourselves in __init__.
        request.replies(*self._args, **self._kwargs)
        return True

    def cancel(self):
        """Stop autoresponding."""
        self._server.cancel_responder(self)

    def __repr__(self):
        return '_AutoResponder(%r, %r, %r)' % (
            self._matcher, self._args, self._kwargs)
1080 | ||
1081 | ||
1082 | class MockupDB(object): | |
1083 | """A simulated mongod or mongos. | |
1084 | ||
1085 | Call `run` to start the server, and always `close` it to avoid exceptions | |
1086 | during interpreter shutdown. | |
1087 | ||
1088 | See the tutorial for comprehensive examples. | |
1089 | ||
1090 | :Optional parameters: | |
1091 | - `port`: listening port number. If not specified, choose | |
1092 | some unused port and return the port number from `run`. | |
1093 | - `verbose`: if ``True``, print requests and replies to stdout. | |
1094 | - `request_timeout`: seconds to wait for the next client request, or else | |
1095 | assert. Default 10 seconds. Pass int(1e6) to disable. | |
1096 | - `auto_ismaster`: pass ``True`` to autorespond ``{'ok': 1}`` to | |
1097 | ismaster requests, or pass a dict or `OpReply`. | |
1098 | - `ssl`: pass ``True`` to require SSL. | |
1099 | """ | |
    def __init__(self, port=None, verbose=False,
                 request_timeout=10, auto_ismaster=None,
                 ssl=False):
        # port=None means "bind any free port" (resolved in run()).
        self._address = ('localhost', port)
        self._verbose = verbose
        self._label = None
        self._ssl = ssl

        self._request_timeout = request_timeout

        self._listening_sock = None
        self._accept_thread = None

        # Track sockets that we want to close in stop(). Keys are sockets,
        # values are None (this could be a WeakSet but it's new in Python 2.7).
        self._server_threads = weakref.WeakKeyDictionary()
        self._server_socks = weakref.WeakKeyDictionary()
        self._stopped = False
        self._request_q = _PeekableQueue()
        self._requests_count = 0
        self._lock = threading.Lock()

        # List of (request_matcher, args, kwargs), where args and kwargs are
        # like those sent to request.reply().
        self._autoresponders = []

        # auto_ismaster=True installs a minimal valid ismaster autoresponse;
        # a dict or OpReply customizes the response instead.
        if auto_ismaster is True:
            self.autoresponds(Command('ismaster'), {'ismaster': True})
        elif auto_ismaster:
            self.autoresponds(Command('ismaster'), auto_ismaster)
1130 | ||
    @_synchronized
    def run(self):
        """Begin serving. Returns the bound port."""
        self._listening_sock, self._address = bind_socket(self._address)
        if self._ssl:
            certfile = os.path.join(os.path.dirname(__file__), 'server.pem')
            # NOTE(review): ssl.wrap_socket is deprecated (removed in Python
            # 3.12); migrate to SSLContext.wrap_socket when compatibility
            # requirements allow.
            self._listening_sock = _ssl.wrap_socket(
                self._listening_sock,
                certfile=certfile,
                server_side=True)
        self._accept_thread = threading.Thread(target=self._accept_loop)
        # Daemon thread so a forgotten server doesn't block interpreter exit.
        self._accept_thread.daemon = True
        self._accept_thread.start()
        return self.port
1145 | ||
    @_synchronized
    def stop(self):
        """Stop serving. Always call this to clean up after yourself."""
        self._stopped = True
        threads = [self._accept_thread]
        threads.extend(self._server_threads)
        # Closing the sockets makes the accept/server loops exit.
        self._listening_sock.close()
        for sock in self._server_socks:
            sock.close()

        # Release our own lock while joining so server threads that need it
        # can finish (presumably _unlock() is defined elsewhere in this
        # class -- confirm).
        with self._unlock():
            for thread in threads:
                thread.join(10)
1159 | ||
    def receives(self, *args, **kwargs):
        """Pop the next `Request` and assert it matches.

        Returns None if the server is stopped.

        Pass a `Request` or request pattern to specify what client request to
        expect. See the tutorial for examples. Pass ``timeout`` as a keyword
        argument to override this server's ``request_timeout``.
        """
        timeout = kwargs.pop('timeout', self._request_timeout)
        end = time.time() + timeout
        matcher = Matcher(*args, **kwargs)
        # Poll the queue in short intervals until a request arrives, the
        # deadline passes, or the server is stopped.
        while not self._stopped:
            try:
                # Short timeout so we notice if the server is stopped.
                request = self._request_q.get(timeout=0.05)
            except Empty:
                if time.time() > end:
                    raise AssertionError('expected to receive %r, got nothing'
                                         % matcher.prototype)
            else:
                if matcher.matches(request):
                    return request
                else:
                    raise AssertionError('expected to receive %r, got %r'
                                         % (matcher.prototype, request))

    gets = pop = receive = receives
    """Synonym for `receives`."""
1189 | ||
1190 | def got(self, *args, **kwargs): | |
1191 | """Does `.request` match the given `request spec`_? | |
1192 | ||
1193 | >>> s = MockupDB(auto_ismaster=True) | |
1194 | >>> port = s.run() | |
1195 | >>> s.got(timeout=0) # No request enqueued. | |
1196 | False | |
1197 | >>> from pymongo import MongoClient | |
1198 | >>> client = MongoClient(s.uri) | |
1199 | >>> future = go(client.db.command, 'foo') | |
1200 | >>> s.got('foo') | |
1201 | True | |
1202 | >>> s.got(Command('foo', namespace='db')) | |
1203 | True | |
1204 | >>> s.got(Command('foo', key='value')) | |
1205 | False | |
1206 | >>> s.ok() | |
1207 | >>> future() == {'ok': 1} | |
1208 | True | |
1209 | >>> s.stop() | |
1210 | """ | |
1211 | timeout = kwargs.pop('timeout', self._request_timeout) | |
1212 | end = time.time() + timeout | |
1213 | matcher = make_matcher(*args, **kwargs) | |
1214 | ||
1215 | while not self._stopped: | |
1216 | try: | |
1217 | # Short timeout so we notice if the server is stopped. | |
1218 | request = self._request_q.peek(timeout=timeout) | |
1219 | except Empty: | |
1220 | if time.time() > end: | |
1221 | return False | |
1222 | else: | |
1223 | return matcher.matches(request) | |
1224 | ||
1225 | wait = got | |
1226 | """Synonym for `got`.""" | |
1227 | ||
    def replies(self, *args, **kwargs):
        """Call `~Request.reply` on the currently enqueued request.

        Pops the request from the queue first.
        """
        self.pop().replies(*args, **kwargs)

    ok = send = sends = reply = replies
    """Synonym for `.replies`."""
1234 | ||
    def fail(self, *args, **kwargs):
        """Call `~Request.fail` on the currently enqueued request.

        Pops the request from the queue first.
        """
        self.pop().fail(*args, **kwargs)
1238 | ||
    def command_err(self, *args, **kwargs):
        """Call `~Request.command_err` on the currently enqueued request.

        Pops the request from the queue first.
        """
        self.pop().command_err(*args, **kwargs)
1242 | ||
    def hangup(self):
        """Call `~Request.hangup` on the currently enqueued request.

        Pops the request from the queue first.
        """
        self.pop().hangup()

    hangs_up = hangup
    """Synonym for `.hangup`."""
1249 | ||
    @_synchronized
    def autoresponds(self, matcher, *args, **kwargs):
        """Send a canned reply to all matching client requests.

        ``matcher`` is a `Matcher` or a command name, or an instance of
        `OpInsert`, `OpQuery`, etc.

        >>> s = MockupDB()
        >>> port = s.run()
        >>>
        >>> from pymongo import MongoClient
        >>> client = MongoClient(s.uri)
        >>> responder = s.autoresponds('ismaster')
        >>> client.admin.command('ismaster') == {'ok': 1}
        True

        The remaining arguments are a `reply spec`_:

        >>> responder = s.autoresponds('bar', ok=0, errmsg='err')
        >>> client.db.command('bar')
        Traceback (most recent call last):
        ...
        OperationFailure: command SON([('bar', 1)]) on namespace db.$cmd failed: err
        >>> responder = s.autoresponds(OpQuery(namespace='db.collection'),
        ...                            [{'_id': 1}, {'_id': 2}])
        >>> list(client.db.collection.find()) == [{'_id': 1}, {'_id': 2}]
        True
        >>> responder = s.autoresponds(OpQuery, {'a': 1}, {'a': 2})
        >>> list(client.db.collection.find()) == [{'a': 1}, {'a': 2}]
        True

        Remove an autoresponder like:

        >>> responder.cancel()

        If the request currently at the head of the queue matches, it is popped
        and replied to. Future matching requests skip the queue.

        >>> future = go(client.db.command, 'baz')
        >>> responder = s.autoresponds('baz', {'key': 'value'})
        >>> future() == {'ok': 1, 'key': 'value'}
        True

        Responders are applied in order, most recently added first, until one
        matches:

        >>> responder = s.autoresponds('baz')
        >>> client.db.command('baz') == {'ok': 1}
        True
        >>> responder.cancel()
        >>> # The previous responder takes over again.
        >>> client.db.command('baz') == {'ok': 1, 'key': 'value'}
        True

        You can pass a request handler in place of the reply spec. Return
        True if you handled the request:

        >>> responder = s.autoresponds('baz', lambda r: r.ok(a=2))

        The standard `Request.ok`, `~Request.replies`, `~Request.fail`,
        `~Request.hangup` and so on all return True to make them suitable
        as handler functions.

        >>> client.db.command('baz') == {'ok': 1, 'a': 2}
        True

        If the request is not handled, it is checked against the remaining
        responders, or enqueued if none match.

        You can pass the handler as the only argument so it receives *all*
        requests. For example you could log them, then return None to allow
        other handlers to run:

        >>> def logger(request):
        ...     if not request.matches('ismaster'):
        ...         print('logging: %r' % request)
        >>> responder = s.autoresponds(logger)
        >>> client.db.command('baz') == {'ok': 1, 'a': 2}
        logging: Command({"baz": 1}, flags=SlaveOkay, namespace="db")
        True

        The synonym `subscribe` better expresses your intent if your handler
        never returns True:

        >>> subscriber = s.subscribe(logger)

        .. doctest:
            :hide:

            >>> client.close()
            >>> s.stop()
        """
        responder = _AutoResponder(self, matcher, *args, **kwargs)
        self._autoresponders.append(responder)
        # If a request is already waiting at the head of the queue, give the
        # new responder a chance to answer it right away.
        try:
            request = self._request_q.peek(block=False)
        except Empty:
            pass
        else:
            if responder.handle(request):
                self._request_q.get_nowait()  # Pop it.

        return responder

    subscribe = autoresponds
    """Synonym for `.autoresponds`."""
1356 | ||
    @_synchronized
    def cancel_responder(self, responder):
        """Cancel a responder that was registered with `autoresponds`.

        Raises ValueError if the responder is not currently registered.
        """
        self._autoresponders.remove(responder)
1361 | ||
    @property
    def address(self):
        """The listening (host, port)."""
        return self._address

    @property
    def address_string(self):
        """The listening "host:port"."""
        return '%s:%d' % self._address

    @property
    def host(self):
        """The listening hostname."""
        return self._address[0]

    @property
    def port(self):
        """The listening port."""
        return self._address[1]

    @property
    def uri(self):
        """Connection string to pass to `~pymongo.mongo_client.MongoClient`."""
        assert self.host and self.port
        uri = 'mongodb://%s:%s' % self._address
        # The conditional binds looser than "+": this appends "/?ssl=true"
        # only when SSL is enabled.
        return uri + '/?ssl=true' if self._ssl else uri
1388 | ||
    @property
    def verbose(self):
        """Whether verbose logging is turned on."""
        return self._verbose
1393 | ||
1394 | @verbose.setter | |
1395 | def verbose(self, value): | |
1396 | if not isinstance(value, bool): | |
1397 | raise TypeError('value must be True or False, not %r' % value) | |
1398 | self._verbose = value | |
1399 | ||
    @property
    def label(self):
        """Label prefixed to log messages, or None."""
        return self._label
1404 | ||
    @label.setter
    def label(self, value):
        # No validation: any object (or None) may serve as the log label.
        self._label = value
1408 | ||
    @property
    def requests_count(self):
        """Number of requests this server has received.

        Includes autoresponded requests.
        """
        # Incremented by _server_loop for each request read off the wire.
        return self._requests_count
1416 | ||
1417 | @property | |
1418 | def request(self): | |
1419 | """The currently enqueued `Request`, or None. | |
1420 | ||
1421 | .. warning:: This property is useful to check what the current request | |
1422 | is, but the pattern ``server.request.replies()`` is dangerous: you | |
1423 | must follow it with ``server.pop()`` or the current request remains | |
1424 | enqueued. Better to reply with ``server.pop().replies()`` than | |
1425 | ``server.request.replies()`` or any variation on it. | |
1426 | """ | |
1427 | return self.got() or None | |
1428 | ||
    @property
    @_synchronized
    def running(self):
        """If this server is started and not stopped.

        Note: evaluates truthy/falsy rather than strictly True/False
        (None before the accept thread exists, per `and` semantics).
        """
        return self._accept_thread and not self._stopped
1434 | ||
1435 | def _accept_loop(self): | |
1436 | """Accept client connections and spawn a thread for each.""" | |
1437 | self._listening_sock.setblocking(0) | |
1438 | while not self._stopped: | |
1439 | try: | |
1440 | # Wait a short time to accept. | |
1441 | if select.select([self._listening_sock.fileno()], [], [], 1): | |
1442 | client, client_addr = self._listening_sock.accept() | |
1443 | self._log('connection from %s:%s' % client_addr) | |
1444 | server_thread = threading.Thread( | |
1445 | target=functools.partial( | |
1446 | self._server_loop, client, client_addr)) | |
1447 | ||
1448 | # Store weakrefs to the thread and socket, so we can | |
1449 | # dispose them in stop(). | |
1450 | self._server_threads[server_thread] = None | |
1451 | self._server_socks[client] = None | |
1452 | ||
1453 | server_thread.daemon = True | |
1454 | server_thread.start() | |
1455 | except socket.error as error: | |
1456 | if error.errno not in (errno.EAGAIN, errno.EBADF): | |
1457 | raise | |
1458 | except select.error as error: | |
1459 | if error.args[0] == errno.EBADF: | |
1460 | # Closed. | |
1461 | break | |
1462 | else: | |
1463 | raise | |
1464 | ||
    @_synchronized
    def _server_loop(self, client, client_addr):
        """Read requests from one client socket, 'client'.

        Runs in its own daemon thread (started by `_accept_loop`) until the
        server stops or the client disconnects.
        """
        while not self._stopped:
            try:
                # Release the server lock while blocked on the network so
                # other threads can use the server meanwhile.
                with self._unlock():
                    request = mock_server_receive_request(client, self)

                self._requests_count += 1
                self._log('%d\t%r' % (request.client_port, request))

                # Give most recently added responders precedence.
                for responder in reversed(self._autoresponders):
                    if responder.handle(request):
                        self._log('\t(autoresponse)')
                        break
                else:
                    # No autoresponder matched: enqueue for `receives()`.
                    self._request_q.put(request)
            except socket.error as error:
                if error.errno in (errno.ECONNRESET, errno.EBADF):
                    # We hung up, or the client did.
                    break
                raise
            except select.error as error:
                if error.args[0] == errno.EBADF:
                    # Closed.
                    break
                else:
                    raise

        self._log('disconnected: %s:%d' % client_addr)
        client.close()
1497 | ||
1498 | def _log(self, msg): | |
1499 | if self._verbose: | |
1500 | if self._label: | |
1501 | msg = '%s:\t%s' % (self._label, msg) | |
1502 | print(msg) | |
1503 | ||
    @contextlib.contextmanager
    def _unlock(self):
        """Temporarily release the lock."""
        # Pairs with the @_synchronized decorator: the caller already holds
        # self._lock, so drop it for the duration of the `with` body and
        # re-acquire on exit, even if the body raises.
        self._lock.release()
        try:
            yield
        finally:
            self._lock.acquire()
1512 | ||
    def __iter__(self):
        # The server is its own iterator; `next` blocks in `receives()`.
        return self
1515 | ||
1516 | def next(self): | |
1517 | request = self.receives() | |
1518 | if request is None: | |
1519 | # Server stopped. | |
1520 | raise StopIteration() | |
1521 | return request | |
1522 | ||
1523 | __next__ = next | |
1524 | ||
    def __repr__(self):
        # E.g. "MockupDB(localhost, 27017)".
        return 'MockupDB(%s, %s)' % self._address
1527 | ||
1528 | ||
def bind_socket(address):
    """Takes (host, port) and returns (socket_object, (host, port)).

    If the passed-in port is None, bind an unused port and return it.
    """
    host, port = address
    candidates = set(socket.getaddrinfo(host, port, socket.AF_INET,
                                        socket.SOCK_STREAM, 0,
                                        socket.AI_PASSIVE))
    for family, socktype, proto, _, sock_addr in candidates:
        sock = socket.socket(family, socktype, proto)
        if os.name != 'nt':
            sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)

        # With port=None the OS assigns an unused port automatically.
        sock.bind(sock_addr)
        sock.listen(128)
        bound_port = sock.getsockname()[1]
        return sock, (host, bound_port)

    raise socket.error('could not bind socket')
1551 | ||
1552 | ||
# Map each MongoDB wire-protocol opcode to the Request subclass whose
# `unpack` can parse messages of that type (see mock_server_receive_request).
OPCODES = {OP_QUERY: OpQuery,
           OP_INSERT: OpInsert,
           OP_UPDATE: OpUpdate,
           OP_DELETE: OpDelete,
           OP_GET_MORE: OpGetMore,
           OP_KILL_CURSORS: OpKillCursors}
1559 | ||
1560 | ||
def mock_server_receive_request(client, server):
    """Take a client socket and return a Request."""
    # Wire-protocol header: message length, request id, response-to id,
    # opcode -- four little-endian int32s, 16 bytes total.
    header = mock_server_receive(client, 16)
    length = _UNPACK_INT(header[:4])[0]
    request_id = _UNPACK_INT(header[4:8])[0]
    opcode = _UNPACK_INT(header[12:])[0]
    msg_bytes = mock_server_receive(client, length - 16)
    try:
        request_cls = OPCODES[opcode]
    except KeyError:
        raise NotImplementedError("Don't know how to unpack opcode %d yet"
                                  % opcode)
    return request_cls.unpack(msg_bytes, client, server, request_id)
1572 | ||
1573 | ||
def mock_server_receive(sock, length):
    """Receive `length` bytes from a socket object.

    Blocks (polling in one-second intervals) until all bytes arrive;
    raises socket.error(ECONNRESET) if the peer closes the connection.
    """
    msg = b''
    while length:
        # select() returns a (readable, writable, errored) 3-tuple; test the
        # *readable* list.  The tuple itself is always truthy, so the old
        # check called recv unconditionally and the 1-second timeout was
        # never actually honored.
        if select.select([sock.fileno()], [], [], 1)[0]:
            try:
                chunk = sock.recv(length)
                if chunk == b'':
                    # Orderly shutdown by the peer.
                    raise socket.error(errno.ECONNRESET, 'closed')

                length -= len(chunk)
                msg += chunk
            except socket.error as error:
                # Spurious wakeup on a non-blocking socket: retry.
                if error.errno == errno.EAGAIN:
                    continue
                raise

    return msg
1592 | ||
1593 | ||
def make_docs(*args, **kwargs):
    """Make the documents for a `Request` or `OpReply`.

    Takes a variety of argument styles, returns a list of dicts.

    Used by `make_prototype_request` and `make_reply`, which are in turn used
    by `MockupDB.receives`, `Request.replies`, and so on. See examples in
    tutorial.
    """
    # No trailing ': ' here: raise_args_err appends its own separator, so a
    # trailing one produced messages like "Can't interpret args: : f(...)".
    err_msg = "Can't interpret args"
    if not args and not kwargs:
        return []

    if not args:
        # OpReply(ok=1, ismaster=True).
        return [kwargs]

    if isinstance(args[0], (int, float, bool)):
        # server.receives().ok(0, err='uh oh').
        if args[1:]:
            raise_args_err(err_msg, ValueError)
        doc = OrderedDict({'ok': args[0]})
        doc.update(kwargs)
        return [doc]

    if isinstance(args[0], (list, tuple)):
        # Send a batch: OpReply([{'a': 1}, {'a': 2}]).
        if not all(isinstance(doc, (OpReply, collections.Mapping))
                   for doc in args[0]):
            raise_args_err('each doc must be a dict')
        if kwargs:
            raise_args_err(err_msg, ValueError)
        return list(args[0])

    if isinstance(args[0], (string_type, text_type)):
        if args[2:]:
            raise_args_err(err_msg, ValueError)

        if len(args) == 2:
            # Command('aggregate', 'collection', {'cursor': {'batchSize': 1}}).
            doc = OrderedDict({args[0]: args[1]})
        else:
            # OpReply('ismaster', me='a.com').
            doc = OrderedDict({args[0]: 1})
        doc.update(kwargs)
        return [doc]

    if kwargs:
        raise_args_err(err_msg, ValueError)

    # Send a batch as varargs: OpReply({'a': 1}, {'a': 2}).
    if not all(isinstance(doc, (OpReply, collections.Mapping)) for doc in args):
        raise_args_err('each doc must be a dict')

    # Return a list as documented -- `args` itself is a tuple.
    return list(args)
1649 | ||
1650 | ||
def make_matcher(*args, **kwargs):
    """Make a Matcher from a `request spec`_:

    >>> make_matcher()
    Matcher(Request())
    >>> make_matcher({'ismaster': 1}, namespace='admin')
    Matcher(Request({"ismaster": 1}, namespace="admin"))
    >>> make_matcher({}, {'_id': 1})
    Matcher(Request({}, {"_id": 1}))

    See more examples in tutorial.
    """
    # An existing Matcher passes through untouched.
    if args and isinstance(args[0], Matcher):
        if len(args) > 1 or kwargs:
            raise_args_err("can't interpret args")
        return args[0]

    return Matcher(*args, **kwargs)
1669 | ||
1670 | ||
def make_prototype_request(*args, **kwargs):
    """Make a prototype Request for a Matcher."""
    if args and inspect.isclass(args[0]) and issubclass(args[0], Request):
        # E.g. make_prototype_request(OpQuery, ...): instantiate the class
        # with the remaining arguments.
        return args[0](*args[1:], **kwargs)
    if args and isinstance(args[0], Request):
        # An existing Request passes through untouched.
        if len(args) > 1 or kwargs:
            raise_args_err("can't interpret args")
        return args[0]

    # Match any opcode.
    return Request(*args, **kwargs)
1683 | ||
1684 | ||
def make_reply(*args, **kwargs):
    """Make an OpReply from a `reply spec`_:

    >>> make_reply()
    OpReply()
    >>> make_reply(OpReply({'ok': 0}))
    OpReply({"ok": 0})
    >>> make_reply(0)
    OpReply({"ok": 0})
    >>> make_reply(key='value')
    OpReply({"key": "value"})

    See more examples in tutorial.
    """
    # An existing OpReply passes through untouched.
    if args and isinstance(args[0], OpReply):
        if len(args) > 1 or kwargs:
            raise_args_err("can't interpret args")
        return args[0]

    return OpReply(*args, **kwargs)
1706 | ||
1707 | ||
def unprefixed(bson_str):
    """repr() of `bson_str` without the Python 2 u-prefix on unicode."""
    rep = unicode(repr(bson_str))
    if rep[:2] in (u'u"', u"u'"):
        return rep[1:]
    return rep
1714 | ||
1715 | ||
def docs_repr(*args):
    """Stringify ordered dicts like a regular ones.

    Preserve order, remove 'u'-prefix on unicodes in Python 2:

    >>> print(docs_repr(OrderedDict([(u'_id', 2)])))
    {"_id": 2}
    >>> print(docs_repr(OrderedDict([(u'_id', 2), (u'a', u'b')]),
    ...                 OrderedDict([(u'a', 1)])))
    {"_id": 2, "a": "b"}, {"a": 1}
    >>>
    >>> import datetime
    >>> now = datetime.datetime.utcfromtimestamp(123456)
    >>> print(docs_repr(OrderedDict([(u'ts', now)])))
    {"ts": {"$date": 123456000}}
    >>>
    >>> oid = _bson.ObjectId(b'123456781234567812345678')
    >>> print(docs_repr(OrderedDict([(u'oid', oid)])))
    {"oid": {"$oid": "123456781234567812345678"}}
    """
    # Extended-JSON-dump each document and join them with ", ".
    return u', '.join(text_type(_json_util.dumps(doc)) for doc in args)
1742 | ||
1743 | ||
def seq_match(seq0, seq1):
    """True if seq0 is a subset of seq1 and their elements are in same order.

    >>> seq_match([], [])
    True
    >>> seq_match([1], [1])
    True
    >>> seq_match([1, 1], [1])
    False
    >>> seq_match([1], [1, 2])
    True
    >>> seq_match([1, 1], [1, 1])
    True
    >>> seq_match([3], [1, 2, 3])
    True
    >>> seq_match([1, 3], [1, 2, 3])
    True
    >>> seq_match([2, 1], [1, 2, 3])
    False
    """
    if len(seq0) > len(seq1):
        return False
    remaining = iter(seq1)
    # `elem in remaining` consumes the iterator up to and including the
    # first match, which enforces that matches occur in order.
    return all(elem in remaining for elem in seq0)
1778 | ||
1779 | ||
def format_call(frame):
    """Render the function call active in *frame* as "name(args...)".

    Used by `raise_args_err` to echo the offending call in error messages.
    """
    fn_name = inspect.getframeinfo(frame)[2]
    arg_info = inspect.getargvalues(frame)
    args = [repr(arg_info.locals[arg]) for arg in arg_info.args]
    # varargs/keywords are None when the function has no *args/**kwargs;
    # indexing locals with None raised KeyError here before.
    if arg_info.varargs:
        args.extend(repr(x) for x in arg_info.locals[arg_info.varargs])
    if arg_info.keywords:
        args.extend('%s=%r' % (key, value) for key, value in
                    arg_info.locals[arg_info.keywords].items())
    return '%s(%s)' % (fn_name, ', '.join(args))
1788 | ||
1789 | ||
def raise_args_err(message='bad arguments', error_class=TypeError):
    """Throw an error with standard message, displaying function call.

    >>> def f(a, *args, **kwargs):
    ...     raise_args_err()
    ...
    >>> f(1, 2, x='y')
    Traceback (most recent call last):
    ...
    TypeError: bad arguments: f(1, 2, x='y')
    """
    # Inspect the *caller's* frame to reproduce its call in the message.
    caller_frame = inspect.currentframe().f_back
    raise error_class('%s: %s' % (message, format_call(caller_frame)))
1803 | ||
1804 | ||
def interactive_server(port=27017, verbose=True, all_ok=False, name='MockupDB',
                       ssl=False):
    """A `MockupDB` that the mongo shell can connect to.

    Call `~.MockupDB.run` on the returned server, and clean it up with
    `~.MockupDB.stop`.

    If ``all_ok`` is True, replies {ok: 1} to anything unmatched by a specific
    responder.
    """
    # Huge request_timeout so an interactive shell can idle indefinitely.
    server = MockupDB(port=port,
                      verbose=verbose,
                      request_timeout=int(1e6),
                      ssl=ssl)
    if all_ok:
        # Registered first: later, more specific responders take precedence
        # over this catch-all (most recently added responders win).
        server.autoresponds({})
    server.autoresponds(Command('ismaster'), ismaster=True, setName=name)
    server.autoresponds('whatsmyuri', you='localhost:12345')
    server.autoresponds({'getLog': 'startupWarnings'},
                        log=['hello from %s!' % name])
    server.autoresponds(Command('buildInfo'), version='MockupDB ' + __version__)
    server.autoresponds(Command('listCollections'))
    server.autoresponds('replSetGetStatus', ok=0)
    return server
0 | # -*- coding: utf-8 -*- | |
1 | # Copyright 2015 MongoDB, Inc. | |
2 | # | |
3 | # Licensed under the Apache License, Version 2.0 (the "License"); | |
4 | # you may not use this file except in compliance with the License. | |
5 | # You may obtain a copy of the License at | |
6 | # | |
7 | # http://www.apache.org/licenses/LICENSE-2.0 | |
8 | # | |
9 | # Unless required by applicable law or agreed to in writing, software | |
10 | # distributed under the License is distributed on an "AS IS" BASIS, | |
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |
12 | # See the License for the specific language governing permissions and | |
13 | # limitations under the License. | |
14 | ||
15 | """Demonstrate a mocked MongoDB server.""" | |
16 | ||
17 | import time | |
18 | ||
19 | from mockupdb import interactive_server | |
20 | ||
21 | ||
def main():
    """Start an interactive `MockupDB`.

    Use like ``python -m mockupdb``.
    """
    from optparse import OptionParser
    parser = OptionParser('Start mock MongoDB server')
    # type='int' so "-p 27018" arrives as an int; optparse otherwise passes
    # the raw string through, which breaks '%d' formatting of the port.
    parser.add_option('-p', '--port', dest='port', type='int', default=27017,
                      help='port on which mock mongod listens')
    parser.add_option('-q', '--quiet',
                      action='store_false', dest='verbose', default=True,
                      help="don't print messages to stdout")

    options, cmdline_args = parser.parse_args()
    if cmdline_args:
        parser.error('Unrecognized argument(s): %s' % ' '.join(cmdline_args))

    server = interactive_server(port=options.port, verbose=options.verbose)
    try:
        server.run()
        print('Listening on port %d' % server.port)
        # Sleep "forever"; Ctrl-C stops the server below.
        time.sleep(1e6)
    except KeyboardInterrupt:
        server.stop()
46 | ||
# Support invocation as a script / "python -m mockupdb".
if __name__ == '__main__':
    main()
0 | # Copyright 2009-2015 MongoDB, Inc. | |
1 | # | |
2 | # Licensed under the Apache License, Version 2.0 (the "License"); | |
3 | # you may not use this file except in compliance with the License. | |
4 | # You may obtain a copy of the License at | |
5 | # | |
6 | # http://www.apache.org/licenses/LICENSE-2.0 | |
7 | # | |
8 | # Unless required by applicable law or agreed to in writing, software | |
9 | # distributed under the License is distributed on an "AS IS" BASIS, | |
10 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |
11 | # See the License for the specific language governing permissions and | |
12 | # limitations under the License. | |
13 | ||
14 | """BSON (Binary JSON) encoding and decoding. | |
15 | """ | |
16 | ||
17 | import calendar | |
18 | import collections | |
19 | import datetime | |
20 | import itertools | |
21 | import re | |
22 | import struct | |
23 | import sys | |
24 | import uuid | |
25 | ||
26 | from codecs import (utf_8_decode as _utf_8_decode, | |
27 | utf_8_encode as _utf_8_encode) | |
28 | ||
29 | from mockupdb._bson.binary import (Binary, OLD_UUID_SUBTYPE, | |
30 | JAVA_LEGACY, CSHARP_LEGACY, | |
31 | UUIDLegacy) | |
32 | from mockupdb._bson.code import Code | |
33 | from mockupdb._bson.codec_options import CodecOptions, DEFAULT_CODEC_OPTIONS | |
34 | from mockupdb._bson.dbref import DBRef | |
35 | from mockupdb._bson.errors import (InvalidBSON, | |
36 | InvalidDocument, | |
37 | InvalidStringData) | |
38 | from mockupdb._bson.int64 import Int64 | |
39 | from mockupdb._bson.max_key import MaxKey | |
40 | from mockupdb._bson.min_key import MinKey | |
41 | from mockupdb._bson.objectid import ObjectId | |
42 | from mockupdb._bson.py3compat import (b, | |
43 | PY3, | |
44 | iteritems, | |
45 | text_type, | |
46 | string_type, | |
47 | reraise) | |
48 | from mockupdb._bson.regex import Regex | |
49 | from mockupdb._bson.son import SON, RE_TYPE | |
50 | from mockupdb._bson.timestamp import Timestamp | |
51 | from mockupdb._bson.tz_util import utc | |
52 | ||
53 | ||
54 | try: | |
55 | from mockupdb._bson import _cbson | |
56 | _USE_C = True | |
57 | except ImportError: | |
58 | _USE_C = False | |
59 | ||
60 | ||
# The Unix epoch, timezone-aware and naive; BSON datetimes are decoded as
# millisecond offsets from these (see _get_date).
EPOCH_AWARE = datetime.datetime.fromtimestamp(0, utc)
EPOCH_NAIVE = datetime.datetime.utcfromtimestamp(0)


# BSON element type tags, one byte each.
BSONNUM = b"\x01"  # Floating point
BSONSTR = b"\x02"  # UTF-8 string
BSONOBJ = b"\x03"  # Embedded document
BSONARR = b"\x04"  # Array
BSONBIN = b"\x05"  # Binary
BSONUND = b"\x06"  # Undefined
BSONOID = b"\x07"  # ObjectId
BSONBOO = b"\x08"  # Boolean
BSONDAT = b"\x09"  # UTC Datetime
BSONNUL = b"\x0A"  # Null
BSONRGX = b"\x0B"  # Regex
BSONREF = b"\x0C"  # DBRef
BSONCOD = b"\x0D"  # Javascript code
BSONSYM = b"\x0E"  # Symbol
BSONCWS = b"\x0F"  # Javascript code with scope
BSONINT = b"\x10"  # 32bit int
BSONTIM = b"\x11"  # Timestamp
BSONLON = b"\x12"  # 64bit int
BSONMIN = b"\xFF"  # Min key
BSONMAX = b"\x7F"  # Max key


# Pre-compiled little-endian unpackers shared by the decoders below.
_UNPACK_FLOAT = struct.Struct("<d").unpack
_UNPACK_INT = struct.Struct("<i").unpack
_UNPACK_LENGTH_SUBTYPE = struct.Struct("<iB").unpack
_UNPACK_LONG = struct.Struct("<q").unpack
_UNPACK_TIMESTAMP = struct.Struct("<II").unpack
92 | ||
93 | ||
def _get_int(data, position, dummy0, dummy1):
    """Decode a BSON int32 to python int."""
    return _UNPACK_INT(data[position:position + 4])[0], position + 4
98 | ||
99 | ||
100 | def _get_c_string(data, position, opts): | |
101 | """Decode a BSON 'C' string to python unicode string.""" | |
102 | end = data.index(b"\x00", position) | |
103 | return _utf_8_decode(data[position:end], | |
104 | opts.unicode_decode_error_handler, True)[0], end + 1 | |
105 | ||
106 | ||
def _get_float(data, position, dummy0, dummy1):
    """Decode a BSON double to python float."""
    return _UNPACK_FLOAT(data[position:position + 8])[0], position + 8
111 | ||
112 | ||
def _get_string(data, position, obj_end, opts):
    """Decode a BSON string to python unicode string."""
    # Leading int32: byte length of the UTF-8 payload including its NUL.
    length = _UNPACK_INT(data[position:position + 4])[0]
    position += 4
    if length < 1 or obj_end - position < length:
        raise InvalidBSON("invalid string length")
    end = position + length - 1
    if data[end:end + 1] != b"\x00":
        raise InvalidBSON("invalid end of string")
    return _utf_8_decode(data[position:end],
                         opts.unicode_decode_error_handler, True)[0], end + 1
124 | ||
125 | ||
def _get_object(data, position, obj_end, opts):
    """Decode a BSON subdocument to opts.document_class or bson.dbref.DBRef."""
    obj_size = _UNPACK_INT(data[position:position + 4])[0]
    end = position + obj_size - 1
    if data[end:position + obj_size] != b"\x00":
        raise InvalidBSON("bad eoo")
    if end >= obj_end:
        raise InvalidBSON("invalid object length")
    # Decode the elements between the size prefix and the trailing NUL.
    obj = _elements_to_dict(data, position + 4, end, opts)

    position += obj_size
    # Documents carrying a $ref field decode as DBRef.
    if "$ref" in obj:
        return (DBRef(obj.pop("$ref"), obj.pop("$id", None),
                      obj.pop("$db", None), obj), position)
    return obj, position
141 | ||
142 | ||
def _get_array(data, position, obj_end, opts):
    """Decode a BSON array to python list.

    Arrays are encoded like documents whose keys are "0", "1", ...; the
    keys are skipped here and only values are kept.
    """
    size = _UNPACK_INT(data[position:position + 4])[0]
    end = position + size - 1
    if data[end:end + 1] != b"\x00":
        raise InvalidBSON("bad eoo")
    position += 4
    end -= 1
    result = []

    # Avoid doing global and attribute lookups in the loop.
    append = result.append
    index = data.index
    getter = _ELEMENT_GETTER

    while position < end:
        element_type = data[position:position + 1]
        # Just skip the keys.
        position = index(b'\x00', position) + 1
        value, position = getter[element_type](data, position, obj_end, opts)
        append(value)
    return result, position + 1
165 | ||
166 | ||
def _get_binary(data, position, dummy, opts):
    """Decode a BSON binary to bson.binary.Binary or python UUID."""
    # int32 length followed by a one-byte subtype.
    length, subtype = _UNPACK_LENGTH_SUBTYPE(data[position:position + 5])
    position += 5
    if subtype == 2:
        # Subtype 2 nests a second, redundant length prefix.
        length2 = _UNPACK_INT(data[position:position + 4])[0]
        position += 4
        if length2 != length - 4:
            raise InvalidBSON("invalid binary (st 2) - lengths don't match!")
        length = length2
    end = position + length
    if subtype in (3, 4):
        # Subtypes 3 and 4 hold UUIDs; byte order depends on the configured
        # uuid_representation.
        # Java Legacy
        uuid_representation = opts.uuid_representation
        if uuid_representation == JAVA_LEGACY:
            java = data[position:end]
            value = uuid.UUID(bytes=java[0:8][::-1] + java[8:16][::-1])
        # C# legacy
        elif uuid_representation == CSHARP_LEGACY:
            value = uuid.UUID(bytes_le=data[position:end])
        # Python
        else:
            value = uuid.UUID(bytes=data[position:end])
        return value, end
    # Python3 special case. Decode subtype 0 to 'bytes'.
    if PY3 and subtype == 0:
        value = data[position:end]
    else:
        value = Binary(data[position:end], subtype)
    return value, end
197 | ||
198 | ||
def _get_oid(data, position, dummy0, dummy1):
    """Decode a BSON ObjectId to bson.objectid.ObjectId."""
    # ObjectIds are a fixed 12 bytes.
    end = position + 12
    return ObjectId(data[position:end]), end
203 | ||
204 | ||
205 | def _get_boolean(data, position, dummy0, dummy1): | |
206 | """Decode a BSON true/false to python True/False.""" | |
207 | end = position + 1 | |
208 | return data[position:end] == b"\x01", end | |
209 | ||
210 | ||
def _get_date(data, position, dummy, opts):
    """Decode a BSON datetime to python datetime.datetime."""
    end = position + 8
    # Signed int64 of milliseconds since the Unix epoch.
    millis = _UNPACK_LONG(data[position:end])[0]
    # Split into whole seconds and microseconds; the double modulo keeps
    # `diff` non-negative for pre-epoch (negative) timestamps.
    diff = ((millis % 1000) + 1000) % 1000
    seconds = (millis - diff) / 1000
    micros = diff * 1000
    if opts.tz_aware:
        dt = EPOCH_AWARE + datetime.timedelta(
            seconds=seconds, microseconds=micros)
        if opts.tzinfo:
            dt = dt.astimezone(opts.tzinfo)
    else:
        dt = EPOCH_NAIVE + datetime.timedelta(
            seconds=seconds, microseconds=micros)
    return dt, end
227 | ||
228 | ||
def _get_code(data, position, obj_end, opts):
    """Decode a BSON code to bson.code.Code."""
    # Encoded exactly like a BSON string.
    code, position = _get_string(data, position, obj_end, opts)
    return Code(code), position
233 | ||
234 | ||
def _get_code_w_scope(data, position, obj_end, opts):
    """Decode a BSON code_w_scope to bson.code.Code."""
    # Skip the leading int32 total-size field (hence position + 4), then
    # read the code string followed by its scope document.
    code, position = _get_string(data, position + 4, obj_end, opts)
    scope, position = _get_object(data, position, obj_end, opts)
    return Code(code, scope), position
240 | ||
241 | ||
def _get_regex(data, position, dummy0, opts):
    """Decode a BSON regex to bson.regex.Regex or a python pattern object."""
    # Two consecutive C strings: the pattern, then the flags.
    pattern, position = _get_c_string(data, position, opts)
    bson_flags, position = _get_c_string(data, position, opts)
    bson_re = Regex(pattern, bson_flags)
    return bson_re, position
248 | ||
249 | ||
def _get_ref(data, position, obj_end, opts):
    """Decode (deprecated) BSON DBPointer to bson.dbref.DBRef."""
    # A string (collection name) followed by a 12-byte ObjectId.
    collection, position = _get_string(data, position, obj_end, opts)
    oid, position = _get_oid(data, position, obj_end, opts)
    return DBRef(collection, oid), position
255 | ||
256 | ||
def _get_timestamp(data, position, dummy0, dummy1):
    """Decode a BSON timestamp to bson.timestamp.Timestamp."""
    end = position + 8
    # On the wire the increment comes first, then the seconds.
    inc, timestamp = _UNPACK_TIMESTAMP(data[position:end])
    return Timestamp(timestamp, inc), end
262 | ||
263 | ||
def _get_int64(data, position, dummy0, dummy1):
    """Decode a BSON int64 to bson.int64.Int64."""
    end = position + 8
    return Int64(_UNPACK_LONG(data[position:end])[0]), end
268 | ||
269 | ||
# Each decoder function's signature is:
#  - data: bytes
#  - position: int, beginning of object in 'data' to decode
#  - obj_end: int, end of object to decode in 'data' if variable-length type
#  - opts: a CodecOptions
# Each returns a (value, new_position) pair.
_ELEMENT_GETTER = {
    BSONNUM: _get_float,
    BSONSTR: _get_string,
    BSONOBJ: _get_object,
    BSONARR: _get_array,
    BSONBIN: _get_binary,
    BSONUND: lambda w, x, y, z: (None, x),  # Deprecated undefined
    BSONOID: _get_oid,
    BSONBOO: _get_boolean,
    BSONDAT: _get_date,
    BSONNUL: lambda w, x, y, z: (None, x),
    BSONRGX: _get_regex,
    BSONREF: _get_ref,  # Deprecated DBPointer
    BSONCOD: _get_code,
    BSONSYM: _get_string,  # Deprecated symbol
    BSONCWS: _get_code_w_scope,
    BSONINT: _get_int,
    BSONTIM: _get_timestamp,
    BSONLON: _get_int64,
    BSONMIN: lambda w, x, y, z: (MinKey(), x),
    BSONMAX: lambda w, x, y, z: (MaxKey(), x)}
296 | ||
297 | ||
def _element_to_dict(data, position, obj_end, opts):
    """Decode a single key, value pair."""
    # One type byte, then the NUL-terminated key, then the value.
    element_type = data[position:position + 1]
    element_name, position = _get_c_string(data, position + 1, opts)
    getter = _ELEMENT_GETTER[element_type]
    value, position = getter(data, position, obj_end, opts)
    return element_name, value, position
306 | ||
307 | ||
def _elements_to_dict(data, position, obj_end, opts):
    """Decode a BSON document."""
    result = opts.document_class()
    last = obj_end - 1  # The final byte is the document's NUL terminator.
    while position < last:
        key, value, position = _element_to_dict(data, position, obj_end, opts)
        result[key] = value
    return result
316 | ||
317 | ||
def _bson_to_dict(data, opts):
    """Decode a BSON string to document_class."""
    try:
        obj_size = _UNPACK_INT(data[:4])[0]
    except struct.error as exc:
        # Fewer than 4 bytes of input.
        raise InvalidBSON(str(exc))
    if obj_size != len(data):
        raise InvalidBSON("invalid object size")
    if data[obj_size - 1:obj_size] != b"\x00":
        raise InvalidBSON("bad eoo")
    try:
        return _elements_to_dict(data, 4, obj_size - 1, opts)
    except InvalidBSON:
        raise
    except Exception:
        # Change exception type to InvalidBSON but preserve traceback.
        _, exc_value, exc_tb = sys.exc_info()
        reraise(InvalidBSON, exc_value, exc_tb)
# Prefer the C extension's implementation when it is available.
if _USE_C:
    _bson_to_dict = _cbson._bson_to_dict
338 | ||
339 | ||
# Pre-compiled little-endian packers used by the encoders below.
_PACK_FLOAT = struct.Struct("<d").pack
_PACK_INT = struct.Struct("<i").pack
_PACK_LENGTH_SUBTYPE = struct.Struct("<iB").pack
_PACK_LONG = struct.Struct("<q").pack
_PACK_TIMESTAMP = struct.Struct("<II").pack
# Cache of NUL-terminated keys b"0\x00" ... b"999\x00" for encoding lists.
_LIST_NAMES = tuple(b(str(i)) + b"\x00" for i in range(1000))
346 | ||
347 | ||
def gen_list_name():
    """Generate "keys" for encoded lists in the sequence
    b"0\x00", b"1\x00", b"2\x00", ...

    The first 1000 keys are returned from a pre-built cache. All
    subsequent keys are generated on the fly.
    """
    for name in _LIST_NAMES:
        yield name

    for index in itertools.count(1000):
        yield b(str(index)) + b"\x00"
361 | ||
362 | ||
def _make_c_string_check(string):
    """Make a 'C' string, checking for embedded NUL characters."""
    if isinstance(string, bytes):
        if b"\x00" in string:
            raise InvalidDocument("BSON keys / regex patterns must not "
                                  "contain a NUL character")
        # Reject byte strings that are not valid UTF-8.
        try:
            _utf_8_decode(string, None, True)
        except UnicodeError:
            raise InvalidStringData("strings in documents must be valid "
                                    "UTF-8: %r" % string)
        return string + b"\x00"

    if "\x00" in string:
        raise InvalidDocument("BSON keys / regex patterns must not "
                              "contain a NUL character")
    return _utf_8_encode(string)[0] + b"\x00"
380 | ||
381 | ||
def _make_c_string(string):
    """Make a 'C' string (no embedded-NUL check)."""
    if not isinstance(string, bytes):
        return _utf_8_encode(string)[0] + b"\x00"

    # Byte strings must still be valid UTF-8.
    try:
        _utf_8_decode(string, None, True)
    except UnicodeError:
        raise InvalidStringData("strings in documents must be valid "
                                "UTF-8: %r" % string)
    return string + b"\x00"
393 | ||
394 | ||
# _make_name builds the encoded element key (a NUL-terminated C string).
if PY3:
    def _make_name(string):
        """Make a 'C' string suitable for a BSON key."""
        # Keys can only be text in python 3.
        if "\x00" in string:
            raise InvalidDocument("BSON keys / regex patterns must not "
                                  "contain a NUL character")
        return _utf_8_encode(string)[0] + b"\x00"
else:
    # Keys can be unicode or bytes in python 2.
    _make_name = _make_c_string_check
406 | ||
407 | ||
def _encode_float(name, value, dummy0, dummy1):
    """Encode a float as a BSON double (type byte 0x01)."""
    packed = _PACK_FLOAT(value)
    return b"\x01" + name + packed
411 | ||
412 | ||
if PY3:
    def _encode_bytes(name, value, dummy0, dummy1):
        """Encode python bytes (python 3.x).

        Python3 special case: 'bytes' is stored as BSON binary subtype 0.
        """
        return b"\x05" + name + _PACK_INT(len(value)) + b"\x00" + value
else:
    def _encode_bytes(name, value, dummy0, dummy1):
        """Encode a python str (python 2.x) as a BSON string."""
        # The raw bytes must form valid UTF-8 to qualify as a BSON string.
        try:
            _utf_8_decode(value, None, True)
        except UnicodeError:
            raise InvalidStringData("strings in documents must be valid "
                                    "UTF-8: %r" % (value,))
        return b"\x02" + name + _PACK_INT(len(value) + 1) + value + b"\x00"
427 | ||
428 | ||
def _encode_mapping(name, value, check_keys, opts):
    """Encode a mapping type as an embedded BSON document."""
    encoded = b"".join(_element_to_bson(key, val, check_keys, opts)
                       for key, val in iteritems(value))
    # +5 accounts for the int32 length prefix and the trailing NUL.
    return b"\x03" + name + _PACK_INT(len(encoded) + 5) + encoded + b"\x00"
434 | ||
435 | ||
def _encode_dbref(name, value, check_keys, opts):
    """Encode bson.dbref.DBRef.

    A DBRef is encoded as an embedded document whose fields appear in
    the order $ref, $id, optional $db, then any extra keyword fields.
    """
    # Reserve four bytes for the embedded document's length; it is
    # back-patched once the full body has been written.
    buf = bytearray(b"\x03" + name + b"\x00\x00\x00\x00")
    begin = len(buf) - 4

    buf += _name_value_to_bson(b"$ref\x00",
                               value.collection, check_keys, opts)
    buf += _name_value_to_bson(b"$id\x00",
                               value.id, check_keys, opts)
    if value.database is not None:
        buf += _name_value_to_bson(
            b"$db\x00", value.database, check_keys, opts)
    # DBRef keeps extra fields in a private dict; encode them after $db.
    for key, val in iteritems(value._DBRef__kwargs):
        buf += _element_to_bson(key, val, check_keys, opts)

    buf += b"\x00"
    # Back-patch the embedded document's length (includes the NUL above).
    buf[begin:begin + 4] = _PACK_INT(len(buf) - begin)
    return bytes(buf)
454 | ||
455 | ||
def _encode_list(name, value, check_keys, opts):
    """Encode a list/tuple as a BSON array."""
    # BSON arrays are documents whose keys are "0", "1", "2", ...
    list_names = gen_list_name()
    encoded = b"".join(_name_value_to_bson(next(list_names), element,
                                           check_keys, opts)
                       for element in value)
    return b"\x04" + name + _PACK_INT(len(encoded) + 5) + encoded + b"\x00"
463 | ||
464 | ||
def _encode_text(name, value, dummy0, dummy1):
    """Encode a python unicode (python 2.x) / str (python 3.x)."""
    encoded = _utf_8_encode(value)[0]
    # Length prefix counts the trailing NUL, hence the +1.
    return b"\x02" + name + _PACK_INT(len(encoded) + 1) + encoded + b"\x00"
469 | ||
470 | ||
def _encode_binary(name, value, dummy0, dummy1):
    """Encode bson.binary.Binary."""
    subtype = value.subtype
    payload = value
    if subtype == 2:
        # The deprecated "old binary" subtype nests an extra int32 length.
        payload = _PACK_INT(len(payload)) + payload
    return (b"\x05" + name +
            _PACK_LENGTH_SUBTYPE(len(payload), subtype) + payload)
477 | ||
478 | ||
def _encode_uuid(name, value, dummy, opts):
    """Encode uuid.UUID according to opts.uuid_representation."""
    representation = opts.uuid_representation
    if representation == OLD_UUID_SUBTYPE:
        # Python legacy common case: RFC-4122 bytes, old subtype 3.
        subtype, payload = b'\x03', value.bytes
    elif representation == JAVA_LEGACY:
        # Java legacy: each 8-byte half is stored reversed, subtype 3.
        raw = value.bytes
        subtype, payload = b'\x03', raw[:8][::-1] + raw[8:][::-1]
    elif representation == CSHARP_LEGACY:
        # C# legacy: Microsoft GUID byte order, subtype 3.
        subtype, payload = b'\x03', value.bytes_le
    else:
        # New standard representation: RFC-4122 bytes, subtype 4.
        subtype, payload = b'\x04', value.bytes
    # 0x10 = 16, the fixed byte length of a UUID.
    return b"\x05" + name + b'\x10\x00\x00\x00' + subtype + payload
497 | ||
498 | ||
499 | def _encode_objectid(name, value, dummy0, dummy1): | |
500 | """Encode bson.objectid.ObjectId.""" | |
501 | return b"\x07" + name + value.binary | |
502 | ||
503 | ||
504 | def _encode_bool(name, value, dummy0, dummy1): | |
505 | """Encode a python boolean (True/False).""" | |
506 | return b"\x08" + name + (value and b"\x01" or b"\x00") | |
507 | ||
508 | ||
def _encode_datetime(name, value, dummy0, dummy1):
    """Encode datetime.datetime as UTC milliseconds since the epoch."""
    offset = value.utcoffset()
    if offset is not None:
        # Normalize an aware datetime to UTC before encoding.
        value = value - offset
    millis = int(calendar.timegm(value.timetuple()) * 1000 +
                 value.microsecond / 1000)
    return b"\x09" + name + _PACK_LONG(millis)
516 | ||
517 | ||
518 | def _encode_none(name, dummy0, dummy1, dummy2): | |
519 | """Encode python None.""" | |
520 | return b"\x0A" + name | |
521 | ||
522 | ||
def _encode_regex(name, value, dummy0, dummy1):
    """Encode a python regex or bson.regex.Regex."""
    flags = value.flags
    if flags == 0:
        # Python 2 common case: no flags at all.
        return b"\x0B" + name + _make_c_string_check(value.pattern) + b"\x00"
    if flags == re.UNICODE:
        # Python 3 common case: only the (implicit) unicode flag.
        return b"\x0B" + name + _make_c_string_check(value.pattern) + b"u\x00"
    # General case: emit the option characters in alphabetical order.
    flag_chars = (
        (re.IGNORECASE, b"i"),
        (re.LOCALE, b"l"),
        (re.MULTILINE, b"m"),
        (re.DOTALL, b"s"),
        (re.UNICODE, b"u"),
        (re.VERBOSE, b"x"),
    )
    sflags = b"".join(char for flag, char in flag_chars if flags & flag)
    return (b"\x0B" + name + _make_c_string_check(value.pattern) +
            sflags + b"\x00")
548 | ||
549 | ||
def _encode_code(name, value, dummy, opts):
    """Encode bson.code.Code, with or without an attached scope."""
    code_cstring = _make_c_string(value)
    code_len = len(code_cstring)
    if not value.scope:
        # Plain JavaScript: type 0x0D, just the code string.
        return b"\x0D" + name + _PACK_INT(code_len) + code_cstring
    # Code-with-scope: type 0x0F, total length + code string + scope doc.
    scope_doc = _dict_to_bson(value.scope, False, opts, False)
    total = _PACK_INT(8 + code_len + len(scope_doc))
    return (b"\x0F" + name + total + _PACK_INT(code_len) +
            code_cstring + scope_doc)
559 | ||
560 | ||
def _encode_int(name, value, dummy0, dummy1):
    """Encode a python int: int32 when it fits, otherwise int64."""
    if -2147483648 <= value <= 2147483647:
        return b"\x10" + name + _PACK_INT(value)
    try:
        return b"\x12" + name + _PACK_LONG(value)
    except struct.error:
        raise OverflowError("BSON can only handle up to 8-byte ints")
570 | ||
571 | ||
def _encode_timestamp(name, value, dummy0, dummy1):
    """Encode bson.timestamp.Timestamp (increment then time)."""
    packed = _PACK_TIMESTAMP(value.inc, value.time)
    return b"\x11" + name + packed
575 | ||
576 | ||
def _encode_long(name, value, dummy0, dummy1):
    """Encode a python long (python 2.x) as a BSON int64."""
    try:
        return b"\x12" + name + _PACK_LONG(value)
    except struct.error:
        # struct refuses anything outside the signed 64-bit range.
        raise OverflowError("BSON can only handle up to 8-byte ints")
583 | ||
584 | ||
585 | def _encode_minkey(name, dummy0, dummy1, dummy2): | |
586 | """Encode bson.min_key.MinKey.""" | |
587 | return b"\xFF" + name | |
588 | ||
589 | ||
590 | def _encode_maxkey(name, dummy0, dummy1, dummy2): | |
591 | """Encode bson.max_key.MaxKey.""" | |
592 | return b"\x7F" + name | |
593 | ||
594 | ||
# Each encoder function's signature is:
#   - name: utf-8 bytes
#   - value: a Python data type, e.g. a Python int for _encode_int
#   - check_keys: bool, whether to check for invalid names
#   - opts: a CodecOptions
# Lookup is by exact type; _name_value_to_bson falls back to
# _type_marker and then base-class checks, caching the result here.
_ENCODERS = {
    bool: _encode_bool,
    bytes: _encode_bytes,
    datetime.datetime: _encode_datetime,
    dict: _encode_mapping,
    float: _encode_float,
    int: _encode_int,
    list: _encode_list,
    # unicode in py2, str in py3
    text_type: _encode_text,
    tuple: _encode_list,
    type(None): _encode_none,
    uuid.UUID: _encode_uuid,
    Binary: _encode_binary,
    Int64: _encode_long,
    Code: _encode_code,
    DBRef: _encode_dbref,
    MaxKey: _encode_maxkey,
    MinKey: _encode_minkey,
    ObjectId: _encode_objectid,
    Regex: _encode_regex,
    RE_TYPE: _encode_regex,
    SON: _encode_mapping,
    Timestamp: _encode_timestamp,
    UUIDLegacy: _encode_binary,
    # Special case. This will never be looked up directly.
    # NOTE(review): collections.Mapping was removed in Python 3.10;
    # collections.abc.Mapping is the modern spelling — confirm the
    # supported Python range before changing.
    collections.Mapping: _encode_mapping,
}
628 | ||
629 | ||
# Map each BSON wrapper type's _type_marker value to its encoder so
# that user-defined subclasses of those types are still encodable
# (see the fallback lookup in _name_value_to_bson).
_MARKERS = {
    5: _encode_binary,
    7: _encode_objectid,
    11: _encode_regex,
    13: _encode_code,
    17: _encode_timestamp,
    18: _encode_long,
    100: _encode_dbref,
    127: _encode_maxkey,
    255: _encode_minkey,
}

# Python 2 has a distinct `long` type; encode it as a BSON int64.
if not PY3:
    _ENCODERS[long] = _encode_long
644 | ||
645 | ||
def _name_value_to_bson(name, value, check_keys, opts):
    """Encode a single name, value pair.

    `name` must already be utf-8 bytes with a trailing NUL. Encoder
    resolution order: exact type, then `_type_marker`, then base-class
    checks; the winning encoder is cached in _ENCODERS for next time.
    """

    # First see if the type is already cached. KeyError will only ever
    # happen once per subtype.
    try:
        return _ENCODERS[type(value)](name, value, check_keys, opts)
    except KeyError:
        pass

    # Second, fall back to trying _type_marker. This has to be done
    # before the loop below since users could subclass one of our
    # custom types that subclasses a python built-in (e.g. Binary)
    marker = getattr(value, "_type_marker", None)
    if isinstance(marker, int) and marker in _MARKERS:
        func = _MARKERS[marker]
        # Cache this type for faster subsequent lookup.
        _ENCODERS[type(value)] = func
        return func(name, value, check_keys, opts)

    # If all else fails test each base type. This will only happen once for
    # a subtype of a supported base type.
    for base in _ENCODERS:
        if isinstance(value, base):
            func = _ENCODERS[base]
            # Cache this type for faster subsequent lookup.
            _ENCODERS[type(value)] = func
            return func(name, value, check_keys, opts)

    raise InvalidDocument("cannot convert value of type %s to bson" %
                          type(value))
677 | ||
678 | ||
def _element_to_bson(key, value, check_keys, opts):
    """Encode a single key, value pair, validating the key first."""
    if not isinstance(key, string_type):
        raise InvalidDocument("documents must have only string keys, "
                              "key was %r" % (key,))
    if check_keys:
        # Reject keys that look like operators or dotted paths.
        if key.startswith("$"):
            raise InvalidDocument("key %r must not start with '$'" % (key,))
        if "." in key:
            raise InvalidDocument("key %r must not contain '.'" % (key,))

    return _name_value_to_bson(_make_name(key), value, check_keys, opts)
692 | ||
693 | ||
def _dict_to_bson(doc, check_keys, opts, top_level=True):
    """Encode a document to BSON.

    At the top level the _id field, when present, is always encoded
    first; it is skipped in the main loop to avoid emitting it twice.
    """
    try:
        elements = []
        if top_level and "_id" in doc:
            elements.append(
                _name_value_to_bson(b"_id\x00", doc["_id"],
                                    check_keys, opts))
        for key, value in iteritems(doc):
            if top_level and key == "_id":
                continue
            elements.append(_element_to_bson(key, value, check_keys, opts))
    except AttributeError:
        raise TypeError("encoder expected a mapping type but got: %r" % (doc,))

    encoded = b"".join(elements)
    # +5 accounts for the int32 length prefix and the trailing NUL.
    return _PACK_INT(len(encoded) + 5) + encoded + b"\x00"
# Swap in the C extension's encoder when it was importable.
if _USE_C:
    _dict_to_bson = _cbson._dict_to_bson
712 | ||
713 | ||
# Shared, pre-built error raised whenever a codec_options argument has
# the wrong type.
_CODEC_OPTIONS_TYPE_ERROR = TypeError(
    "codec_options must be an instance of CodecOptions")
716 | ||
717 | ||
def decode_all(data, codec_options=DEFAULT_CODEC_OPTIONS):
    """Decode BSON data to multiple documents.

    `data` must be a string of concatenated, valid, BSON-encoded
    documents.

    :Parameters:
      - `data`: BSON data
      - `codec_options` (optional): An instance of
        :class:`~bson.codec_options.CodecOptions`.

    .. versionchanged:: 3.0
       Removed `compile_re` option: PyMongo now always represents BSON regular
       expressions as :class:`~bson.regex.Regex` objects. Use
       :meth:`~bson.regex.Regex.try_compile` to attempt to convert from a
       BSON regular expression to a Python regular expression object.

       Replaced `as_class`, `tz_aware`, and `uuid_subtype` options with
       `codec_options`.

    .. versionchanged:: 2.7
       Added `compile_re` option. If set to False, PyMongo represented BSON
       regular expressions as :class:`~bson.regex.Regex` objects instead of
       attempting to compile BSON regular expressions as Python native
       regular expressions, thus preventing errors for some incompatible
       patterns, see `PYTHON-500`_.

    .. _PYTHON-500: https://jira.mongodb.org/browse/PYTHON-500
    """
    if not isinstance(codec_options, CodecOptions):
        raise _CODEC_OPTIONS_TYPE_ERROR

    docs = []
    position = 0
    end = len(data) - 1
    try:
        while position < end:
            # Each document starts with its total size as a
            # little-endian int32 (which includes the size bytes).
            obj_size = _UNPACK_INT(data[position:position + 4])[0]
            if len(data) - position < obj_size:
                raise InvalidBSON("invalid object size")
            obj_end = position + obj_size - 1
            # A valid document is terminated by a NUL ("end of object").
            if data[obj_end:position + obj_size] != b"\x00":
                raise InvalidBSON("bad eoo")
            docs.append(_elements_to_dict(data,
                                          position + 4,
                                          obj_end,
                                          codec_options))
            position += obj_size
        return docs
    except InvalidBSON:
        # Already the right exception type; propagate unchanged.
        raise
    except Exception:
        # Change exception type to InvalidBSON but preserve traceback.
        _, exc_value, exc_tb = sys.exc_info()
        reraise(InvalidBSON, exc_value, exc_tb)
773 | ||
774 | ||
# Swap in the C extension's decoder when it was importable.
if _USE_C:
    decode_all = _cbson.decode_all
777 | ||
778 | ||
def decode_iter(data, codec_options=DEFAULT_CODEC_OPTIONS):
    """Decode BSON data to multiple documents as a generator.

    Works similarly to the decode_all function, but yields one document at a
    time.

    `data` must be a string of concatenated, valid, BSON-encoded
    documents.

    :Parameters:
      - `data`: BSON data
      - `codec_options` (optional): An instance of
        :class:`~bson.codec_options.CodecOptions`.

    .. versionchanged:: 3.0
       Replaced `as_class`, `tz_aware`, and `uuid_subtype` options with
       `codec_options`.

    .. versionadded:: 2.8
    """
    if not isinstance(codec_options, CodecOptions):
        raise _CODEC_OPTIONS_TYPE_ERROR

    offset = 0
    last_index = len(data) - 1
    while offset < last_index:
        # Each document begins with its total size as a little-endian int32.
        obj_size = _UNPACK_INT(data[offset:offset + 4])[0]
        raw_doc = data[offset:offset + obj_size]
        offset += obj_size
        yield _bson_to_dict(raw_doc, codec_options)
810 | ||
811 | ||
def decode_file_iter(file_obj, codec_options=DEFAULT_CODEC_OPTIONS):
    """Decode bson data from a file to multiple documents as a generator.

    Works similarly to the decode_all function, but reads from the file object
    in chunks and parses bson in chunks, yielding one document at a time.

    :Parameters:
      - `file_obj`: A file object containing BSON data.
      - `codec_options` (optional): An instance of
        :class:`~bson.codec_options.CodecOptions`.

    .. versionchanged:: 3.0
       Replaced `as_class`, `tz_aware`, and `uuid_subtype` options with
       `codec_options`.

    .. versionadded:: 2.8
    """
    while True:
        # Read the int32 size prefix of the next document.
        size_data = file_obj.read(4)
        if not size_data:
            # Clean EOF between documents: iteration is complete.
            break
        if len(size_data) != 4:
            raise InvalidBSON("cut off in middle of objsize")
        # The declared size includes the four size bytes already read.
        remaining = _UNPACK_INT(size_data)[0] - 4
        yield _bson_to_dict(size_data + file_obj.read(remaining),
                            codec_options)
839 | ||
840 | ||
def is_valid(bson):
    """Check that the given string represents valid :class:`BSON` data.

    Raises :class:`TypeError` if `bson` is not an instance of
    :class:`str` (:class:`bytes` in python 3). Returns ``True``
    if `bson` is valid :class:`BSON`, ``False`` otherwise.

    :Parameters:
      - `bson`: the data to be validated
    """
    if not isinstance(bson, bytes):
        raise TypeError("BSON data must be an instance of a subclass of bytes")

    # Validity is defined as "decodes without error".
    try:
        _bson_to_dict(bson, DEFAULT_CODEC_OPTIONS)
    except Exception:
        return False
    return True
859 | ||
860 | ||
class BSON(bytes):
    """BSON (Binary JSON) data.
    """

    @classmethod
    def encode(cls, document, check_keys=False,
               codec_options=DEFAULT_CODEC_OPTIONS):
        """Encode a document to a new :class:`BSON` instance.

        A document can be any mapping type (like :class:`dict`).

        Raises :class:`TypeError` if `document` is not a mapping type,
        or contains keys that are not instances of
        :class:`basestring` (:class:`str` in python 3). Raises
        :class:`~bson.errors.InvalidDocument` if `document` cannot be
        converted to :class:`BSON`.

        :Parameters:
          - `document`: mapping type representing a document
          - `check_keys` (optional): check if keys start with '$' or
            contain '.', raising :class:`~bson.errors.InvalidDocument` in
            either case
          - `codec_options` (optional): An instance of
            :class:`~bson.codec_options.CodecOptions`.

        .. versionchanged:: 3.0
           Replaced `uuid_subtype` option with `codec_options`.
        """
        if not isinstance(codec_options, CodecOptions):
            raise _CODEC_OPTIONS_TYPE_ERROR

        return cls(_dict_to_bson(document, check_keys, codec_options))

    def decode(self, codec_options=DEFAULT_CODEC_OPTIONS):
        """Decode this BSON data.

        By default, returns a BSON document represented as a Python
        :class:`dict`. To use a different :class:`MutableMapping` class,
        configure a :class:`~bson.codec_options.CodecOptions`::

            >>> import collections  # From Python standard library.
            >>> import bson
            >>> from mockupdb._bson.codec_options import CodecOptions
            >>> data = bson.BSON.encode({'a': 1})
            >>> decoded_doc = bson.BSON.decode(data)
            >>> type(decoded_doc)
            <type 'dict'>
            >>> options = CodecOptions(document_class=collections.OrderedDict)
            >>> decoded_doc = bson.BSON.decode(data, codec_options=options)
            >>> type(decoded_doc)
            <class 'collections.OrderedDict'>

        :Parameters:
          - `codec_options` (optional): An instance of
            :class:`~bson.codec_options.CodecOptions`.

        .. versionchanged:: 3.0
           Removed `compile_re` option: PyMongo now always represents BSON
           regular expressions as :class:`~bson.regex.Regex` objects. Use
           :meth:`~bson.regex.Regex.try_compile` to attempt to convert from a
           BSON regular expression to a Python regular expression object.

           Replaced `as_class`, `tz_aware`, and `uuid_subtype` options with
           `codec_options`.

        .. versionchanged:: 2.7
           Added `compile_re` option. If set to False, PyMongo represented BSON
           regular expressions as :class:`~bson.regex.Regex` objects instead of
           attempting to compile BSON regular expressions as Python native
           regular expressions, thus preventing errors for some incompatible
           patterns, see `PYTHON-500`_.

        .. _PYTHON-500: https://jira.mongodb.org/browse/PYTHON-500
        """
        if not isinstance(codec_options, CodecOptions):
            raise _CODEC_OPTIONS_TYPE_ERROR

        return _bson_to_dict(self, codec_options)
938 | ||
939 | ||
def has_c():
    """Return True when the C extension (_cbson) is in use."""
    return _USE_C
0 | # Copyright 2009-2015 MongoDB, Inc. | |
1 | # | |
2 | # Licensed under the Apache License, Version 2.0 (the "License"); | |
3 | # you may not use this file except in compliance with the License. | |
4 | # You may obtain a copy of the License at | |
5 | # | |
6 | # http://www.apache.org/licenses/LICENSE-2.0 | |
7 | # | |
8 | # Unless required by applicable law or agreed to in writing, software | |
9 | # distributed under the License is distributed on an "AS IS" BASIS, | |
10 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |
11 | # See the License for the specific language governing permissions and | |
12 | # limitations under the License. | |
13 | ||
14 | from uuid import UUID | |
15 | ||
16 | from mockupdb._bson.py3compat import PY3 | |
17 | ||
18 | """Tools for representing BSON binary data. | |
19 | """ | |
20 | ||
# BSON binary subtype codes (see http://bsonspec.org/spec.html).
BINARY_SUBTYPE = 0
"""BSON binary subtype for binary data.

This is the default subtype for binary data.
"""

FUNCTION_SUBTYPE = 1
"""BSON binary subtype for functions.
"""

OLD_BINARY_SUBTYPE = 2
"""Old BSON binary subtype for binary data.

This is the old default subtype, the current
default is :data:`BINARY_SUBTYPE`.
"""

OLD_UUID_SUBTYPE = 3
"""Old BSON binary subtype for a UUID.

:class:`uuid.UUID` instances will automatically be encoded
by :mod:`bson` using this subtype.

.. versionadded:: 2.1
"""

UUID_SUBTYPE = 4
"""BSON binary subtype for a UUID.

This is the new BSON binary subtype for UUIDs. The
current default is :data:`OLD_UUID_SUBTYPE` but will
change to this in a future release.

.. versionchanged:: 2.1
   Changed to subtype 4.
"""

# UUID *representation* codes: how a uuid.UUID's bytes are laid out.
# These share an integer space with the subtype codes above only
# incidentally.
STANDARD = UUID_SUBTYPE
"""The standard UUID representation.

:class:`uuid.UUID` instances will automatically be encoded to
and decoded from mockupdb._bson binary, using RFC-4122 byte order with
binary subtype :data:`UUID_SUBTYPE`.

.. versionadded:: 3.0
"""

PYTHON_LEGACY = OLD_UUID_SUBTYPE
"""The Python legacy UUID representation.

:class:`uuid.UUID` instances will automatically be encoded to
and decoded from mockupdb._bson binary, using RFC-4122 byte order with
binary subtype :data:`OLD_UUID_SUBTYPE`.

.. versionadded:: 3.0
"""

JAVA_LEGACY = 5
"""The Java legacy UUID representation.

:class:`uuid.UUID` instances will automatically be encoded to
and decoded from mockupdb._bson binary, using the Java driver's legacy
byte order with binary subtype :data:`OLD_UUID_SUBTYPE`.

.. versionadded:: 2.3
"""

CSHARP_LEGACY = 6
"""The C#/.net legacy UUID representation.

:class:`uuid.UUID` instances will automatically be encoded to
and decoded from mockupdb._bson binary, using the C# driver's legacy
byte order and binary subtype :data:`OLD_UUID_SUBTYPE`.

.. versionadded:: 2.3
"""

# Groupings used when validating uuid-related codec options.
ALL_UUID_SUBTYPES = (OLD_UUID_SUBTYPE, UUID_SUBTYPE)
ALL_UUID_REPRESENTATIONS = (STANDARD, PYTHON_LEGACY, JAVA_LEGACY, CSHARP_LEGACY)
UUID_REPRESENTATION_NAMES = {
    PYTHON_LEGACY: 'PYTHON_LEGACY',
    STANDARD: 'STANDARD',
    JAVA_LEGACY: 'JAVA_LEGACY',
    CSHARP_LEGACY: 'CSHARP_LEGACY'}

MD5_SUBTYPE = 5
"""BSON binary subtype for an MD5 hash.
"""

USER_DEFINED_SUBTYPE = 128
"""BSON binary subtype for any user defined structure.
"""
113 | ||
114 | ||
class Binary(bytes):
    """Representation of BSON binary data.

    Python strings are encoded as the BSON string type, so raw binary
    data must be wrapped in this class to be distinguishable from text
    when encoding to BSON.

    Raises TypeError if `data` is not an instance of :class:`str`
    (:class:`bytes` in python 3) or `subtype` is not an instance of
    :class:`int`. Raises ValueError if `subtype` is not in [0, 256).

    .. note::
      In python 3 instances of Binary with subtype 0 will be decoded
      directly to :class:`bytes`.

    :Parameters:
      - `data`: the binary data to represent
      - `subtype` (optional): the `binary subtype
        <http://bsonspec.org/#/specification>`_
        to use
    """

    _type_marker = 5

    def __new__(cls, data, subtype=BINARY_SUBTYPE):
        if not isinstance(data, bytes):
            raise TypeError("data must be an instance of bytes")
        if not isinstance(subtype, int):
            raise TypeError("subtype must be an instance of int")
        if not 0 <= subtype < 256:
            raise ValueError("subtype must be contained in [0, 256)")
        instance = bytes.__new__(cls, data)
        instance.__subtype = subtype
        return instance

    @property
    def subtype(self):
        """Subtype of this binary data.
        """
        return self.__subtype

    def __getnewargs__(self):
        # Work around http://bugs.python.org/issue7382
        data = super(Binary, self).__getnewargs__()[0]
        if PY3 and not isinstance(data, bytes):
            data = data.encode('latin-1')
        return data, self.__subtype

    def __eq__(self, other):
        if isinstance(other, Binary):
            return ((self.__subtype, bytes(self)) ==
                    (other.subtype, bytes(other)))
        # Returning NotImplemented here would make Binary("foo") == "foo"
        # evaluate True (Binary subclasses bytes), so return False instead.
        return False

    def __ne__(self, other):
        return not self == other

    def __hash__(self):
        # Fold the subtype into the hash so equal payloads with different
        # subtypes hash differently, matching __eq__.
        return super(Binary, self).__hash__() ^ hash(self.__subtype)

    def __repr__(self):
        return "Binary(%s, %s)" % (bytes.__repr__(self), self.__subtype)
181 | ||
182 | ||
class UUIDLegacy(Binary):
    """UUID wrapper to support working with UUIDs stored as PYTHON_LEGACY.

    .. doctest::

      >>> import uuid
      >>> from mockupdb._bson.binary import Binary, UUIDLegacy, STANDARD
      >>> from mockupdb._bson.codec_options import CodecOptions
      >>> my_uuid = uuid.uuid4()
      >>> coll = db.get_collection('test',
      ...                          CodecOptions(uuid_representation=STANDARD))
      >>> coll.insert_one({'uuid': Binary(my_uuid.bytes, 3)}).inserted_id
      ObjectId('...')
      >>> coll.find({'uuid': my_uuid}).count()
      0
      >>> coll.find({'uuid': UUIDLegacy(my_uuid)}).count()
      1
      >>> coll.find({'uuid': UUIDLegacy(my_uuid)})[0]['uuid']
      UUID('...')
      >>>
      >>> # Convert from subtype 3 to subtype 4
      >>> doc = coll.find_one({'uuid': UUIDLegacy(my_uuid)})
      >>> coll.replace_one({"_id": doc["_id"]}, doc).matched_count
      1
      >>> coll.find({'uuid': UUIDLegacy(my_uuid)}).count()
      0
      >>> coll.find({'uuid': {'$in': [UUIDLegacy(my_uuid), my_uuid]}}).count()
      1
      >>> coll.find_one({'uuid': my_uuid})['uuid']
      UUID('...')

    Raises TypeError if `obj` is not an instance of :class:`~uuid.UUID`.

    :Parameters:
      - `obj`: An instance of :class:`~uuid.UUID`.
    """

    def __new__(cls, obj):
        if not isinstance(obj, UUID):
            raise TypeError("obj must be an instance of uuid.UUID")
        # Store the UUID's RFC-4122 bytes with the legacy subtype 3.
        self = Binary.__new__(cls, obj.bytes, OLD_UUID_SUBTYPE)
        self.__uuid = obj
        return self

    def __getnewargs__(self):
        # Support copy and deepcopy
        return (self.__uuid,)

    @property
    def uuid(self):
        """UUID instance wrapped by this UUIDLegacy instance.
        """
        return self.__uuid

    def __repr__(self):
        return "UUIDLegacy('%s')" % self.__uuid
0 | # Copyright 2009-2015 MongoDB, Inc. | |
1 | # | |
2 | # Licensed under the Apache License, Version 2.0 (the "License"); | |
3 | # you may not use this file except in compliance with the License. | |
4 | # You may obtain a copy of the License at | |
5 | # | |
6 | # http://www.apache.org/licenses/LICENSE-2.0 | |
7 | # | |
8 | # Unless required by applicable law or agreed to in writing, software | |
9 | # distributed under the License is distributed on an "AS IS" BASIS, | |
10 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |
11 | # See the License for the specific language governing permissions and | |
12 | # limitations under the License. | |
13 | ||
14 | """Tools for representing JavaScript code in BSON. | |
15 | """ | |
16 | import collections | |
17 | ||
18 | from mockupdb._bson.py3compat import string_type | |
19 | ||
20 | ||
class Code(str):
    """BSON's JavaScript code type.

    Raises :class:`TypeError` if `code` is not an instance of
    :class:`basestring` (:class:`str` in python 3) or `scope`
    is not ``None`` or an instance of :class:`dict`.

    Scope variables can be set by passing a dictionary as the `scope`
    argument or by using keyword arguments. If a variable is set as a
    keyword argument it will override any setting for that variable in
    the `scope` dictionary.

    :Parameters:
      - `code`: string containing JavaScript code to be evaluated
      - `scope` (optional): dictionary representing the scope in which
        `code` should be evaluated - a mapping from identifiers (as
        strings) to values
      - `**kwargs` (optional): scope variables can also be passed as
        keyword arguments
    """

    _type_marker = 13

    def __new__(cls, code, scope=None, **kwargs):
        if not isinstance(code, string_type):
            raise TypeError("code must be an "
                            "instance of %s" % (string_type.__name__))

        self = str.__new__(cls, code)

        # When `code` is itself a Code instance, start from a *copy* of
        # its scope. Assigning code.scope directly (as before) aliased
        # the source instance's dict, so the update() calls below would
        # silently mutate the original Code's scope as well.
        try:
            self.__scope = dict(code.scope)
        except AttributeError:
            # `code` is a plain string with no scope of its own.
            self.__scope = {}

        if scope is not None:
            if not isinstance(scope, collections.Mapping):
                raise TypeError("scope must be an instance of dict")
            self.__scope.update(scope)

        # Keyword arguments override entries from `scope`.
        self.__scope.update(kwargs)

        return self

    @property
    def scope(self):
        """Scope dictionary for this instance.
        """
        return self.__scope

    def __repr__(self):
        return "Code(%s, %r)" % (str.__repr__(self), self.__scope)

    def __eq__(self, other):
        if isinstance(other, Code):
            return (self.__scope, str(self)) == (other.__scope, str(other))
        return False

    # Code objects carry a mutable scope dict, so they are unhashable.
    __hash__ = None

    def __ne__(self, other):
        return not self == other
0 | # Copyright 2014-2015 MongoDB, Inc. | |
1 | # | |
2 | # Licensed under the Apache License, Version 2.0 (the "License"); | |
3 | # you may not use this file except in compliance with the License. | |
4 | # You may obtain a copy of the License at | |
5 | # | |
6 | # http://www.apache.org/licenses/LICENSE-2.0 | |
7 | # | |
8 | # Unless required by applicable law or agreed to in writing, software | |
9 | # distributed under the License is distributed on an "AS IS" BASIS, | |
10 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |
11 | # See the License for the specific language governing permissions and | |
12 | # limitations under the License. | |
13 | ||
14 | """Tools for specifying BSON codec options.""" | |
15 | ||
16 | import datetime | |
17 | ||
18 | from collections import MutableMapping, namedtuple | |
19 | ||
20 | from mockupdb._bson.py3compat import string_type | |
21 | from mockupdb._bson.binary import (ALL_UUID_REPRESENTATIONS, | |
22 | PYTHON_LEGACY, | |
23 | UUID_REPRESENTATION_NAMES) | |
24 | ||
25 | ||
# Plain namedtuple base for CodecOptions: provides immutability, structural
# equality, and named-field access for the five codec settings.
_options_base = namedtuple(
    'CodecOptions',
    ('document_class', 'tz_aware', 'uuid_representation',
     'unicode_decode_error_handler', 'tzinfo'))
30 | ||
31 | ||
class CodecOptions(_options_base):
    """Encapsulates BSON options used in CRUD operations.

    :Parameters:
      - `document_class`: BSON documents returned in queries will be decoded
        to an instance of this class. Must be a subclass of
        :class:`~collections.MutableMapping`. Defaults to :class:`dict`.
      - `tz_aware`: If ``True``, BSON datetimes will be decoded to timezone
        aware instances of :class:`~datetime.datetime`. Otherwise they will be
        naive. Defaults to ``False``.
      - `uuid_representation`: The BSON representation to use when encoding
        and decoding instances of :class:`~uuid.UUID`. Defaults to
        :data:`~bson.binary.PYTHON_LEGACY`.
      - `unicode_decode_error_handler`: The error handler to use when decoding
        an invalid BSON string. Valid options include 'strict', 'replace', and
        'ignore'. Defaults to 'strict'.

      .. warning:: Care must be taken when changing
        `unicode_decode_error_handler` from its default value ('strict').
        The 'replace' and 'ignore' modes should not be used when documents
        retrieved from the server will be modified in the client application
        and stored back to the server.

      - `tzinfo`: A :class:`~datetime.tzinfo` subclass that specifies the
        timezone to/from which :class:`~datetime.datetime` objects should be
        encoded/decoded.

    """

    def __new__(cls, document_class=dict,
                tz_aware=False, uuid_representation=PYTHON_LEGACY,
                unicode_decode_error_handler="strict",
                tzinfo=None):
        if not issubclass(document_class, MutableMapping):
            raise TypeError("document_class must be dict, bson.son.SON, or "
                            "another subclass of collections.MutableMapping")
        if not isinstance(tz_aware, bool):
            raise TypeError("tz_aware must be True or False")
        if uuid_representation not in ALL_UUID_REPRESENTATIONS:
            raise ValueError("uuid_representation must be a value "
                             "from mockupdb._bson.binary.ALL_UUID_REPRESENTATIONS")
        # type(None), not bare None: isinstance() requires types in its
        # second argument, so the old `(string_type, None)` raised
        # TypeError from isinstance itself (instead of the intended
        # ValueError) whenever a non-string handler was passed.
        if not isinstance(unicode_decode_error_handler,
                          (string_type, type(None))):
            raise ValueError("unicode_decode_error_handler must be a string "
                             "or None")
        if tzinfo is not None:
            if not isinstance(tzinfo, datetime.tzinfo):
                raise TypeError(
                    "tzinfo must be an instance of datetime.tzinfo")
            # A tzinfo is useless on naive datetimes.
            if not tz_aware:
                raise ValueError(
                    "cannot specify tzinfo without also setting tz_aware=True")

        return tuple.__new__(
            cls, (document_class, tz_aware, uuid_representation,
                  unicode_decode_error_handler, tzinfo))

    def __repr__(self):
        document_class_repr = (
            'dict' if self.document_class is dict
            else repr(self.document_class))

        # Prefer the symbolic name (e.g. "PYTHON_LEGACY") over the raw int.
        uuid_rep_repr = UUID_REPRESENTATION_NAMES.get(self.uuid_representation,
                                                      self.uuid_representation)

        return (
            'CodecOptions(document_class=%s, tz_aware=%r, uuid_representation='
            '%s, unicode_decode_error_handler=%r, tzinfo=%r)' %
            (document_class_repr, self.tz_aware, uuid_rep_repr,
             self.unicode_decode_error_handler,
             self.tzinfo))
102 | ||
103 | ||
# Module-wide defaults: dict documents, naive datetimes, PYTHON_LEGACY
# UUIDs, 'strict' unicode decoding, no tzinfo.
DEFAULT_CODEC_OPTIONS = CodecOptions()
105 | ||
106 | ||
def _parse_codec_options(options):
    """Build a :class:`CodecOptions` from a dict of raw options.

    Each setting is looked up in ``options`` (note the URI-style key
    ``'uuidrepresentation'``, without underscores) and falls back to the
    corresponding field of :data:`DEFAULT_CODEC_OPTIONS`.
    """
    # CodecOptions keyword -> key under which callers supply the value.
    key_for = {
        'document_class': 'document_class',
        'tz_aware': 'tz_aware',
        'uuid_representation': 'uuidrepresentation',
        'unicode_decode_error_handler': 'unicode_decode_error_handler',
        'tzinfo': 'tzinfo',
    }
    kwargs = dict(
        (param, options.get(key, getattr(DEFAULT_CODEC_OPTIONS, param)))
        for param, key in key_for.items())
    return CodecOptions(**kwargs)
0 | # Copyright 2009-2015 MongoDB, Inc. | |
1 | # | |
2 | # Licensed under the Apache License, Version 2.0 (the "License"); | |
3 | # you may not use this file except in compliance with the License. | |
4 | # You may obtain a copy of the License at | |
5 | # | |
6 | # http://www.apache.org/licenses/LICENSE-2.0 | |
7 | # | |
8 | # Unless required by applicable law or agreed to in writing, software | |
9 | # distributed under the License is distributed on an "AS IS" BASIS, | |
10 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |
11 | # See the License for the specific language governing permissions and | |
12 | # limitations under the License. | |
13 | ||
14 | """Tools for manipulating DBRefs (references to MongoDB documents).""" | |
15 | ||
16 | from copy import deepcopy | |
17 | ||
18 | from mockupdb._bson.py3compat import iteritems, string_type | |
19 | from mockupdb._bson.son import SON | |
20 | ||
21 | ||
class DBRef(object):
    """A reference to a document stored in MongoDB.
    """

    # DBRef isn't actually a BSON "type" so this number was arbitrarily chosen.
    _type_marker = 100

    def __init__(self, collection, id, database=None, _extra={}, **kwargs):
        """Initialize a new :class:`DBRef`.

        Raises :class:`TypeError` if `collection` or `database` is not
        an instance of :class:`basestring` (:class:`str` in python 3).
        `database` is optional and allows references to documents to work
        across databases. Any additional keyword arguments will create
        additional fields in the resultant embedded document.

        :Parameters:
          - `collection`: name of the collection the document is stored in
          - `id`: the value of the document's ``"_id"`` field
          - `database` (optional): name of the database to reference
          - `**kwargs` (optional): additional keyword arguments will
            create additional, custom fields

        .. mongodoc:: dbrefs
        """
        if not isinstance(collection, string_type):
            raise TypeError("collection must be an "
                            "instance of %s" % string_type.__name__)
        if database is not None and not isinstance(database, string_type):
            raise TypeError("database must be an "
                            "instance of %s" % string_type.__name__)

        self.__collection = collection
        self.__id = id
        self.__database = database
        # _extra (used by __deepcopy__ below) is only read here, never
        # mutated, so the shared {} default is safe.
        kwargs.update(_extra)
        self.__kwargs = kwargs

    @property
    def collection(self):
        """Get the name of this DBRef's collection as unicode.
        """
        return self.__collection

    @property
    def id(self):
        """Get this DBRef's _id.
        """
        return self.__id

    @property
    def database(self):
        """Get the name of this DBRef's database.

        Returns None if this DBRef doesn't specify a database.
        """
        return self.__database

    def __getattr__(self, key):
        # Expose the custom fields passed as **kwargs as attributes.
        try:
            return self.__kwargs[key]
        except KeyError:
            raise AttributeError(key)

    # Have to provide __setstate__ to avoid
    # infinite recursion since we override
    # __getattr__.
    def __setstate__(self, state):
        self.__dict__.update(state)

    def as_doc(self):
        """Get the SON document representation of this DBRef.

        Generally not needed by application developers
        """
        # SON preserves the conventional $ref/$id/$db key order.
        doc = SON([("$ref", self.collection),
                   ("$id", self.id)])
        if self.database is not None:
            doc["$db"] = self.database
        doc.update(self.__kwargs)
        return doc

    def __repr__(self):
        extra = "".join([", %s=%r" % (k, v)
                         for k, v in iteritems(self.__kwargs)])
        if self.database is None:
            return "DBRef(%r, %r%s)" % (self.collection, self.id, extra)
        return "DBRef(%r, %r, %r%s)" % (self.collection, self.id,
                                        self.database, extra)

    def __eq__(self, other):
        if isinstance(other, DBRef):
            us = (self.__database, self.__collection,
                  self.__id, self.__kwargs)
            them = (other.__database, other.__collection,
                    other.__id, other.__kwargs)
            return us == them
        # Let the other operand's reflected __eq__ have a chance.
        return NotImplemented

    def __ne__(self, other):
        return not self == other

    def __hash__(self):
        """Get a hash value for this :class:`DBRef`."""
        # kwargs is a dict (unhashable), so hash a sorted item tuple.
        return hash((self.__collection, self.__id, self.__database,
                     tuple(sorted(self.__kwargs.items()))))

    def __deepcopy__(self, memo):
        """Support function for `copy.deepcopy()`."""
        # The copied kwargs dict is passed positionally as _extra.
        return DBRef(deepcopy(self.__collection, memo),
                     deepcopy(self.__id, memo),
                     deepcopy(self.__database, memo),
                     deepcopy(self.__kwargs, memo))
0 | # Copyright 2009-2015 MongoDB, Inc. | |
1 | # | |
2 | # Licensed under the Apache License, Version 2.0 (the "License"); | |
3 | # you may not use this file except in compliance with the License. | |
4 | # You may obtain a copy of the License at | |
5 | # | |
6 | # http://www.apache.org/licenses/LICENSE-2.0 | |
7 | # | |
8 | # Unless required by applicable law or agreed to in writing, software | |
9 | # distributed under the License is distributed on an "AS IS" BASIS, | |
10 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |
11 | # See the License for the specific language governing permissions and | |
12 | # limitations under the License. | |
13 | ||
14 | """Exceptions raised by the BSON package.""" | |
15 | ||
16 | ||
class BSONError(Exception):
    """Base class for all BSON exceptions.

    Catch this to handle any error raised by the BSON package.
    """
20 | ||
21 | ||
class InvalidBSON(BSONError):
    """Raised when trying to create a BSON object from invalid data.
    """
    # Decoder-side error: the byte stream could not be parsed as BSON.
25 | ||
26 | ||
class InvalidStringData(BSONError):
    """Raised when trying to encode a string containing non-UTF8 data.
    """
    # Encoder-side error: BSON strings must be valid UTF-8.
30 | ||
31 | ||
class InvalidDocument(BSONError):
    """Raised when trying to create a BSON object from an invalid document.
    """
    # Encoder-side error: e.g. a key or value that cannot be represented.
35 | ||
36 | ||
class InvalidId(BSONError):
    """Raised when trying to create an ObjectId from invalid data.
    """
    # See objectid._raise_invalid_id: input was neither 12 bytes nor a
    # 24-character hex string.
0 | # Copyright 2014-2015 MongoDB, Inc. | |
1 | # | |
2 | # Licensed under the Apache License, Version 2.0 (the "License"); | |
3 | # you may not use this file except in compliance with the License. | |
4 | # You may obtain a copy of the License at | |
5 | # | |
6 | # http://www.apache.org/licenses/LICENSE-2.0 | |
7 | # | |
8 | # Unless required by applicable law or agreed to in writing, software | |
9 | # distributed under the License is distributed on an "AS IS" BASIS, | |
10 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |
11 | # See the License for the specific language governing permissions and | |
12 | # limitations under the License. | |
13 | ||
14 | """A BSON wrapper for long (int in python3)""" | |
15 | ||
16 | from mockupdb._bson.py3compat import PY3 | |
17 | ||
if PY3:
    # Python 3 has no separate ``long`` type; alias it so Int64 can
    # subclass ``long`` on both major versions.
    long = int
20 | ||
21 | ||
class Int64(long):
    """Representation of the BSON int64 type.

    This is necessary because every integral number is an :class:`int` in
    Python 3. Small integral numbers are encoded to BSON int32 by default,
    but Int64 numbers will always be encoded to BSON int64.

    :Parameters:
      - `value`: the numeric value to represent
    """

    # BSON type byte for int64; consulted by the BSON encoder.
    _type_marker = 18
0 | # Copyright 2009-2015 MongoDB, Inc. | |
1 | # | |
2 | # Licensed under the Apache License, Version 2.0 (the "License"); | |
3 | # you may not use this file except in compliance with the License. | |
4 | # You may obtain a copy of the License at | |
5 | # | |
6 | # http://www.apache.org/licenses/LICENSE-2.0 | |
7 | # | |
8 | # Unless required by applicable law or agreed to in writing, software | |
9 | # distributed under the License is distributed on an "AS IS" BASIS, | |
10 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |
11 | # See the License for the specific language governing permissions and | |
12 | # limitations under the License. | |
13 | ||
14 | """Tools for using Python's :mod:`json` module with BSON documents. | |
15 | ||
16 | This module provides two helper methods `dumps` and `loads` that wrap the | |
17 | native :mod:`json` methods and provide explicit BSON conversion to and from | |
18 | json. This allows for specialized encoding and decoding of BSON documents | |
19 | into `Mongo Extended JSON | |
20 | <http://www.mongodb.org/display/DOCS/Mongo+Extended+JSON>`_'s *Strict* | |
21 | mode. This lets you encode / decode BSON documents to JSON even when | |
22 | they use special BSON types. | |
23 | ||
24 | Example usage (serialization): | |
25 | ||
26 | .. doctest:: | |
27 | ||
28 | >>> from mockupdb._bson import Binary, Code | |
29 | >>> from mockupdb._bson.json_util import dumps | |
30 | >>> dumps([{'foo': [1, 2]}, | |
31 | ... {'bar': {'hello': 'world'}}, | |
32 | ... {'code': Code("function x() { return 1; }")}, | |
33 | ... {'bin': Binary("\x01\x02\x03\x04")}]) | |
34 | '[{"foo": [1, 2]}, {"bar": {"hello": "world"}}, {"code": {"$code": "function x() { return 1; }", "$scope": {}}}, {"bin": {"$binary": "AQIDBA==", "$type": "00"}}]' | |
35 | ||
36 | Example usage (deserialization): | |
37 | ||
38 | .. doctest:: | |
39 | ||
40 | >>> from mockupdb._bson.json_util import loads | |
41 | >>> loads('[{"foo": [1, 2]}, {"bar": {"hello": "world"}}, {"code": {"$scope": {}, "$code": "function x() { return 1; }"}}, {"bin": {"$type": "00", "$binary": "AQIDBA=="}}]') | |
42 | [{u'foo': [1, 2]}, {u'bar': {u'hello': u'world'}}, {u'code': Code('function x() { return 1; }', {})}, {u'bin': Binary('...', 0)}] | |
43 | ||
44 | Alternatively, you can manually pass the `default` to :func:`json.dumps`. | |
45 | It won't handle :class:`~bson.binary.Binary` and :class:`~bson.code.Code` | |
46 | instances (as they are extended strings you can't provide custom defaults), | |
47 | but it will be faster as there is less recursion. | |
48 | ||
49 | .. versionchanged:: 2.8 | |
50 | The output format for :class:`~bson.timestamp.Timestamp` has changed from | |
51 | '{"t": <int>, "i": <int>}' to '{"$timestamp": {"t": <int>, "i": <int>}}'. | |
52 | This new format will be decoded to an instance of | |
53 | :class:`~bson.timestamp.Timestamp`. The old format will continue to be | |
54 | decoded to a python dict as before. Encoding to the old format is no longer | |
55 | supported as it was never correct and loses type information. | |
56 | Added support for $numberLong and $undefined - new in MongoDB 2.6 - and | |
57 | parsing $date in ISO-8601 format. | |
58 | ||
59 | .. versionchanged:: 2.7 | |
60 | Preserves order when rendering SON, Timestamp, Code, Binary, and DBRef | |
61 | instances. | |
62 | ||
63 | .. versionchanged:: 2.3 | |
64 | Added dumps and loads helpers to automatically handle conversion to and | |
65 | from json and supports :class:`~bson.binary.Binary` and | |
66 | :class:`~bson.code.Code` | |
67 | """ | |
68 | ||
69 | import base64 | |
70 | import calendar | |
71 | import collections | |
72 | import datetime | |
73 | import json | |
74 | import re | |
75 | import uuid | |
76 | ||
77 | from mockupdb._bson import EPOCH_AWARE, RE_TYPE, SON | |
78 | from mockupdb._bson.binary import Binary | |
79 | from mockupdb._bson.code import Code | |
80 | from mockupdb._bson.dbref import DBRef | |
81 | from mockupdb._bson.int64 import Int64 | |
82 | from mockupdb._bson.max_key import MaxKey | |
83 | from mockupdb._bson.min_key import MinKey | |
84 | from mockupdb._bson.objectid import ObjectId | |
85 | from mockupdb._bson.regex import Regex | |
86 | from mockupdb._bson.timestamp import Timestamp | |
87 | from mockupdb._bson.tz_util import utc | |
88 | ||
89 | from mockupdb._bson.py3compat import PY3, iteritems, string_type, text_type | |
90 | ||
91 | ||
# Maps the flag characters found in extended-JSON "$options" strings to
# the corresponding :mod:`re` module flags (see object_hook's $regex case).
_RE_OPT_TABLE = {
    "i": re.I,
    "l": re.L,
    "m": re.M,
    "s": re.S,
    "u": re.U,
    "x": re.X,
}
100 | ||
101 | ||
def dumps(obj, *args, **kwargs):
    """Serialize ``obj`` to a JSON string, converting BSON types first.

    Wraps :class:`json.dumps`; every positional and keyword argument is
    forwarded to it. Handles all BSON types including
    :class:`~bson.binary.Binary` and :class:`~bson.code.Code`.

    .. versionchanged:: 2.7
       Preserves order when rendering SON, Timestamp, Code, Binary, and DBRef
       instances.
    """
    converted = _json_convert(obj)
    return json.dumps(converted, *args, **kwargs)
113 | ||
114 | ||
def loads(s, *args, **kwargs):
    """Helper function that wraps :class:`json.loads`.

    Automatically passes the object_hook for BSON type conversion.
    An ``object_hook`` supplied by the caller takes precedence; the old
    behavior silently overwrote it.
    """
    # setdefault (not assignment) so an explicit caller-provided hook is
    # honored rather than clobbered; also drops a pointless lambda wrapper.
    kwargs.setdefault('object_hook', object_hook)
    return json.loads(s, *args, **kwargs)
122 | ||
123 | ||
def _json_convert(obj):
    """Recursively rewrite ``obj`` into json-encodable form.

    Mappings become SON (order preserved), other non-string iterables
    become lists, BSON types go through :func:`default`, and anything
    :func:`default` rejects is returned unchanged for ``json`` to handle.
    """
    if hasattr(obj, 'iteritems') or hasattr(obj, 'items'):  # PY3 support
        return SON((key, _json_convert(value))
                   for key, value in iteritems(obj))
    if hasattr(obj, '__iter__') and not isinstance(obj, (text_type, bytes)):
        return [_json_convert(item) for item in obj]
    try:
        return default(obj)
    except TypeError:
        # Not a BSON type; let json serialize it natively.
        return obj
136 | ||
137 | ||
def object_hook(dct):
    """``json.loads`` object_hook that revives BSON types.

    Checks ``dct`` for MongoDB extended-JSON ``$``-prefixed keys and
    returns the matching BSON type instance; dicts with no recognized
    key are returned unchanged.
    """
    if "$oid" in dct:
        return ObjectId(str(dct["$oid"]))
    if "$ref" in dct:
        return DBRef(dct["$ref"], dct["$id"], dct.get("$db", None))
    if "$date" in dct:
        dtm = dct["$date"]
        # mongoexport 2.6 and newer
        if isinstance(dtm, string_type):
            # ISO-8601: parse the fixed-width 23-char date/time prefix,
            # then apply any trailing UTC offset by hand.
            aware = datetime.datetime.strptime(
                dtm[:23], "%Y-%m-%dT%H:%M:%S.%f").replace(tzinfo=utc)
            offset = dtm[23:]
            if not offset or offset == 'Z':
                # UTC
                return aware
            else:
                if len(offset) == 5:
                    # Offset from mongoexport is in format (+|-)HHMM
                    secs = (int(offset[1:3]) * 3600 + int(offset[3:]) * 60)
                elif ':' in offset and len(offset) == 6:
                    # RFC-3339 format (+|-)HH:MM
                    hours, minutes = offset[1:].split(':')
                    secs = (int(hours) * 3600 + int(minutes) * 60)
                else:
                    # Not RFC-3339 compliant or mongoexport output.
                    raise ValueError("invalid format for offset")
                if offset[0] == "-":
                    secs *= -1
                return aware - datetime.timedelta(seconds=secs)
        # mongoexport 2.6 and newer, time before the epoch (SERVER-15275)
        elif isinstance(dtm, collections.Mapping):
            secs = float(dtm["$numberLong"]) / 1000.0
        # mongoexport before 2.6
        else:
            secs = float(dtm) / 1000.0
        return EPOCH_AWARE + datetime.timedelta(seconds=secs)
    if "$regex" in dct:
        flags = 0
        # PyMongo always adds $options but some other tools may not.
        for opt in dct.get("$options", ""):
            flags |= _RE_OPT_TABLE.get(opt, 0)
        return Regex(dct["$regex"], flags)
    if "$minKey" in dct:
        return MinKey()
    if "$maxKey" in dct:
        return MaxKey()
    if "$binary" in dct:
        # $type may arrive as an int or a hex string.
        if isinstance(dct["$type"], int):
            dct["$type"] = "%02x" % dct["$type"]
        subtype = int(dct["$type"], 16)
        if subtype >= 0xffffff80:  # Handle mongoexport values
            subtype = int(dct["$type"][6:], 16)
        return Binary(base64.b64decode(dct["$binary"].encode()), subtype)
    if "$code" in dct:
        return Code(dct["$code"], dct.get("$scope"))
    if "$uuid" in dct:
        return uuid.UUID(dct["$uuid"])
    if "$undefined" in dct:
        # BSON "undefined" is deprecated; decode it as None.
        return None
    if "$numberLong" in dct:
        return Int64(dct["$numberLong"])
    if "$timestamp" in dct:
        tsp = dct["$timestamp"]
        return Timestamp(tsp["t"], tsp["i"])
    return dct
203 | ||
204 | ||
def default(obj):
    """``json.dumps`` default hook: render BSON types as extended JSON.

    Raises :class:`TypeError` for unrecognized objects, per the
    ``json.dumps`` ``default=`` protocol.
    """
    # We preserve key order when rendering SON, DBRef, etc. as JSON by
    # returning a SON for those types instead of a dict.
    if isinstance(obj, ObjectId):
        return {"$oid": str(obj)}
    if isinstance(obj, DBRef):
        return _json_convert(obj.as_doc())
    if isinstance(obj, datetime.datetime):
        # TODO share this code w/ bson.py?
        # Normalize aware datetimes to UTC before taking millis.
        if obj.utcoffset() is not None:
            obj = obj - obj.utcoffset()
        millis = int(calendar.timegm(obj.timetuple()) * 1000 +
                     obj.microsecond / 1000)
        return {"$date": millis}
    if isinstance(obj, (RE_TYPE, Regex)):
        # Translate re flags back into the $options flag characters.
        flags = ""
        if obj.flags & re.IGNORECASE:
            flags += "i"
        if obj.flags & re.LOCALE:
            flags += "l"
        if obj.flags & re.MULTILINE:
            flags += "m"
        if obj.flags & re.DOTALL:
            flags += "s"
        if obj.flags & re.UNICODE:
            flags += "u"
        if obj.flags & re.VERBOSE:
            flags += "x"
        if isinstance(obj.pattern, text_type):
            pattern = obj.pattern
        else:
            pattern = obj.pattern.decode('utf-8')
        return SON([("$regex", pattern), ("$options", flags)])
    if isinstance(obj, MinKey):
        return {"$minKey": 1}
    if isinstance(obj, MaxKey):
        return {"$maxKey": 1}
    if isinstance(obj, Timestamp):
        return {"$timestamp": SON([("t", obj.time), ("i", obj.inc)])}
    if isinstance(obj, Code):
        return SON([('$code', str(obj)), ('$scope', obj.scope)])
    # NOTE: Binary must be tested before the plain-bytes branch below,
    # since Binary is a bytes subclass on Python 3.
    if isinstance(obj, Binary):
        return SON([
            ('$binary', base64.b64encode(obj).decode()),
            ('$type', "%02x" % obj.subtype)])
    if PY3 and isinstance(obj, bytes):
        return SON([
            ('$binary', base64.b64encode(obj).decode()),
            ('$type', "00")])
    if isinstance(obj, uuid.UUID):
        return {"$uuid": obj.hex}
    raise TypeError("%r is not JSON serializable" % obj)
0 | # Copyright 2010-2015 MongoDB, Inc. | |
1 | # | |
2 | # Licensed under the Apache License, Version 2.0 (the "License"); | |
3 | # you may not use this file except in compliance with the License. | |
4 | # You may obtain a copy of the License at | |
5 | # | |
6 | # http://www.apache.org/licenses/LICENSE-2.0 | |
7 | # | |
8 | # Unless required by applicable law or agreed to in writing, software | |
9 | # distributed under the License is distributed on an "AS IS" BASIS, | |
10 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |
11 | # See the License for the specific language governing permissions and | |
12 | # limitations under the License. | |
13 | ||
14 | """Representation for the MongoDB internal MaxKey type. | |
15 | """ | |
16 | ||
17 | ||
class MaxKey(object):
    """MongoDB internal MaxKey type.

    A ``MaxKey`` is equal only to other ``MaxKey`` instances, compares
    ``>=`` everything, and ``>`` anything that is not itself a ``MaxKey``.

    .. versionchanged:: 2.7
       ``MaxKey`` now implements comparison operators.
    """

    _type_marker = 127

    def __eq__(self, other):
        # Equal only to other MaxKey instances.
        return isinstance(other, MaxKey)

    def __ne__(self, other):
        # __eq__ always returns a bool, so != is its direct negation.
        return not isinstance(other, MaxKey)

    def __hash__(self):
        return hash(self._type_marker)

    def __lt__(self, _other):
        # Nothing sorts above MaxKey.
        return False

    def __le__(self, other):
        return isinstance(other, MaxKey)

    def __gt__(self, other):
        return not isinstance(other, MaxKey)

    def __ge__(self, _other):
        # MaxKey is >= every value, including other MaxKeys.
        return True

    def __repr__(self):
        return "MaxKey()"
0 | # Copyright 2010-2015 MongoDB, Inc. | |
1 | # | |
2 | # Licensed under the Apache License, Version 2.0 (the "License"); | |
3 | # you may not use this file except in compliance with the License. | |
4 | # You may obtain a copy of the License at | |
5 | # | |
6 | # http://www.apache.org/licenses/LICENSE-2.0 | |
7 | # | |
8 | # Unless required by applicable law or agreed to in writing, software | |
9 | # distributed under the License is distributed on an "AS IS" BASIS, | |
10 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |
11 | # See the License for the specific language governing permissions and | |
12 | # limitations under the License. | |
13 | ||
14 | """Representation for the MongoDB internal MinKey type. | |
15 | """ | |
16 | ||
17 | ||
class MinKey(object):
    """MongoDB internal MinKey type.

    A ``MinKey`` is equal only to other ``MinKey`` instances, compares
    ``<=`` everything, and ``<`` anything that is not itself a ``MinKey``.

    .. versionchanged:: 2.7
       ``MinKey`` now implements comparison operators.
    """

    _type_marker = 255

    def __eq__(self, other):
        # Equal only to other MinKey instances.
        return isinstance(other, MinKey)

    def __ne__(self, other):
        # __eq__ always returns a bool, so != is its direct negation.
        return not isinstance(other, MinKey)

    def __hash__(self):
        return hash(self._type_marker)

    def __lt__(self, other):
        return not isinstance(other, MinKey)

    def __le__(self, _other):
        # MinKey is <= every value, including other MinKeys.
        return True

    def __gt__(self, _other):
        # Nothing sorts below MinKey.
        return False

    def __ge__(self, other):
        return isinstance(other, MinKey)

    def __repr__(self):
        return "MinKey()"
0 | # Copyright 2009-2015 MongoDB, Inc. | |
1 | # | |
2 | # Licensed under the Apache License, Version 2.0 (the "License"); | |
3 | # you may not use this file except in compliance with the License. | |
4 | # You may obtain a copy of the License at | |
5 | # | |
6 | # http://www.apache.org/licenses/LICENSE-2.0 | |
7 | # | |
8 | # Unless required by applicable law or agreed to in writing, software | |
9 | # distributed under the License is distributed on an "AS IS" BASIS, | |
10 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |
11 | # See the License for the specific language governing permissions and | |
12 | # limitations under the License. | |
13 | ||
14 | """Tools for working with MongoDB `ObjectIds | |
15 | <http://dochub.mongodb.org/core/objectids>`_. | |
16 | """ | |
17 | ||
18 | import binascii | |
19 | import calendar | |
20 | import datetime | |
21 | import hashlib | |
22 | import os | |
23 | import random | |
24 | import socket | |
25 | import struct | |
26 | import threading | |
27 | import time | |
28 | ||
29 | from mockupdb._bson.errors import InvalidId | |
30 | from mockupdb._bson.py3compat import PY3, bytes_from_hex, string_type, text_type | |
31 | from mockupdb._bson.tz_util import utc | |
32 | ||
33 | ||
def _machine_bytes():
    """Return the 3-byte machine portion of an ObjectId.

    Derived from an md5 digest of the hostname, truncated to 3 bytes.
    """
    digest = hashlib.md5()
    hostname = socket.gethostname()
    if PY3:
        # gethostname() returns unicode on Python 3 and md5 needs bytes.
        digest.update(hostname.encode())
    else:
        # On Python 2 the hostname is already a byte string; calling
        # encode() would fail for non-ascii hostnames.
        digest.update(hostname)
    return digest.digest()[0:3]
46 | ||
47 | ||
def _raise_invalid_id(oid):
    """Raise :class:`InvalidId` describing the rejected ObjectId seed."""
    message = ("%r is not a valid ObjectId, it must be a 12-byte input"
               " or a 24-character hex string" % oid)
    raise InvalidId(message)
52 | ||
53 | ||
class ObjectId(object):
    """A MongoDB ObjectId.
    """

    # Counter for the final 3 id bytes: starts at a random value and is
    # guarded by _inc_lock so concurrent threads never reuse a value.
    _inc = random.randint(0, 0xFFFFFF)
    _inc_lock = threading.Lock()

    # 3-byte machine identifier, computed once per process at import time.
    _machine_bytes = _machine_bytes()

    # NOTE(review): ('__id') is a plain string, not a 1-tuple; __slots__
    # accepts a single string, so this still declares exactly one slot.
    __slots__ = ('__id')

    _type_marker = 7
66 | ||
    def __init__(self, oid=None):
        """Initialize a new ObjectId.

        An ObjectId is a 12-byte unique identifier consisting of:

          - a 4-byte value representing the seconds since the Unix epoch,
          - a 3-byte machine identifier,
          - a 2-byte process id, and
          - a 3-byte counter, starting with a random value.

        By default, ``ObjectId()`` creates a new unique identifier. The
        optional parameter `oid` can be an :class:`ObjectId`, or any 12
        :class:`bytes` or, in Python 2, any 12-character :class:`str`.

        For example, the 12 bytes b'foo-bar-quux' do not follow the ObjectId
        specification but they are acceptable input::

          >>> ObjectId(b'foo-bar-quux')
          ObjectId('666f6f2d6261722d71757578')

        `oid` can also be a :class:`unicode` or :class:`str` of 24 hex digits::

          >>> ObjectId('0123456789ab0123456789ab')
          ObjectId('0123456789ab0123456789ab')
          >>>
          >>> # A u-prefixed unicode literal:
          >>> ObjectId(u'0123456789ab0123456789ab')
          ObjectId('0123456789ab0123456789ab')

        Raises :class:`~bson.errors.InvalidId` if `oid` is not 12 bytes nor
        24 hex digits, or :class:`TypeError` if `oid` is not an accepted type.

        :Parameters:
          - `oid` (optional): a valid ObjectId.

        .. mongodoc:: objectids
        """
        if oid is None:
            # No seed: generate a fresh, unique id.
            self.__generate()
        elif isinstance(oid, bytes) and len(oid) == 12:
            # Exactly 12 bytes: adopt them verbatim.
            self.__id = oid
        else:
            # Anything else goes through __validate (defined elsewhere in
            # this class), which handles hex strings / ObjectId inputs and
            # raises InvalidId or TypeError otherwise.
            self.__validate(oid)
110 | ||
111 | @classmethod | |
112 | def from_datetime(cls, generation_time): | |
113 | """Create a dummy ObjectId instance with a specific generation time. | |
114 | ||
115 | This method is useful for doing range queries on a field | |
116 | containing :class:`ObjectId` instances. | |
117 | ||
118 | .. warning:: | |
119 | It is not safe to insert a document containing an ObjectId | |
120 | generated using this method. This method deliberately | |
121 | eliminates the uniqueness guarantee that ObjectIds | |
122 | generally provide. ObjectIds generated with this method | |
123 | should be used exclusively in queries. | |
124 | ||
125 | `generation_time` will be converted to UTC. Naive datetime | |
126 | instances will be treated as though they already contain UTC. | |
127 | ||
128 | An example using this helper to get documents where ``"_id"`` | |
129 | was generated before January 1, 2010 would be: | |
130 | ||
131 | >>> gen_time = datetime.datetime(2010, 1, 1) | |
132 | >>> dummy_id = ObjectId.from_datetime(gen_time) | |
133 | >>> result = collection.find({"_id": {"$lt": dummy_id}}) | |
134 | ||
135 | :Parameters: | |
136 | - `generation_time`: :class:`~datetime.datetime` to be used | |
137 | as the generation time for the resulting ObjectId. | |
138 | """ | |
139 | if generation_time.utcoffset() is not None: | |
140 | generation_time = generation_time - generation_time.utcoffset() | |
141 | timestamp = calendar.timegm(generation_time.timetuple()) | |
142 | oid = struct.pack( | |
143 | ">i", int(timestamp)) + b"\x00\x00\x00\x00\x00\x00\x00\x00" | |
144 | return cls(oid) | |
145 | ||
146 | @classmethod | |
147 | def is_valid(cls, oid): | |
148 | """Checks if a `oid` string is valid or not. | |
149 | ||
150 | :Parameters: | |
151 | - `oid`: the object id to validate | |
152 | ||
153 | .. versionadded:: 2.3 | |
154 | """ | |
155 | if not oid: | |
156 | return False | |
157 | ||
158 | try: | |
159 | ObjectId(oid) | |
160 | return True | |
161 | except (InvalidId, TypeError): | |
162 | return False | |
163 | ||
164 | def __generate(self): | |
165 | """Generate a new value for this ObjectId. | |
166 | """ | |
167 | ||
168 | # 4 bytes current time | |
169 | oid = struct.pack(">i", int(time.time())) | |
170 | ||
171 | # 3 bytes machine | |
172 | oid += ObjectId._machine_bytes | |
173 | ||
174 | # 2 bytes pid | |
175 | oid += struct.pack(">H", os.getpid() % 0xFFFF) | |
176 | ||
177 | # 3 bytes inc | |
178 | with ObjectId._inc_lock: | |
179 | oid += struct.pack(">i", ObjectId._inc)[1:4] | |
180 | ObjectId._inc = (ObjectId._inc + 1) % 0xFFFFFF | |
181 | ||
182 | self.__id = oid | |
183 | ||
184 | def __validate(self, oid): | |
185 | """Validate and use the given id for this ObjectId. | |
186 | ||
187 | Raises TypeError if id is not an instance of | |
188 | (:class:`basestring` (:class:`str` or :class:`bytes` | |
189 | in python 3), ObjectId) and InvalidId if it is not a | |
190 | valid ObjectId. | |
191 | ||
192 | :Parameters: | |
193 | - `oid`: a valid ObjectId | |
194 | """ | |
195 | if isinstance(oid, ObjectId): | |
196 | self.__id = oid.binary | |
197 | # bytes or unicode in python 2, str in python 3 | |
198 | elif isinstance(oid, string_type): | |
199 | if len(oid) == 24: | |
200 | try: | |
201 | self.__id = bytes_from_hex(oid) | |
202 | except (TypeError, ValueError): | |
203 | _raise_invalid_id(oid) | |
204 | else: | |
205 | _raise_invalid_id(oid) | |
206 | else: | |
207 | raise TypeError("id must be an instance of (bytes, %s, ObjectId), " | |
208 | "not %s" % (text_type.__name__, type(oid))) | |
209 | ||
210 | @property | |
211 | def binary(self): | |
212 | """12-byte binary representation of this ObjectId. | |
213 | """ | |
214 | return self.__id | |
215 | ||
216 | @property | |
217 | def generation_time(self): | |
218 | """A :class:`datetime.datetime` instance representing the time of | |
219 | generation for this :class:`ObjectId`. | |
220 | ||
221 | The :class:`datetime.datetime` is timezone aware, and | |
222 | represents the generation time in UTC. It is precise to the | |
223 | second. | |
224 | """ | |
225 | timestamp = struct.unpack(">i", self.__id[0:4])[0] | |
226 | return datetime.datetime.fromtimestamp(timestamp, utc) | |
227 | ||
228 | def __getstate__(self): | |
229 | """return value of object for pickling. | |
230 | needed explicitly because __slots__() defined. | |
231 | """ | |
232 | return self.__id | |
233 | ||
234 | def __setstate__(self, value): | |
235 | """explicit state set from pickling | |
236 | """ | |
237 | # Provide backwards compatability with OIDs | |
238 | # pickled with pymongo-1.9 or older. | |
239 | if isinstance(value, dict): | |
240 | oid = value["_ObjectId__id"] | |
241 | else: | |
242 | oid = value | |
243 | # ObjectIds pickled in python 2.x used `str` for __id. | |
244 | # In python 3.x this has to be converted to `bytes` | |
245 | # by encoding latin-1. | |
246 | if PY3 and isinstance(oid, text_type): | |
247 | self.__id = oid.encode('latin-1') | |
248 | else: | |
249 | self.__id = oid | |
250 | ||
251 | def __str__(self): | |
252 | if PY3: | |
253 | return binascii.hexlify(self.__id).decode() | |
254 | return binascii.hexlify(self.__id) | |
255 | ||
256 | def __repr__(self): | |
257 | return "ObjectId('%s')" % (str(self),) | |
258 | ||
259 | def __eq__(self, other): | |
260 | if isinstance(other, ObjectId): | |
261 | return self.__id == other.binary | |
262 | return NotImplemented | |
263 | ||
264 | def __ne__(self, other): | |
265 | if isinstance(other, ObjectId): | |
266 | return self.__id != other.binary | |
267 | return NotImplemented | |
268 | ||
269 | def __lt__(self, other): | |
270 | if isinstance(other, ObjectId): | |
271 | return self.__id < other.binary | |
272 | return NotImplemented | |
273 | ||
274 | def __le__(self, other): | |
275 | if isinstance(other, ObjectId): | |
276 | return self.__id <= other.binary | |
277 | return NotImplemented | |
278 | ||
279 | def __gt__(self, other): | |
280 | if isinstance(other, ObjectId): | |
281 | return self.__id > other.binary | |
282 | return NotImplemented | |
283 | ||
284 | def __ge__(self, other): | |
285 | if isinstance(other, ObjectId): | |
286 | return self.__id >= other.binary | |
287 | return NotImplemented | |
288 | ||
289 | def __hash__(self): | |
290 | """Get a hash value for this :class:`ObjectId`.""" | |
291 | return hash(self.__id) |
0 | # Copyright 2009-2015 MongoDB, Inc. | |
1 | # | |
2 | # Licensed under the Apache License, Version 2.0 (the "License"); you | |
3 | # may not use this file except in compliance with the License. You | |
4 | # may obtain a copy of the License at | |
5 | # | |
6 | # http://www.apache.org/licenses/LICENSE-2.0 | |
7 | # | |
8 | # Unless required by applicable law or agreed to in writing, software | |
9 | # distributed under the License is distributed on an "AS IS" BASIS, | |
10 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or | |
11 | # implied. See the License for the specific language governing | |
12 | # permissions and limitations under the License. | |
13 | ||
14 | """Utility functions and definitions for python3 compatibility.""" | |
15 | ||
16 | import sys | |
17 | ||
PY3 = sys.version_info[0] == 3  # True when running under Python 3.

if PY3:
    # Python 3 branch: native bytes/str split; expose Python-2-style
    # helper names (imap, iteritems, ...) as thin aliases.
    import codecs
    import _thread as thread
    from io import BytesIO as StringIO
    MAXSIZE = sys.maxsize

    imap = map

    def b(s):
        # BSON and socket operations deal in binary data. In
        # python 3 that means instances of `bytes`. In python
        # 2.6 and 2.7 you can create an alias for `bytes` using
        # the b prefix (e.g. b'foo').
        # See http://python3porting.com/problems.html#nicer-solutions
        return codecs.latin_1_encode(s)[0]

    def u(s):
        # PY3 strings may already be treated as unicode literals
        return s

    def bytes_from_hex(h):
        return bytes.fromhex(h)

    def iteritems(d):
        return iter(d.items())

    def itervalues(d):
        return iter(d.values())

    def reraise(exctype, value, trace=None):
        # Python-3 spelling of re-raising with an explicit traceback.
        raise exctype(str(value)).with_traceback(trace)

    def _unicode(s):
        return s

    # Canonical type aliases used throughout the bson package.
    text_type = str
    string_type = str
    integer_types = int
else:
    # Python 2 branch: map the same helper names onto py2 equivalents.
    import thread

    from itertools import imap
    try:
        from cStringIO import StringIO
    except ImportError:
        from StringIO import StringIO

    MAXSIZE = sys.maxint

    def b(s):
        # See comments above. In python 2.x b('foo') is just 'foo'.
        return s

    def u(s):
        """Replacement for unicode literal prefix."""
        return unicode(s.replace('\\', '\\\\'), 'unicode_escape')

    def bytes_from_hex(h):
        return h.decode('hex')

    def iteritems(d):
        return d.iteritems()

    def itervalues(d):
        return d.itervalues()

    # "raise x, y, z" raises SyntaxError in Python 3
    exec("""def reraise(exctype, value, trace=None):
    raise exctype, str(value), trace
""")

    _unicode = unicode

    string_type = basestring
    text_type = unicode
    integer_types = (int, long)
0 | # Copyright 2013-2015 MongoDB, Inc. | |
1 | # | |
2 | # Licensed under the Apache License, Version 2.0 (the "License"); | |
3 | # you may not use this file except in compliance with the License. | |
4 | # You may obtain a copy of the License at | |
5 | # | |
6 | # http://www.apache.org/licenses/LICENSE-2.0 | |
7 | # | |
8 | # Unless required by applicable law or agreed to in writing, software | |
9 | # distributed under the License is distributed on an "AS IS" BASIS, | |
10 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |
11 | # See the License for the specific language governing permissions and | |
12 | # limitations under the License. | |
13 | ||
14 | """Tools for representing MongoDB regular expressions. | |
15 | """ | |
16 | ||
17 | import re | |
18 | ||
19 | from mockupdb._bson.son import RE_TYPE | |
20 | from mockupdb._bson.py3compat import string_type, text_type | |
21 | ||
22 | ||
def str_flags_to_int(str_flags):
    """Convert a MongoDB regex option string (e.g. ``"im"``) into the
    equivalent Python ``re`` flag bitmask.

    Characters with no corresponding ``re`` flag are ignored.
    """
    flag_map = (
        ("i", re.IGNORECASE),
        ("l", re.LOCALE),
        ("m", re.MULTILINE),
        ("s", re.DOTALL),
        ("u", re.UNICODE),
        ("x", re.VERBOSE),
    )
    flags = 0
    for char, flag in flag_map:
        if char in str_flags:
            flags |= flag
    return flags
39 | ||
40 | ||
class Regex(object):
    """BSON regular expression data."""

    # BSON type number for regex, used by the BSON encoder.
    _type_marker = 11

    @classmethod
    def from_native(cls, regex):
        """Convert a Python regular expression into a ``Regex`` instance.

        Note that in Python 3, a regular expression compiled from a
        :class:`str` has the ``re.UNICODE`` flag set. If it is undesirable
        to store this flag in a BSON regular expression, unset it first::

          >>> pattern = re.compile('.*')
          >>> regex = Regex.from_native(pattern)
          >>> regex.flags ^= re.UNICODE
          >>> db.collection.insert({'pattern': regex})

        :Parameters:
          - `regex`: A regular expression object from ``re.compile()``.

        .. warning::
           Python regular expressions use a different syntax and different
           set of flags than MongoDB, which uses `PCRE`_. A regular
           expression retrieved from the server may not compile in
           Python, or may match a different set of strings in Python than
           when used in a MongoDB query.

        .. _PCRE: http://www.pcre.org/
        """
        if not isinstance(regex, RE_TYPE):
            raise TypeError(
                "regex must be a compiled regular expression, not %s"
                % type(regex))

        return Regex(regex.pattern, regex.flags)

    def __init__(self, pattern, flags=0):
        """BSON regular expression data.

        This class is useful to store and retrieve regular expressions that
        are incompatible with Python's regular expression dialect.

        :Parameters:
          - `pattern`: string
          - `flags`: (optional) an integer bitmask, or a string of flag
            characters like "im" for IGNORECASE and MULTILINE
        """
        if not isinstance(pattern, (text_type, bytes)):
            raise TypeError("pattern must be a string, not %s" % type(pattern))
        self.pattern = pattern

        if isinstance(flags, string_type):
            self.flags = str_flags_to_int(flags)
        elif isinstance(flags, int):
            self.flags = flags
        else:
            raise TypeError(
                "flags must be a string or int, not %s" % type(flags))

    def __eq__(self, other):
        if isinstance(other, Regex):
            # BUG FIX: the original compared self.pattern to itself
            # (always true), so any two Regex objects with equal flags
            # compared equal. Compare against *other's* pattern.
            return self.pattern == other.pattern and self.flags == other.flags
        else:
            return NotImplemented

    # Regex instances are mutable, so they are explicitly unhashable.
    __hash__ = None

    def __ne__(self, other):
        return not self == other

    def __repr__(self):
        return "Regex(%r, %r)" % (self.pattern, self.flags)

    def try_compile(self):
        """Compile this :class:`Regex` as a Python regular expression.

        .. warning::
           Python regular expressions use a different syntax and different
           set of flags than MongoDB, which uses `PCRE`_. A regular
           expression retrieved from the server may not compile in
           Python, or may match a different set of strings in Python than
           when used in a MongoDB query. :meth:`try_compile()` may raise
           :exc:`re.error`.

        .. _PCRE: http://www.pcre.org/
        """
        return re.compile(self.pattern, self.flags)
0 | # Copyright 2009-2015 MongoDB, Inc. | |
1 | # | |
2 | # Licensed under the Apache License, Version 2.0 (the "License"); | |
3 | # you may not use this file except in compliance with the License. | |
4 | # You may obtain a copy of the License at | |
5 | # | |
6 | # http://www.apache.org/licenses/LICENSE-2.0 | |
7 | # | |
8 | # Unless required by applicable law or agreed to in writing, software | |
9 | # distributed under the License is distributed on an "AS IS" BASIS, | |
10 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |
11 | # See the License for the specific language governing permissions and | |
12 | # limitations under the License. | |
13 | ||
14 | """Tools for creating and manipulating SON, the Serialized Ocument Notation. | |
15 | ||
16 | Regular dictionaries can be used instead of SON objects, but not when the order | |
17 | of keys is important. A SON object can be used just like a normal Python | |
18 | dictionary.""" | |
19 | ||
20 | import collections | |
21 | import copy | |
22 | import re | |
23 | ||
24 | from mockupdb._bson.py3compat import iteritems | |
25 | ||
26 | ||
# This sort of sucks, but seems to be as good as it gets...
# This is essentially the same as re._pattern_type (re.Pattern on newer
# Pythons), captured portably by compiling a trivial pattern.
RE_TYPE = type(re.compile(""))
30 | ||
31 | ||
class SON(dict):
    """SON data.

    A subclass of dict that maintains ordering of keys and provides a
    few extra niceties for dealing with SON. SON objects can be
    converted to and from mockupdb._bson.

    The mapping from Python types to BSON types is as follows:

    ======================================= ============= ===================
    Python Type                             BSON Type     Supported Direction
    ======================================= ============= ===================
    None                                    null          both
    bool                                    boolean       both
    int [#int]_                             int32 / int64 py -> bson
    long                                    int64         py -> bson
    `bson.int64.Int64`                      int64         both
    float                                   number (real) both
    string                                  string        py -> bson
    unicode                                 string        both
    list                                    array         both
    dict / `SON`                            object        both
    datetime.datetime [#dt]_ [#dt2]_        date          both
    `bson.regex.Regex`                      regex         both
    compiled re [#re]_                      regex         py -> bson
    `bson.binary.Binary`                    binary        both
    `bson.objectid.ObjectId`                oid           both
    `bson.dbref.DBRef`                      dbref         both
    None                                    undefined     bson -> py
    unicode                                 code          bson -> py
    `bson.code.Code`                        code          py -> bson
    unicode                                 symbol        bson -> py
    bytes (Python 3) [#bytes]_              binary        both
    ======================================= ============= ===================

    Note that to save binary data it must be wrapped as an instance of
    `bson.binary.Binary`. Otherwise it will be saved as a BSON string
    and retrieved as unicode.

    .. [#int] A Python int will be saved as a BSON int32 or BSON int64 depending
       on its size. A BSON int32 will always decode to a Python int. A BSON
       int64 will always decode to a :class:`~bson.int64.Int64`.
    .. [#dt] datetime.datetime instances will be rounded to the nearest
       millisecond when saved
    .. [#dt2] all datetime.datetime instances are treated as *naive*. clients
       should always use UTC.
    .. [#re] :class:`~bson.regex.Regex` instances and regular expression
       objects from ``re.compile()`` are both saved as BSON regular expressions.
       BSON regular expressions are decoded as :class:`~bson.regex.Regex`
       instances.
    .. [#bytes] The bytes type from Python 3.x is encoded as BSON binary with
       subtype 0. In Python 3.x it will be decoded back to bytes. In Python 2.x
       it will be decoded to an instance of :class:`~bson.binary.Binary` with
       subtype 0.
    """

    def __init__(self, data=None, **kwargs):
        # Ordered list of keys, kept in sync with the underlying dict.
        self.__keys = []
        dict.__init__(self)
        self.update(data)
        self.update(kwargs)

    def __new__(cls, *args, **kwargs):
        # Ensure __keys exists even when __init__ is bypassed
        # (e.g. by copy.copy or unpickling).
        instance = super(SON, cls).__new__(cls, *args, **kwargs)
        instance.__keys = []
        return instance

    def __repr__(self):
        result = []
        for key in self.__keys:
            result.append("(%r, %r)" % (key, self[key]))
        return "SON([%s])" % ", ".join(result)

    def __setitem__(self, key, value):
        # Record insertion order only the first time a key appears.
        if key not in self.__keys:
            self.__keys.append(key)
        dict.__setitem__(self, key, value)

    def __delitem__(self, key):
        self.__keys.remove(key)
        dict.__delitem__(self, key)

    def keys(self):
        # Return a copy so callers cannot mutate the internal order list.
        return list(self.__keys)

    def copy(self):
        other = SON()
        other.update(self)
        return other

    # TODO this is all from UserDict.DictMixin. it could probably be made more
    # efficient.
    # second level definitions support higher levels
    def __iter__(self):
        for k in self.__keys:
            yield k

    def has_key(self, key):
        return key in self.__keys

    # third level takes advantage of second level definitions
    def iteritems(self):
        for k in self:
            yield (k, self[k])

    def iterkeys(self):
        return self.__iter__()

    # fourth level uses definitions from lower levels
    def itervalues(self):
        for _, v in self.iteritems():
            yield v

    def values(self):
        return [v for _, v in self.iteritems()]

    def items(self):
        return [(key, self[key]) for key in self]

    def clear(self):
        self.__keys = []
        super(SON, self).clear()

    def setdefault(self, key, default=None):
        try:
            return self[key]
        except KeyError:
            self[key] = default
        return default

    def pop(self, key, *args):
        if len(args) > 1:
            raise TypeError("pop expected at most 2 arguments, got "\
                              + repr(1 + len(args)))
        try:
            value = self[key]
        except KeyError:
            if args:
                return args[0]
            raise
        del self[key]
        return value

    def popitem(self):
        try:
            k, v = next(self.iteritems())
        except StopIteration:
            raise KeyError('container is empty')
        del self[k]
        return (k, v)

    def update(self, other=None, **kwargs):
        # Make progressively weaker assumptions about "other"
        if other is None:
            pass
        elif hasattr(other, 'iteritems'):  # iteritems saves memory and lookups
            for k, v in other.iteritems():
                self[k] = v
        elif hasattr(other, 'keys'):
            for k in other.keys():
                self[k] = other[k]
        else:
            for k, v in other:
                self[k] = v
        if kwargs:
            self.update(kwargs)

    def get(self, key, default=None):
        try:
            return self[key]
        except KeyError:
            return default

    def __eq__(self, other):
        """Comparison to another SON is order-sensitive while comparison to a
        regular dictionary is order-insensitive.
        """
        if isinstance(other, SON):
            return len(self) == len(other) and self.items() == other.items()
        return self.to_dict() == other

    def __ne__(self, other):
        return not self == other

    def __len__(self):
        return len(self.__keys)

    def to_dict(self):
        """Convert a SON document to a normal Python dictionary instance.

        This is trickier than just *dict(...)* because it needs to be
        recursive.
        """
        # BUG FIX: collections.Mapping was deprecated in Python 3.3 and
        # removed in Python 3.10, while collections.abc does not exist in
        # Python 2 — resolve the correct class at call time.
        try:
            from collections.abc import Mapping
        except ImportError:  # Python 2
            from collections import Mapping

        def transform_value(value):
            if isinstance(value, list):
                return [transform_value(v) for v in value]
            elif isinstance(value, Mapping):
                return dict([
                    (k, transform_value(v))
                    for k, v in iteritems(value)])
            else:
                return value

        return transform_value(dict(self))

    def __deepcopy__(self, memo):
        out = SON()
        val_id = id(self)
        if val_id in memo:
            return memo.get(val_id)
        memo[val_id] = out
        for k, v in self.iteritems():
            # Compiled regex objects cannot be deep-copied; share them.
            if not isinstance(v, RE_TYPE):
                v = copy.deepcopy(v, memo)
            out[k] = v
        return out
0 | # Copyright 2010-2015 MongoDB, Inc. | |
1 | # | |
2 | # Licensed under the Apache License, Version 2.0 (the "License"); | |
3 | # you may not use this file except in compliance with the License. | |
4 | # You may obtain a copy of the License at | |
5 | # | |
6 | # http://www.apache.org/licenses/LICENSE-2.0 | |
7 | # | |
8 | # Unless required by applicable law or agreed to in writing, software | |
9 | # distributed under the License is distributed on an "AS IS" BASIS, | |
10 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |
11 | # See the License for the specific language governing permissions and | |
12 | # limitations under the License. | |
13 | ||
14 | """Tools for representing MongoDB internal Timestamps. | |
15 | """ | |
16 | ||
17 | import calendar | |
18 | import datetime | |
19 | ||
20 | from mockupdb._bson.py3compat import integer_types | |
21 | from mockupdb._bson.tz_util import utc | |
22 | ||
UPPERBOUND = 4294967296  # 2**32: exclusive upper bound for 32-bit time and inc.
24 | ||
25 | ||
class Timestamp(object):
    """MongoDB internal timestamps used in the opLog.
    """

    # BSON type number for timestamp, used by the BSON encoder.
    _type_marker = 17

    def __init__(self, time, inc):
        """Create a new :class:`Timestamp`.

        This class is only for use with the MongoDB opLog. If you need
        to store a regular timestamp, please use a
        :class:`~datetime.datetime`.

        Raises :class:`TypeError` if `time` is not an instance of
        :class: `int` or :class:`~datetime.datetime`, or `inc` is not
        an instance of :class:`int`. Raises :class:`ValueError` if
        `time` or `inc` is not in [0, 2**32).

        :Parameters:
          - `time`: time in seconds since epoch UTC, or a naive UTC
            :class:`~datetime.datetime`, or an aware
            :class:`~datetime.datetime`
          - `inc`: the incrementing counter
        """
        if isinstance(time, datetime.datetime):
            # Normalize aware datetimes to UTC, then convert to epoch seconds.
            offset = time.utcoffset()
            if offset is not None:
                time = time - offset
            time = int(calendar.timegm(time.timetuple()))
        if not isinstance(time, integer_types):
            raise TypeError("time must be an instance of int")
        if not isinstance(inc, integer_types):
            raise TypeError("inc must be an instance of int")
        # Both fields are unsigned 32-bit values on the wire.
        for name, value in (("time", time), ("inc", inc)):
            if not 0 <= value < UPPERBOUND:
                raise ValueError("%s must be contained in [0, 2**32)" % name)

        self.__time = time
        self.__inc = inc

    @property
    def time(self):
        """Get the time portion of this :class:`Timestamp`.
        """
        return self.__time

    @property
    def inc(self):
        """Get the inc portion of this :class:`Timestamp`.
        """
        return self.__inc

    def __eq__(self, other):
        if not isinstance(other, Timestamp):
            return NotImplemented
        return self.__time == other.time and self.__inc == other.inc

    def __hash__(self):
        return hash(self.__time) ^ hash(self.__inc)

    def __ne__(self, other):
        return not self == other

    def __lt__(self, other):
        if not isinstance(other, Timestamp):
            return NotImplemented
        return (self.__time, self.__inc) < (other.time, other.inc)

    def __le__(self, other):
        if not isinstance(other, Timestamp):
            return NotImplemented
        return (self.__time, self.__inc) <= (other.time, other.inc)

    def __gt__(self, other):
        if not isinstance(other, Timestamp):
            return NotImplemented
        return (self.__time, self.__inc) > (other.time, other.inc)

    def __ge__(self, other):
        if not isinstance(other, Timestamp):
            return NotImplemented
        return (self.__time, self.__inc) >= (other.time, other.inc)

    def __repr__(self):
        return "Timestamp(%s, %s)" % (self.__time, self.__inc)

    def as_datetime(self):
        """Return a :class:`~datetime.datetime` instance corresponding
        to the time portion of this :class:`Timestamp`.

        The returned datetime's timezone is UTC.
        """
        return datetime.datetime.fromtimestamp(self.__time, utc)
0 | # Copyright 2010-2015 MongoDB, Inc. | |
1 | # | |
2 | # Licensed under the Apache License, Version 2.0 (the "License"); | |
3 | # you may not use this file except in compliance with the License. | |
4 | # You may obtain a copy of the License at | |
5 | # | |
6 | # http://www.apache.org/licenses/LICENSE-2.0 | |
7 | # | |
8 | # Unless required by applicable law or agreed to in writing, software | |
9 | # distributed under the License is distributed on an "AS IS" BASIS, | |
10 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |
11 | # See the License for the specific language governing permissions and | |
12 | # limitations under the License. | |
13 | ||
14 | """Timezone related utilities for BSON.""" | |
15 | ||
16 | from datetime import (timedelta, | |
17 | tzinfo) | |
18 | ||
ZERO = timedelta(0)  # Fixed-offset zones never observe DST; dst() returns this.
20 | ||
21 | ||
class FixedOffset(tzinfo):
    """A timezone at a fixed offset from UTC, given in minutes east.

    Based on the example in the Python standard library's ``datetime``
    documentation. Defining ``__getinitargs__`` allows instances to be
    pickled and copied.
    """

    def __init__(self, offset, name):
        # Accept either a ready-made timedelta or a number of minutes.
        if not isinstance(offset, timedelta):
            offset = timedelta(minutes=offset)
        self.__offset = offset
        self.__name = name

    def __getinitargs__(self):
        # Arguments handed back to __init__ when unpickling/copying.
        return self.__offset, self.__name

    def utcoffset(self, dt):
        return self.__offset

    def tzname(self, dt):
        return self.__name

    def dst(self, dt):
        # A fixed offset never observes daylight saving time.
        return ZERO
48 | ||
49 | ||
# Module-level singleton used wherever bson needs an aware UTC timezone.
utc = FixedOffset(0, "UTC")
"""Fixed offset timezone representing UTC."""
0 | -----BEGIN PRIVATE KEY----- | |
1 | MIICdwIBADANBgkqhkiG9w0BAQEFAASCAmEwggJdAgEAAoGBAK53miP9GczBWXnq | |
2 | NxHwQkgVqsDuesjwJbWilMK4gf3fjnf2PN3qDpnGbZbPD0ij8975pIKtSPoDycFm | |
3 | A8Mogip0yU2Lv2lL56CWthSBftOFDL2CWIsmuuURFXZPiVLtLytfI9oLASZFlywW | |
4 | Cs83qEDTvdW8VoVhVsxV1JFDnpXLAgMBAAECgYBoGBgxrMt97UazhNkCrPT/CV5t | |
5 | 6lv8E7yMGMrlOyzkCkR4ssQyK3o2qbutJTGbR6czvIM5LKbD9Qqlh3ZrNHokWmTR | |
6 | VQQpJxt8HwP5boQvwRHg9+KSGr4JvRko1qxFs9C7Bzjt4r9VxdjhwZPdy0McGI/z | |
7 | yPXyQHjqBayrHV1EwQJBANorfCKeIxLhH3LAeUZuRS8ACldJ2N1kL6Ov43/v+0S/ | |
8 | OprQeBTODuTds3sv7FCT1aYDTOe6JLNOwN2i4YVOMBsCQQDMuCozrwqftD17D06P | |
9 | 9+lRXUekY5kFBs5j28Xnl8t8jnuxsXtQUTru660LD0QrmDNSauhpEmlpJknicnGt | |
10 | hmwRAkEA12MI6bBPlir0/jgxQqxI1w7mJqj8Vg27zpEuO7dzzLoyJHddpcSNBbwu | |
11 | npaAakiZK42klj26T9+XHvjYRuAbMwJBAJ5WnwWEkGH/pUHGEAyYQdSVojDKe/MA | |
12 | Vae0tzguFswK5C8GyArSGRPsItYYA7D4MlG/sGx8Oh2C6MiFndkJzBECQDcP1y4r | |
13 | Qsek151t1zArLKH4gG5dQAeZ0Lc2VeC4nLMUqVwrHcZDdd1RzLlSaH3j1MekFVfT | |
14 | 6v6rrcNLEVbeuk4= | |
15 | -----END PRIVATE KEY----- | |
16 | -----BEGIN CERTIFICATE----- | |
17 | MIIC7jCCAlegAwIBAgIBCjANBgkqhkiG9w0BAQUFADCBkjELMAkGA1UEBhMCVVMx | |
18 | ETAPBgNVBAgMCE5ldyBZb3JrMRYwFAYDVQQHDA1OZXcgWW9yayBDaXR5MQ4wDAYD | |
19 | VQQKDAUxMEdlbjEPMA0GA1UECwwGS2VybmVsMRowGAYDVQQDDBFNeSBDZXJ0IEF1 | |
20 | dGhvcml0eTEbMBkGCSqGSIb3DQEJARYMcm9vdEBsYXphcnVzMB4XDTEzMTIwNTEz | |
21 | MjU0MFoXDTQxMDQyMTEzMjU0MFowajELMAkGA1UEBhMCVVMxETAPBgNVBAgMCE5l | |
22 | dyBZb3JrMRYwFAYDVQQHDA1OZXcgWW9yayBDaXR5MQ4wDAYDVQQKDAUxMEdlbjEP | |
23 | MA0GA1UECwwGS2VybmVsMQ8wDQYDVQQDDAZzZXJ2ZXIwgZ8wDQYJKoZIhvcNAQEB | |
24 | BQADgY0AMIGJAoGBAK53miP9GczBWXnqNxHwQkgVqsDuesjwJbWilMK4gf3fjnf2 | |
25 | PN3qDpnGbZbPD0ij8975pIKtSPoDycFmA8Mogip0yU2Lv2lL56CWthSBftOFDL2C | |
26 | WIsmuuURFXZPiVLtLytfI9oLASZFlywWCs83qEDTvdW8VoVhVsxV1JFDnpXLAgMB | |
27 | AAGjezB5MAkGA1UdEwQCMAAwLAYJYIZIAYb4QgENBB8WHU9wZW5TU0wgR2VuZXJh | |
28 | dGVkIENlcnRpZmljYXRlMB0GA1UdDgQWBBQgCkKiZhUV9/Zo7RwYYwm2cNK6tzAf | |
29 | BgNVHSMEGDAWgBQHQRk6n37FtyJOt7zV3+T8CbhkFjANBgkqhkiG9w0BAQUFAAOB | |
30 | gQCbsfr+Q4pty4Fy38lSxoCgnbB4pX6+Ex3xyw5zxDYR3xUlb/uHBiNZ1dBrXBxU | |
31 | ekU8dEvf+hx4iRDSW/C5N6BGnBBhCHcrPabo2bEEWKVsbUC3xchTB5rNGkvnMt9t | |
32 | G9ol7vanuzjL3S8/2PB33OshkBH570CxqqPflQbdjwt9dg== | |
33 | -----END CERTIFICATE----- |
0 | #!/usr/bin/env python | |
1 | # -*- coding: utf-8 -*- | |
2 | import sys | |
3 | ||
4 | ||
# Prefer setuptools (richer commands); fall back to plain distutils on
# minimal Python installations.
try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup


def _read(path):
    """Return the text contents of a file in the project root."""
    with open(path) as f:
        return f.read()


# Long description shown on PyPI: the README followed by the changelog,
# with the Sphinx ".. :changelog:" marker comment stripped out.
readme = _read('README.rst')
changelog = _read('CHANGELOG.rst').replace('.. :changelog:', '')

requirements = []
test_requirements = ['pymongo>=3']

# Python 2.6 needs backports: ordered dicts and the modern unittest API.
if sys.version_info[:2] == (2, 6):
    requirements.append('ordereddict')
    test_requirements.append('unittest2')

setup(
    name='mockupdb',
    version='1.1',
    description="MongoDB Wire Protocol server library",
    long_description=readme + '\n\n' + changelog,
    author="A. Jesse Jiryu Davis",
    author_email='jesse@mongodb.com',
    url='https://github.com/ajdavis/mongo-mockup-db',
    packages=['mockupdb'],
    package_dir={'mockupdb': 'mockupdb'},
    include_package_data=True,
    install_requires=requirements,
    license="Apache License, Version 2.0",
    zip_safe=False,
    keywords=["mongo", "mongodb", "wire protocol", "mockupdb", "mock"],
    classifiers=[
        'Development Status :: 2 - Pre-Alpha',
        'Intended Audience :: Developers',
        "License :: OSI Approved :: Apache Software License",
        'Natural Language :: English',
        "Programming Language :: Python :: 2",
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
    ],
    test_suite='tests',
    tests_require=test_requirements,
)
# -*- coding: utf-8 -*-

import unittest

# On Python 2.6 the stdlib unittest lacks assertions such as assertIsNone
# and assertRaises-as-context-manager, so prefer the unittest2 backport
# when it is installed; otherwise keep the stdlib module imported above.
try:
    import unittest2 as unittest
except ImportError:
    pass
0 | #!/usr/bin/env python | |
1 | # -*- coding: utf-8 -*- | |
2 | ||
3 | """Test MockupDB.""" | |
4 | ||
5 | import contextlib | |
6 | import ssl | |
7 | import sys | |
8 | ||
9 | if sys.version_info[0] < 3: | |
10 | from io import BytesIO as StringIO | |
11 | else: | |
12 | from io import StringIO | |
13 | ||
14 | try: | |
15 | from queue import Queue | |
16 | except ImportError: | |
17 | from Queue import Queue | |
18 | ||
19 | # Tests depend on PyMongo's BSON implementation, but MockupDB itself does not. | |
20 | from bson import SON | |
21 | from bson.codec_options import CodecOptions | |
22 | from pymongo import MongoClient, message, WriteConcern | |
23 | ||
24 | from mockupdb import (go, going, | |
25 | Command, Matcher, MockupDB, Request, | |
26 | OpDelete, OpInsert, OpQuery, OpUpdate, | |
27 | DELETE_FLAGS, INSERT_FLAGS, UPDATE_FLAGS, QUERY_FLAGS) | |
28 | ||
29 | from tests import unittest # unittest2 on Python 2.6. | |
30 | ||
31 | ||
@contextlib.contextmanager
def capture_stderr():
    """Temporarily replace sys.stderr with an in-memory buffer.

    Yields the buffer. On exit the real stderr is restored and the buffer
    is rewound to position 0 so callers can read it from the start.
    """
    buf = StringIO()
    saved = sys.stderr
    sys.stderr = buf
    try:
        yield buf
    finally:
        sys.stderr = saved
        buf.seek(0)
41 | ||
42 | ||
class TestGoing(unittest.TestCase):
    """Tests for the going() context manager's error handling."""

    def test_nested_errors(self):
        # When the `with going(...)` body itself raises, the body's
        # exception propagates, while the background function's failure is
        # reported on stderr rather than silently lost, and is still
        # retrievable by calling the future.
        def thrower():
            raise AssertionError("thrown")

        with capture_stderr() as stderr:
            with self.assertRaises(ZeroDivisionError):
                with going(thrower) as future:
                    1 / 0

        self.assertIn('error in going(', stderr.getvalue())
        self.assertIn('AssertionError: thrown', stderr.getvalue())

        # Future keeps raising.
        self.assertRaises(AssertionError, future)
        self.assertRaises(AssertionError, future)
59 | ||
60 | ||
class TestRequest(unittest.TestCase):
    """Tests for Request/OpQuery wire parsing, flags, fields, and repr."""

    def _pack_request(self, ns, slave_ok):
        # Build a raw OP_QUERY message with PyMongo's encoder.
        # Flag bit 4 is the wire protocol's SlaveOkay flag.
        flags = 4 if slave_ok else 0
        request_id, msg_bytes, max_doc_size = message.query(
            flags, ns, 0, 0, {}, None, CodecOptions())

        # Skip 16-byte standard header.
        return msg_bytes[16:], request_id

    def test_flags(self):
        # A hand-constructed Request has no flags until parsed off the wire.
        request = Request()
        self.assertIsNone(request.flags)
        self.assertFalse(request.slave_ok)

        msg_bytes, request_id = self._pack_request('db.collection', False)
        request = OpQuery.unpack(msg_bytes, None, None, request_id)
        self.assertIsInstance(request, OpQuery)
        # A query on a regular collection is not a command.
        self.assertNotIsInstance(request, Command)
        self.assertEqual(0, request.flags)
        self.assertFalse(request.slave_ok)
        self.assertFalse(request.slave_okay)  # Synonymous.

        # Queries on the "$cmd" pseudo-collection unpack as Command.
        msg_bytes, request_id = self._pack_request('db.$cmd', False)
        request = OpQuery.unpack(msg_bytes, None, None, request_id)
        self.assertIsInstance(request, Command)
        self.assertEqual(0, request.flags)

        # The SlaveOkay bit survives the round trip.
        msg_bytes, request_id = self._pack_request('db.collection', True)
        request = OpQuery.unpack(msg_bytes, None, None, request_id)
        self.assertEqual(4, request.flags)
        self.assertTrue(request.slave_ok)

        msg_bytes, request_id = self._pack_request('db.$cmd', True)
        request = OpQuery.unpack(msg_bytes, None, None, request_id)
        self.assertEqual(4, request.flags)

    def test_fields(self):
        # "fields" (the projection) defaults to None and is stored as given.
        self.assertIsNone(OpQuery({}).fields)
        self.assertEqual({'_id': False, 'a': 1},
                         OpQuery({}, fields={'_id': False, 'a': 1}).fields)

    def test_repr(self):
        self.assertEqual('Request()', repr(Request()))
        self.assertEqual('Request({})', repr(Request({})))
        self.assertEqual('Request({})', repr(Request([{}])))
        self.assertEqual('Request(flags=4)', repr(Request(flags=4)))

        # OpQuery reprs render known flag bits by name, not number.
        self.assertEqual('OpQuery({})', repr(OpQuery()))
        self.assertEqual('OpQuery({})', repr(OpQuery({})))
        self.assertEqual('OpQuery({})', repr(OpQuery([{}])))
        self.assertEqual('OpQuery({}, flags=SlaveOkay)',
                         repr(OpQuery(flags=4)))
        self.assertEqual('OpQuery({}, flags=SlaveOkay)',
                         repr(OpQuery({}, flags=4)))
        self.assertEqual('OpQuery({}, flags=TailableCursor|AwaitData)',
                         repr(OpQuery({}, flags=34)))

        self.assertEqual('Command({})', repr(Command()))
        self.assertEqual('Command({"foo": 1})', repr(Command('foo')))
        # Key order is preserved in a Command's repr.
        son = SON([('b', 1), ('a', 1), ('c', 1)])
        self.assertEqual('Command({"b": 1, "a": 1, "c": 1})',
                         repr(Command(son)))
        self.assertEqual('Command({}, flags=SlaveOkay)',
                         repr(Command(flags=4)))

        self.assertEqual('OpInsert({}, {})', repr(OpInsert([{}, {}])))
        self.assertEqual('OpInsert({}, {})', repr(OpInsert({}, {})))

    def test_assert_matches(self):
        # assert_matches returns normally on a match and raises
        # AssertionError on a mismatch.
        request = OpQuery({'x': 17}, flags=QUERY_FLAGS['SlaveOkay'])
        request.assert_matches(request)

        with self.assertRaises(AssertionError):
            request.assert_matches(Command('foo'))
135 | ||
136 | ||
class TestLegacyWrites(unittest.TestCase):
    """PyMongo legacy (pre-write-command) wire ops against the mock server.

    Each test asserts the exact sequence of wire messages PyMongo sends:
    an OP_INSERT/OP_UPDATE/OP_DELETE followed by a getlasterror command
    (unless the write concern is w=0, which is fire-and-forget).
    """

    def setUp(self):
        self.server = MockupDB(auto_ismaster=True)
        self.server.run()
        self.addCleanup(self.server.stop)
        self.client = MongoClient(self.server.uri)
        self.collection = self.client.db.collection

    def test_insert_one(self):
        with going(self.collection.insert_one, {'_id': 1}) as future:
            self.server.receives(OpInsert({'_id': 1}, flags=0))
            # Acknowledged write: PyMongo follows up with getlasterror.
            self.server.receives(Command('getlasterror')).replies_to_gle()

        self.assertEqual(1, future().inserted_id)

    def test_insert_many(self):
        # w=0 write concern: no getlasterror is expected after the insert.
        collection = self.collection.with_options(
            write_concern=WriteConcern(0))

        # ordered=False maps to the ContinueOnError wire flag.
        flags = INSERT_FLAGS['ContinueOnError']
        docs = [{'_id': 1}, {'_id': 2}]
        with going(collection.insert_many, docs, ordered=False) as future:
            self.server.receives(OpInsert(docs, flags=flags))

        self.assertEqual([1, 2], future().inserted_ids)

    def test_replace_one(self):
        with going(self.collection.replace_one, {}, {}) as future:
            self.server.receives(OpUpdate({}, {}, flags=0))
            request = self.server.receives(Command('getlasterror'))
            # Report an upsert so the result carries upserted_id.
            request.replies_to_gle(upserted=1)

        self.assertEqual(1, future().upserted_id)

    def test_update_many(self):
        # update_many sets the MultiUpdate wire flag (bit value 2).
        flags = UPDATE_FLAGS['MultiUpdate']
        with going(self.collection.update_many, {}, {'$unset': 'a'}) as future:
            update = self.server.receives(OpUpdate({}, {}, flags=flags))
            self.assertEqual(2, update.flags)
            gle = self.server.receives(Command('getlasterror'))
            gle.replies_to_gle(upserted=1)

        self.assertEqual(1, future().upserted_id)

    def test_delete_one(self):
        # delete_one sets the SingleRemove wire flag (bit value 1).
        flags = DELETE_FLAGS['SingleRemove']
        with going(self.collection.delete_one, {}) as future:
            delete = self.server.receives(OpDelete({}, flags=flags))
            self.assertEqual(1, delete.flags)
            gle = self.server.receives(Command('getlasterror'))
            gle.replies_to_gle(n=1)

        self.assertEqual(1, future().deleted_count)

    def test_delete_many(self):
        # delete_many sends no flags: remove all matching documents.
        with going(self.collection.delete_many, {}) as future:
            delete = self.server.receives(OpDelete({}, flags=0))
            self.assertEqual(0, delete.flags)
            gle = self.server.receives(Command('getlasterror'))
            gle.replies_to_gle(n=2)

        self.assertEqual(2, future().deleted_count)
199 | ||
200 | ||
class TestMatcher(unittest.TestCase):
    """Matcher comparison semantics for Command requests."""

    def test_command_name_case_insensitive(self):
        # Command names compare case-insensitively.
        matcher = Matcher(Command('ismaster'))
        self.assertTrue(matcher.matches(Command('IsMaster')))

    def test_command_first_arg(self):
        # The value of the command's first key must match exactly.
        matcher = Matcher(Command(ismaster=1))
        self.assertFalse(matcher.matches(Command(ismaster=2)))

    def test_command_fields(self):
        # Additional fields beyond the command name must also match.
        matcher = Matcher(Command('a', b=1))
        self.assertTrue(matcher.matches(Command('a', b=1)))
        self.assertFalse(matcher.matches(Command('a', b=2)))
216 | ||
217 | ||
class TestAutoresponds(unittest.TestCase):
    """Tests for MockupDB.autoresponds."""

    def test_auto_dequeue(self):
        server = MockupDB(auto_ismaster=True)
        server.run()
        # Consistent with the other test classes: stop the server so the
        # listener socket/thread is not leaked across tests.
        self.addCleanup(server.stop)
        client = MongoClient(server.uri)
        future = go(client.admin.command, 'ping')
        server.autoresponds('ping')  # Should dequeue the request.
        future()

    def test_autoresponds_case_insensitive(self):
        server = MockupDB()
        # Little M. Note this is only case-insensitive because it's a Command.
        server.autoresponds(Command('ismaster'), foo='bar')
        server.run()
        self.addCleanup(server.stop)
        response = MongoClient(server.uri).admin.command('isMaster')  # Big M.
        self.assertEqual('bar', response['foo'])
234 | ||
235 | ||
class TestSSL(unittest.TestCase):
    """Tests for MockupDB(ssl=True)."""

    def test_ssl_uri(self):
        # With ssl=True the advertised URI carries "?ssl=true".
        server = MockupDB(ssl=True)
        server.run()
        self.addCleanup(server.stop)
        self.assertEqual(
            'mongodb://localhost:%d/?ssl=true' % server.port,
            server.uri)

    def test_ssl_basic(self):
        # A TLS client can complete the handshake and run a command.
        server = MockupDB(ssl=True, auto_ismaster=True)
        server.run()
        self.addCleanup(server.stop)
        # The server presents a self-signed test certificate, so disable
        # certificate verification on the client side.
        client = MongoClient(server.uri, ssl_cert_reqs=ssl.CERT_NONE)
        client.db.command('ismaster')
251 | ||
252 | ||
class TestMockupDB(unittest.TestCase):
    """Tests for MockupDB server-level behavior."""

    def test_iteration(self):
        server = MockupDB(auto_ismaster={'maxWireVersion': 3})
        server.run()
        self.addCleanup(server.stop)
        client = MongoClient(server.uri)

        def send_three_docs():
            for i in range(3):
                client.test.test.insert({'_id': i})

        # Run the inserts in a background thread while this thread consumes
        # the requests by iterating the server directly.
        with going(send_three_docs):
            j = 0

            # The "for request in server" statement is the point of this test.
            for request in server:
                self.assertTrue(request.matches({'insert': 'test',
                                                 'documents': [{'_id': j}]}))

                request.ok()
                j += 1
                if j == 3:
                    break
276 | ||
277 | ||
class TestResponse(unittest.TestCase):
    """Tests for Request reply helpers."""

    def test_ok(self):
        # ok() with a numeric argument sets the reply's "ok" field to it.
        server = MockupDB(auto_ismaster={'maxWireVersion': 3})
        server.run()
        self.addCleanup(server.stop)
        client = MongoClient(server.uri)

        with going(client.test.command, {'foo': 1}) as future:
            request = server.receives()
            request.ok(3)

        self.assertEqual(3, future()['ok'])
290 | ||
291 | ||
# Allow running this module directly: `python test_mockupdb.py`.
if __name__ == '__main__':
    unittest.main()
0 | [tox] | |
1 | envlist = | |
2 | {py26,py27,py33,py34}-test, | |
3 | {py26,py27,py33,py34}-doctest | |
4 | ||
5 | [testenv] | |
6 | setenv = | |
7 | PYTHONPATH = {toxinidir}:{toxinidir}/mockupdb | |
8 | changedir = | |
9 | doctest: docs | |
10 | commands = | |
11 | test: python setup.py test | |
12 | doctest: sphinx-build -q -E -n -b doctest . {envtmpdir}/doctest | |
13 | ||
14 | deps = | |
15 | doctest: sphinx | |
16 | doctest: pymongo>=3 | |
17 | py26-doctest: ordereddict |