Update upstream source from tag 'upstream/0.1.1'
Update to upstream version '0.1.1'
with Debian dir ccf5262ced19d550a6b33fdf0c99cb7b178b208c
Sandro Tosi
1 year, 5 months ago
0 | ref-names: tag: v0.1.1 |
0 | # Force LF line endings for text files | |
1 | * text=auto eol=lf | |
2 | ||
3 | # Needed for setuptools-scm-git-archive | |
4 | .git_archival.txt export-subst |
0 | --- | |
1 | name: Bug Report | |
2 | description: Create a report to help us improve. | |
3 | labels: [bug] | |
4 | body: | |
5 | - type: markdown | |
6 | attributes: | |
7 | value: | | |
8 | **Thanks for taking a minute to file a bug report!** | |
9 | ||
10 | ⚠ | |
11 | Verify first that your issue is not [already reported on | |
12 | GitHub][issue search]. | |
13 | ||
14 | _Please fill out the form below with as many precise | |
15 | details as possible._ | |
16 | ||
17 | [issue search]: ../search?q=is%3Aissue&type=issues | |
18 | ||
19 | - type: textarea | |
20 | attributes: | |
21 | label: Describe the bug | |
22 | description: >- | |
23 | A clear and concise description of what the bug is. | |
24 | validations: | |
25 | required: true | |
26 | ||
27 | - type: textarea | |
28 | attributes: | |
29 | label: To Reproduce | |
30 | description: >- | |
31 | Describe the steps to reproduce this bug. | |
32 | placeholder: | | |
33 | 1. Have certain environment | |
34 | 2. Then run '...' | |
35 | 3. An error occurs. | |
36 | validations: | |
37 | required: true | |
38 | ||
39 | - type: textarea | |
40 | attributes: | |
41 | label: Expected behavior | |
42 | description: >- | |
43 | A clear and concise description of what you expected to happen. | |
44 | validations: | |
45 | required: true | |
46 | ||
47 | - type: textarea | |
48 | attributes: | |
49 | label: Logs/tracebacks | |
50 | description: | | |
51 | If applicable, add logs/tracebacks to help explain your problem. | |
52 | Paste the output of the steps above, including the commands | |
53 | themselves and their output/traceback etc. | |
54 | render: python-traceback | |
55 | validations: | |
56 | required: true | |
57 | ||
58 | - type: textarea | |
59 | attributes: | |
60 | label: Python Version | |
61 | description: Attach your version of Python. | |
62 | render: console | |
63 | value: | | |
64 | $ python --version | |
65 | validations: | |
66 | required: true | |
67 | - type: textarea | |
68 | attributes: | |
69 | label: aiomysql Version | |
70 | description: Attach your version of aiomysql. | |
71 | render: console | |
72 | value: | | |
73 | $ python -m pip show aiomysql | |
74 | validations: | |
75 | required: true | |
76 | - type: textarea | |
77 | attributes: | |
78 | label: PyMySQL Version | |
79 | description: Attach your version of PyMySQL. | |
80 | render: console | |
81 | value: | | |
82 | $ python -m pip show PyMySQL | |
83 | validations: | |
84 | required: true | |
85 | - type: textarea | |
86 | attributes: | |
87 | label: SQLAlchemy Version | |
88 | description: Attach your version of SQLAlchemy if you're using it. | |
89 | render: console | |
90 | value: | | |
91 | $ python -m pip show sqlalchemy | |
92 | ||
93 | - type: textarea | |
94 | attributes: | |
95 | label: OS | |
96 | placeholder: >- | |
97 | For example, Arch Linux, Windows, macOS, etc. | |
98 | validations: | |
99 | required: true | |
100 | ||
101 | - type: textarea | |
102 | attributes: | |
103 | label: Database type and version | |
104 | description: Attach your version of MariaDB/MySQL. | |
105 | render: console | |
106 | value: | | |
107 | SELECT VERSION(); | |
108 | validations: | |
109 | required: true | |
110 | ||
111 | - type: textarea | |
112 | attributes: | |
113 | label: Additional context | |
114 | description: | | |
115 | Add any other context about the problem here. | |
116 | ||
117 | Describe the environment you have that led to your issue. | |
118 | ||
119 | - type: checkboxes | |
120 | attributes: | |
121 | label: Code of Conduct | |
122 | description: | | |
123 | Read the [aio-libs Code of Conduct][CoC] first. | |
124 | ||
125 | [CoC]: https://github.com/aio-libs/.github/blob/master/CODE_OF_CONDUCT.md | |
126 | options: | |
127 | - label: I agree to follow the aio-libs Code of Conduct | |
128 | required: true | |
129 | ... |
0 | --- | |
1 | name: 🚀 Feature request | |
2 | description: Suggest an idea for this project. | |
3 | labels: enhancement | |
4 | body: | |
5 | - type: markdown | |
6 | attributes: | |
7 | value: | | |
8 | **Thanks for taking a minute to file a feature for aiomysql!** | |
9 | ||
10 | ⚠ | |
11 | Verify first that your feature request is not [already reported on | |
12 | GitHub][issue search]. | |
13 | ||
14 | _Please fill out the form below with as many precise | |
15 | details as possible._ | |
16 | ||
17 | [issue search]: ../search?q=is%3Aissue&type=issues | |
18 | ||
19 | - type: textarea | |
20 | attributes: | |
21 | label: Is your feature request related to a problem? | |
22 | description: >- | |
23 | Please add a clear and concise description of what | |
24 | the problem is. _Ex. I'm always frustrated when [...]_ | |
25 | ||
26 | - type: textarea | |
27 | attributes: | |
28 | label: Describe the solution you'd like | |
29 | description: >- | |
30 | A clear and concise description of what you want to happen. | |
31 | validations: | |
32 | required: true | |
33 | ||
34 | - type: textarea | |
35 | attributes: | |
36 | label: Describe alternatives you've considered | |
37 | description: >- | |
38 | A clear and concise description of any alternative solutions | |
39 | or features you've considered. | |
40 | validations: | |
41 | required: true | |
42 | ||
43 | - type: textarea | |
44 | attributes: | |
45 | label: Additional context | |
46 | description: >- | |
47 | Add any other context or screenshots about | |
48 | the feature request here. | |
49 | ||
50 | - type: checkboxes | |
51 | attributes: | |
52 | label: Code of Conduct | |
53 | description: | | |
54 | Read the [aio-libs Code of Conduct][CoC] first. | |
55 | ||
56 | [CoC]: https://github.com/aio-libs/.github/blob/master/CODE_OF_CONDUCT.md | |
57 | options: | |
58 | - label: I agree to follow the aio-libs Code of Conduct | |
59 | required: true | |
60 | ... |
0 | version: 2 | |
1 | updates: | |
2 | - package-ecosystem: pip | |
3 | directory: "/" | |
4 | schedule: | |
5 | interval: daily | |
6 | open-pull-requests-limit: 10 | |
7 | target-branch: master | |
8 | - package-ecosystem: github-actions | |
9 | directory: / | |
10 | schedule: | |
11 | interval: daily | |
12 | open-pull-requests-limit: 10 |
0 | name: CI/CD | |
1 | ||
2 | on: | |
3 | push: | |
4 | branches-ignore: | |
5 | - dependabot/** | |
6 | pull_request: | |
7 | workflow_dispatch: | |
8 | inputs: | |
9 | release-version: | |
10 | # github.event_name == 'workflow_dispatch' | |
11 | # && github.event.inputs.release-version | |
12 | description: >- | |
13 | Target PEP440-compliant version to release. | |
14 | Please, don't prepend `v`. | |
15 | required: true | |
16 | release-commitish: | |
17 | # github.event_name == 'workflow_dispatch' | |
18 | # && github.event.inputs.release-commitish | |
19 | default: '' | |
20 | description: >- | |
21 | The commit to be released to PyPI and tagged | |
22 | in Git as `release-version`. Normally, you | |
23 | should keep this empty. | |
24 | required: false | |
25 | YOLO: | |
26 | default: false | |
27 | description: >- | |
28 | Flag whether test results should block the | |
29 | release (true/false). Only use this under | |
30 | extraordinary circumstances to ignore the | |
31 | test failures and cut the release regardless. | |
32 | required: false | |
33 | schedule: | |
34 | - cron: 1 0 * * * # Run daily at 0:01 UTC | |
35 | ||
36 | concurrency: | |
37 | group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }} | |
38 | cancel-in-progress: true | |
39 | ||
40 | jobs: | |
41 | pre-setup: | |
42 | name: ⚙️ Pre-set global build settings | |
43 | runs-on: ubuntu-latest | |
44 | defaults: | |
45 | run: | |
46 | shell: python | |
47 | outputs: | |
48 | dist-version: >- | |
49 | ${{ | |
50 | steps.request-check.outputs.release-requested == 'true' | |
51 | && github.event.inputs.release-version | |
52 | || steps.scm-version.outputs.dist-version | |
53 | }} | |
54 | is-untagged-devel: >- | |
55 | ${{ steps.untagged-check.outputs.is-untagged-devel || false }} | |
56 | release-requested: >- | |
57 | ${{ | |
58 | steps.request-check.outputs.release-requested || false | |
59 | }} | |
60 | cache-key-files: >- | |
61 | ${{ steps.calc-cache-key-files.outputs.files-hash-key }} | |
62 | git-tag: ${{ steps.git-tag.outputs.tag }} | |
63 | sdist-artifact-name: ${{ steps.artifact-name.outputs.sdist }} | |
64 | wheel-artifact-name: ${{ steps.artifact-name.outputs.wheel }} | |
65 | steps: | |
66 | - name: Switch to using Python 3.10 by default | |
67 | uses: actions/setup-python@v3 | |
68 | with: | |
69 | python-version: >- | |
70 | 3.10 | |
71 | - name: >- | |
72 | Mark the build as untagged '${{ | |
73 | github.event.repository.default_branch | |
74 | }}' branch build | |
75 | id: untagged-check | |
76 | if: >- | |
77 | github.event_name == 'push' && | |
78 | github.ref == format( | |
79 | 'refs/heads/{0}', github.event.repository.default_branch | |
80 | ) | |
81 | run: >- | |
82 | print('::set-output name=is-untagged-devel::true') | |
83 | - name: Mark the build as "release request" | |
84 | id: request-check | |
85 | if: github.event_name == 'workflow_dispatch' | |
86 | run: >- | |
87 | print('::set-output name=release-requested::true') | |
88 | - name: Check out src from Git | |
89 | if: >- | |
90 | steps.request-check.outputs.release-requested != 'true' | |
91 | uses: actions/checkout@v3 | |
92 | with: | |
93 | fetch-depth: 0 | |
94 | ref: ${{ github.event.inputs.release-commitish }} | |
95 | - name: >- | |
96 | Calculate Python interpreter version hash value | |
97 | for use in the cache key | |
98 | if: >- | |
99 | steps.request-check.outputs.release-requested != 'true' | |
100 | id: calc-cache-key-py | |
101 | run: | | |
102 | from hashlib import sha512 | |
103 | from sys import version | |
104 | hash = sha512(version.encode()).hexdigest() | |
105 | print(f'::set-output name=py-hash-key::{hash}') | |
106 | - name: >- | |
107 | Calculate dependency files' combined hash value | |
108 | for use in the cache key | |
109 | if: >- | |
110 | steps.request-check.outputs.release-requested != 'true' | |
111 | id: calc-cache-key-files | |
112 | run: | | |
113 | print( | |
114 | "::set-output name=files-hash-key::${{ | |
115 | hashFiles( | |
116 | 'requirements-dev.txt', | |
117 | 'setup.cfg', | |
118 | 'pyproject.toml' | |
119 | ) | |
120 | }}", | |
121 | ) | |
122 | - name: Get pip cache dir | |
123 | id: pip-cache-dir | |
124 | if: >- | |
125 | steps.request-check.outputs.release-requested != 'true' | |
126 | run: >- | |
127 | echo "::set-output name=dir::$(python -m pip cache dir)" | |
128 | shell: bash | |
129 | - name: Set up pip cache | |
130 | if: >- | |
131 | steps.request-check.outputs.release-requested != 'true' | |
132 | uses: actions/cache@v3.0.2 | |
133 | with: | |
134 | path: ${{ steps.pip-cache-dir.outputs.dir }} | |
135 | key: >- | |
136 | ${{ runner.os }}-pip-${{ | |
137 | steps.calc-cache-key-py.outputs.py-hash-key }}-${{ | |
138 | steps.calc-cache-key-files.outputs.files-hash-key }} | |
139 | restore-keys: | | |
140 | ${{ runner.os }}-pip-${{ | |
141 | steps.calc-cache-key-py.outputs.py-hash-key | |
142 | }}- | |
143 | ${{ runner.os }}-pip- | |
144 | ${{ runner.os }}- | |
145 | - name: Drop Git tags from HEAD for non-release requests | |
146 | if: >- | |
147 | steps.request-check.outputs.release-requested != 'true' | |
148 | run: >- | |
149 | git tag --points-at HEAD | |
150 | | | |
151 | xargs git tag --delete | |
152 | shell: bash | |
153 | - name: Set up versioning prerequisites | |
154 | if: >- | |
155 | steps.request-check.outputs.release-requested != 'true' | |
156 | run: >- | |
157 | python -m | |
158 | pip install | |
159 | --user | |
160 | --upgrade | |
161 | setuptools-scm | |
162 | shell: bash | |
163 | - name: Set the current dist version from Git | |
164 | if: steps.request-check.outputs.release-requested != 'true' | |
165 | id: scm-version | |
166 | run: | | |
167 | import setuptools_scm | |
168 | ver = setuptools_scm.get_version( | |
169 | ${{ | |
170 | steps.untagged-check.outputs.is-untagged-devel == 'true' | |
171 | && 'local_scheme="no-local-version"' || '' | |
172 | }} | |
173 | ) | |
174 | print('::set-output name=dist-version::{ver}'.format(ver=ver)) | |
175 | - name: Set the target Git tag | |
176 | id: git-tag | |
177 | run: >- | |
178 | print('::set-output name=tag::v${{ | |
179 | steps.request-check.outputs.release-requested == 'true' | |
180 | && github.event.inputs.release-version | |
181 | || steps.scm-version.outputs.dist-version | |
182 | }}') | |
183 | - name: Set the expected dist artifact names | |
184 | id: artifact-name | |
185 | run: | | |
186 | print('::set-output name=sdist::aiomysql-${{ | |
187 | steps.request-check.outputs.release-requested == 'true' | |
188 | && github.event.inputs.release-version | |
189 | || steps.scm-version.outputs.dist-version | |
190 | }}.tar.gz') | |
191 | print('::set-output name=wheel::aiomysql-${{ | |
192 | steps.request-check.outputs.release-requested == 'true' | |
193 | && github.event.inputs.release-version | |
194 | || steps.scm-version.outputs.dist-version | |
195 | }}-py3-none-any.whl') | |
196 | ||
197 | build: | |
198 | name: >- | |
199 | 👷 dists ${{ needs.pre-setup.outputs.git-tag }} | |
200 | [mode: ${{ | |
201 | fromJSON(needs.pre-setup.outputs.is-untagged-devel) | |
202 | && 'nightly' || '' | |
203 | }}${{ | |
204 | fromJSON(needs.pre-setup.outputs.release-requested) | |
205 | && 'release' || '' | |
206 | }}${{ | |
207 | ( | |
208 | !fromJSON(needs.pre-setup.outputs.is-untagged-devel) | |
209 | && !fromJSON(needs.pre-setup.outputs.release-requested) | |
210 | ) && 'test' || '' | |
211 | }}] | |
212 | needs: | |
213 | - pre-setup # transitive, for accessing settings | |
214 | ||
215 | runs-on: ubuntu-latest | |
216 | ||
217 | env: | |
218 | PY_COLORS: 1 | |
219 | ||
220 | steps: | |
221 | - name: Switch to using Python v3.10 | |
222 | uses: actions/setup-python@v3 | |
223 | with: | |
224 | python-version: >- | |
225 | 3.10 | |
226 | - name: >- | |
227 | Calculate Python interpreter version hash value | |
228 | for use in the cache key | |
229 | id: calc-cache-key-py | |
230 | run: | | |
231 | from hashlib import sha512 | |
232 | from sys import version | |
233 | hash = sha512(version.encode()).hexdigest() | |
234 | print(f'::set-output name=py-hash-key::{hash}') | |
235 | shell: python | |
236 | - name: Get pip cache dir | |
237 | id: pip-cache-dir | |
238 | run: >- | |
239 | echo "::set-output name=dir::$(python -m pip cache dir)" | |
240 | - name: Set up pip cache | |
241 | uses: actions/cache@v3.0.2 | |
242 | with: | |
243 | path: ${{ steps.pip-cache-dir.outputs.dir }} | |
244 | key: >- | |
245 | ${{ runner.os }}-pip-${{ | |
246 | steps.calc-cache-key-py.outputs.py-hash-key }}-${{ | |
247 | needs.pre-setup.outputs.cache-key-files }} | |
248 | restore-keys: | | |
249 | ${{ runner.os }}-pip-${{ | |
250 | steps.calc-cache-key-py.outputs.py-hash-key | |
251 | }}- | |
252 | ${{ runner.os }}-pip- | |
253 | - name: Install build tools | |
254 | run: >- | |
255 | python -m | |
256 | pip install | |
257 | --user | |
258 | --upgrade | |
259 | build | |
260 | ||
261 | - name: Grab the source from Git | |
262 | uses: actions/checkout@v3 | |
263 | with: | |
264 | fetch-depth: >- | |
265 | ${{ | |
266 | steps.request-check.outputs.release-requested == 'true' | |
267 | && 1 || 0 | |
268 | }} | |
269 | ref: ${{ github.event.inputs.release-commitish }} | |
270 | ||
271 | - name: Setup git user as [bot] | |
272 | if: >- | |
273 | fromJSON(needs.pre-setup.outputs.is-untagged-devel) | |
274 | || fromJSON(needs.pre-setup.outputs.release-requested) | |
275 | uses: fregante/setup-git-user@6cef8bf084d00360a293e0cc3c56e1b45d6502b8 | |
276 | - name: >- | |
277 | Tag the release in the local Git repo | |
278 | as ${{ needs.pre-setup.outputs.git-tag }} | |
279 | for setuptools-scm to set the desired version | |
280 | if: >- | |
281 | fromJSON(needs.pre-setup.outputs.is-untagged-devel) | |
282 | || fromJSON(needs.pre-setup.outputs.release-requested) | |
283 | run: >- | |
284 | git tag | |
285 | -m '${{ needs.pre-setup.outputs.git-tag }}' | |
286 | '${{ needs.pre-setup.outputs.git-tag }}' | |
287 | -- | |
288 | ${{ github.event.inputs.release-commitish }} | |
289 | - name: Build dists | |
290 | run: >- | |
291 | python | |
292 | -m | |
293 | build | |
294 | - name: Verify that the artifacts with expected names got created | |
295 | run: >- | |
296 | ls -1 | |
297 | 'dist/${{ needs.pre-setup.outputs.sdist-artifact-name }}' | |
298 | 'dist/${{ needs.pre-setup.outputs.wheel-artifact-name }}' | |
299 | - name: Store the distribution packages | |
300 | uses: actions/upload-artifact@v3 | |
301 | with: | |
302 | name: python-package-distributions | |
303 | # NOTE: Exact expected file names are specified here | |
304 | # NOTE: as a safety measure — if anything weird ends | |
305 | # NOTE: up being in this dir or not all dists will be | |
306 | # NOTE: produced, this will fail the workflow. | |
307 | path: | | |
308 | dist/${{ needs.pre-setup.outputs.sdist-artifact-name }} | |
309 | dist/${{ needs.pre-setup.outputs.wheel-artifact-name }} | |
310 | retention-days: 30 | |
311 | ||
312 | lint: | |
313 | name: 🧹 Lint | |
314 | ||
315 | needs: | |
316 | - build | |
317 | - pre-setup # transitive, for accessing settings | |
318 | ||
319 | runs-on: ubuntu-latest | |
320 | ||
321 | env: | |
322 | PY_COLORS: 1 | |
323 | ||
324 | steps: | |
325 | - name: Switch to using Python 3.10 by default | |
326 | uses: actions/setup-python@v3 | |
327 | with: | |
328 | python-version: >- | |
329 | 3.10 | |
330 | - name: >- | |
331 | Calculate Python interpreter version hash value | |
332 | for use in the cache key | |
333 | id: calc-cache-key-py | |
334 | run: | | |
335 | from hashlib import sha512 | |
336 | from sys import version | |
337 | hash = sha512(version.encode()).hexdigest() | |
338 | print(f'::set-output name=py-hash-key::{hash}') | |
339 | shell: python | |
340 | - name: Get pip cache dir | |
341 | id: pip-cache-dir | |
342 | run: >- | |
343 | echo "::set-output name=dir::$(python -m pip cache dir)" | |
344 | - name: Set up pip cache | |
345 | uses: actions/cache@v3.0.2 | |
346 | with: | |
347 | path: ${{ steps.pip-cache-dir.outputs.dir }} | |
348 | key: >- | |
349 | ${{ runner.os }}-pip-${{ | |
350 | steps.calc-cache-key-py.outputs.py-hash-key }}-${{ | |
351 | needs.pre-setup.outputs.cache-key-files }} | |
352 | restore-keys: | | |
353 | ${{ runner.os }}-pip-${{ | |
354 | steps.calc-cache-key-py.outputs.py-hash-key | |
355 | }}- | |
356 | ${{ runner.os }}-pip- | |
357 | ||
358 | - name: Grab the source from Git | |
359 | uses: actions/checkout@v3 | |
360 | with: | |
361 | ref: ${{ github.event.inputs.release-commitish }} | |
362 | ||
363 | - name: Download all the dists | |
364 | uses: actions/download-artifact@v3 | |
365 | with: | |
366 | name: python-package-distributions | |
367 | path: dist/ | |
368 | ||
369 | - name: Install build tools | |
370 | run: >- | |
371 | python -m | |
372 | pip install | |
373 | --user | |
374 | --requirement requirements-dev.txt | |
375 | ||
376 | - name: flake8 Lint | |
377 | uses: py-actions/flake8@v2.0.0 | |
378 | with: | |
379 | flake8-version: 4.0.1 | |
380 | path: aiomysql | |
381 | args: tests examples | |
382 | ||
383 | - name: Check package description | |
384 | run: | | |
385 | python -m twine check --strict dist/* | |
386 | ||
387 | tests: | |
388 | name: >- | |
389 | 🧪 🐍${{ | |
390 | matrix.py | |
391 | }} @ ${{ | |
392 | matrix.os | |
393 | }} on ${{ | |
394 | join(matrix.db, '-') | |
395 | }} | |
396 | needs: | |
397 | - build | |
398 | - pre-setup # transitive, for accessing settings | |
399 | strategy: | |
400 | matrix: | |
401 | # service containers are only supported on ubuntu currently | |
402 | os: | |
403 | - ubuntu-latest | |
404 | py: | |
405 | - '3.7' | |
406 | - '3.8' | |
407 | - '3.9' | |
408 | - '3.10' | |
409 | - '3.11-dev' | |
410 | db: | |
411 | - [mysql, '5.7'] | |
412 | - [mysql, '8.0'] | |
413 | - [mariadb, '10.2'] | |
414 | - [mariadb, '10.3'] | |
415 | - [mariadb, '10.4'] | |
416 | - [mariadb, '10.5'] | |
417 | - [mariadb, '10.6'] | |
418 | - [mariadb, '10.7'] | |
419 | ||
420 | fail-fast: false | |
421 | runs-on: ${{ matrix.os }} | |
422 | timeout-minutes: 15 | |
423 | ||
424 | continue-on-error: >- | |
425 | ${{ | |
426 | ( | |
427 | ( | |
428 | needs.pre-setup.outputs.release-requested == 'true' && | |
429 | !toJSON(github.event.inputs.YOLO) | |
430 | ) || | |
431 | contains(matrix.py, '-dev') | |
432 | ) && true || false | |
433 | }} | |
434 | ||
435 | env: | |
436 | MYSQL_ROOT_PASSWORD: rootpw | |
437 | PY_COLORS: 1 | |
438 | ||
439 | services: | |
440 | mysql: | |
441 | image: "${{ join(matrix.db, ':') }}" | |
442 | ports: | |
443 | - 3306:3306 | |
444 | volumes: | |
445 | - "/tmp/run-${{ join(matrix.db, '-') }}/:/socket-mount/" | |
446 | options: '--name=mysqld' | |
447 | env: | |
448 | MYSQL_ROOT_PASSWORD: rootpw | |
449 | ||
450 | steps: | |
451 | - name: Setup Python ${{ matrix.py }} | |
452 | uses: actions/setup-python@v3 | |
453 | with: | |
454 | python-version: ${{ matrix.py }} | |
455 | ||
456 | - name: Figure out if the interpreter ABI is stable | |
457 | id: py-abi | |
458 | run: | | |
459 | from sys import version_info | |
460 | is_stable_abi = version_info.releaselevel == 'final' | |
461 | print( | |
462 | '::set-output name=is-stable-abi::{is_stable_abi}'. | |
463 | format(is_stable_abi=str(is_stable_abi).lower()) | |
464 | ) | |
465 | shell: python | |
466 | ||
467 | - name: >- | |
468 | Calculate Python interpreter version hash value | |
469 | for use in the cache key | |
470 | if: fromJSON(steps.py-abi.outputs.is-stable-abi) | |
471 | id: calc-cache-key-py | |
472 | run: | | |
473 | from hashlib import sha512 | |
474 | from sys import version | |
475 | hash = sha512(version.encode()).hexdigest() | |
476 | print('::set-output name=py-hash-key::{hash}'.format(hash=hash)) | |
477 | shell: python | |
478 | ||
479 | - name: Get pip cache dir | |
480 | if: fromJSON(steps.py-abi.outputs.is-stable-abi) | |
481 | id: pip-cache-dir | |
482 | run: >- | |
483 | echo "::set-output name=dir::$(python -m pip cache dir)" | |
484 | ||
485 | - name: Set up pip cache | |
486 | if: fromJSON(steps.py-abi.outputs.is-stable-abi) | |
487 | uses: actions/cache@v3.0.2 | |
488 | with: | |
489 | path: ${{ steps.pip-cache-dir.outputs.dir }} | |
490 | key: >- | |
491 | ${{ runner.os }}-pip-${{ | |
492 | steps.calc-cache-key-py.outputs.py-hash-key }}-${{ | |
493 | needs.pre-setup.outputs.cache-key-files }} | |
494 | restore-keys: | | |
495 | ${{ runner.os }}-pip-${{ | |
496 | steps.calc-cache-key-py.outputs.py-hash-key | |
497 | }}- | |
498 | ${{ runner.os }}-pip- | |
499 | ||
500 | - name: Update pip | |
501 | run: >- | |
502 | python -m | |
503 | pip install | |
504 | --user | |
505 | pip | |
506 | ||
507 | - name: Grab the source from Git | |
508 | uses: actions/checkout@v3 | |
509 | with: | |
510 | ref: ${{ github.event.inputs.release-commitish }} | |
511 | ||
512 | - name: Remove aiomysql source to avoid accidentally using it | |
513 | run: >- | |
514 | rm -rf aiomysql | |
515 | ||
516 | - name: Download all the dists | |
517 | uses: actions/download-artifact@v3 | |
518 | with: | |
519 | name: python-package-distributions | |
520 | path: dist/ | |
521 | ||
522 | - name: Install dependencies | |
523 | run: >- | |
524 | python -m | |
525 | pip install | |
526 | --user | |
527 | --requirement requirements-dev.txt | |
528 | ||
529 | - name: Install previously built wheel | |
530 | run: >- | |
531 | python -m | |
532 | pip install | |
533 | --user | |
534 | 'dist/${{ needs.pre-setup.outputs.wheel-artifact-name }}' | |
535 | ||
536 | - name: >- | |
537 | Log platform.platform() | |
538 | run: >- | |
539 | python -m platform | |
540 | - name: >- | |
541 | Log platform.version() | |
542 | run: >- | |
543 | python -c "import platform; | |
544 | print(platform.version())" | |
545 | - name: >- | |
546 | Log platform.uname() | |
547 | run: >- | |
548 | python -c "import platform; | |
549 | print(platform.uname())" | |
550 | - name: >- | |
551 | Log platform.release() | |
552 | run: >- | |
553 | python -c "import platform; | |
554 | print(platform.release())" | |
555 | - name: Log stdlib OpenSSL version | |
556 | run: >- | |
557 | python -c | |
558 | "import ssl; print('\nOPENSSL_VERSION: ' | |
559 | + ssl.OPENSSL_VERSION + '\nOPENSSL_VERSION_INFO: ' | |
560 | + repr(ssl.OPENSSL_VERSION_INFO) | |
561 | + '\nOPENSSL_VERSION_NUMBER: ' | |
562 | + repr(ssl.OPENSSL_VERSION_NUMBER))" | |
563 | ||
564 | # this ensures our database is ready. Typically, by the time the preparations have completed, it has finished its first start logic. | |
565 | # unfortunately we need this hacky workaround as GitHub Actions service containers can't reference data from our repo. | |
566 | - name: Prepare mysql | |
567 | run: | | |
568 | # ensure server is started up | |
569 | while : | |
570 | do | |
571 | sleep 1 | |
572 | mysql -h127.0.0.1 -uroot "-p$MYSQL_ROOT_PASSWORD" -e 'select version()' && break | |
573 | done | |
574 | ||
575 | # inject tls configuration | |
576 | docker container stop mysqld | |
577 | docker container cp "${{ github.workspace }}/tests/ssl_resources/ssl" mysqld:/etc/mysql/ssl | |
578 | docker container cp "${{ github.workspace }}/tests/ssl_resources/tls.cnf" mysqld:/etc/mysql/conf.d/aiomysql-tls.cnf | |
579 | ||
580 | # use custom socket path | |
581 | # we need to ensure that the socket path is writable for the user running the DB process in the container | |
582 | sudo chmod 0777 /tmp/run-${{ join(matrix.db, '-') }} | |
583 | ||
584 | # mysql 5.7 container overrides the socket path in /etc/mysql/mysql.conf.d/mysqld.cnf | |
585 | if [ "${{ join(matrix.db, '-') }}" = "mysql-5.7" ] | |
586 | then | |
587 | docker container cp "${{ github.workspace }}/tests/ssl_resources/socket.cnf" mysqld:/etc/mysql/mysql.conf.d/zz-aiomysql-socket.cnf | |
588 | else | |
589 | docker container cp "${{ github.workspace }}/tests/ssl_resources/socket.cnf" mysqld:/etc/mysql/conf.d/aiomysql-socket.cnf | |
590 | fi | |
591 | ||
592 | docker container start mysqld | |
593 | ||
594 | # ensure server is started up | |
595 | while : | |
596 | do | |
597 | sleep 1 | |
598 | mysql -h127.0.0.1 -uroot "-p$MYSQL_ROOT_PASSWORD" -e 'select version()' && break | |
599 | done | |
600 | ||
601 | mysql -h127.0.0.1 -uroot "-p$MYSQL_ROOT_PASSWORD" -e "SET GLOBAL local_infile=on" | |
602 | ||
603 | - name: Run tests | |
604 | run: | | |
605 | # timeout ensures a more or less clean stop by sending a KeyboardInterrupt which will still provide useful logs | |
606 | timeout --preserve-status --signal=INT --verbose 5m \ | |
607 | pytest --capture=no --verbosity 2 --cov-report term --cov-report xml --cov aiomysql --cov tests ./tests --mysql-unix-socket "unix-${{ join(matrix.db, '') }}=/tmp/run-${{ join(matrix.db, '-') }}/mysql.sock" --mysql-address "tcp-${{ join(matrix.db, '') }}=127.0.0.1:3306" | |
608 | env: | |
609 | PYTHONUNBUFFERED: 1 | |
610 | timeout-minutes: 6 | |
611 | ||
612 | - name: Upload coverage | |
613 | if: ${{ github.event_name != 'schedule' }} | |
614 | uses: codecov/codecov-action@v3.1.0 | |
615 | with: | |
616 | file: ./coverage.xml | |
617 | flags: "${{ matrix.os }}_${{ matrix.py }}_${{ join(matrix.db, '-') }}" | |
618 | fail_ci_if_error: true | |
619 | ||
620 | check: # This job does nothing and is only used for the branch protection | |
621 | if: always() | |
622 | ||
623 | needs: | |
624 | - lint | |
625 | - tests | |
626 | ||
627 | runs-on: ubuntu-latest | |
628 | ||
629 | steps: | |
630 | - name: Decide whether the needed jobs succeeded or failed | |
631 | uses: re-actors/alls-green@v1.1.0 | |
632 | with: | |
633 | jobs: ${{ toJSON(needs) }} | |
634 | ||
635 | publish-pypi: | |
636 | name: Publish 🐍📦 ${{ needs.pre-setup.outputs.git-tag }} to PyPI | |
637 | needs: | |
638 | - check | |
639 | - pre-setup # transitive, for accessing settings | |
640 | if: >- | |
641 | fromJSON(needs.pre-setup.outputs.release-requested) | |
642 | runs-on: ubuntu-latest | |
643 | ||
644 | environment: | |
645 | name: pypi | |
646 | url: >- | |
647 | https://pypi.org/project/aiomysql/${{ | |
648 | needs.pre-setup.outputs.dist-version | |
649 | }} | |
650 | ||
651 | steps: | |
652 | - name: Download all the dists | |
653 | uses: actions/download-artifact@v3 | |
654 | with: | |
655 | name: python-package-distributions | |
656 | path: dist/ | |
657 | - name: >- | |
658 | Publish 🐍📦 ${{ needs.pre-setup.outputs.git-tag }} to PyPI | |
659 | uses: pypa/gh-action-pypi-publish@v1.5.0 | |
660 | with: | |
661 | password: ${{ secrets.PYPI_API_TOKEN }} | |
662 | print_hash: true | |
663 | ||
664 | publish-testpypi: | |
665 | name: Publish 🐍📦 ${{ needs.pre-setup.outputs.git-tag }} to TestPyPI | |
666 | needs: | |
667 | - check | |
668 | - pre-setup # transitive, for accessing settings | |
669 | if: >- | |
670 | fromJSON(needs.pre-setup.outputs.is-untagged-devel) | |
671 | || fromJSON(needs.pre-setup.outputs.release-requested) | |
672 | runs-on: ubuntu-latest | |
673 | ||
674 | environment: | |
675 | name: testpypi | |
676 | url: >- | |
677 | https://test.pypi.org/project/aiomysql/${{ | |
678 | needs.pre-setup.outputs.dist-version | |
679 | }} | |
680 | ||
681 | steps: | |
682 | - name: Download all the dists | |
683 | uses: actions/download-artifact@v3 | |
684 | with: | |
685 | name: python-package-distributions | |
686 | path: dist/ | |
687 | - name: >- | |
688 | Publish 🐍📦 ${{ needs.pre-setup.outputs.git-tag }} to TestPyPI | |
689 | uses: pypa/gh-action-pypi-publish@v1.5.0 | |
690 | with: | |
691 | password: ${{ secrets.PYPI_API_TOKEN }} | |
692 | repository_url: https://test.pypi.org/legacy/ | |
693 | print_hash: true | |
694 | ||
695 | post-release-repo-update: | |
696 | name: >- | |
697 | Publish post-release Git tag | |
698 | for ${{ needs.pre-setup.outputs.git-tag }} | |
699 | needs: | |
700 | - publish-pypi | |
701 | - pre-setup # transitive, for accessing settings | |
702 | runs-on: ubuntu-latest | |
703 | ||
704 | steps: | |
705 | - name: Fetch the src snapshot | |
706 | uses: actions/checkout@v3 | |
707 | with: | |
708 | fetch-depth: 1 | |
709 | ref: ${{ github.event.inputs.release-commitish }} | |
710 | - name: Setup git user as [bot] | |
711 | uses: fregante/setup-git-user@6cef8bf084d00360a293e0cc3c56e1b45d6502b8 | |
712 | ||
713 | - name: >- | |
714 | Tag the release in the local Git repo | |
715 | as ${{ needs.pre-setup.outputs.git-tag }} | |
716 | run: >- | |
717 | git tag | |
718 | -m '${{ needs.pre-setup.outputs.git-tag }}' | |
719 | '${{ needs.pre-setup.outputs.git-tag }}' | |
720 | -- | |
721 | ${{ github.event.inputs.release-commitish }} | |
722 | - name: >- | |
723 | Push ${{ needs.pre-setup.outputs.git-tag }} tag corresponding | |
724 | to the just published release back to GitHub | |
725 | run: >- | |
726 | git push --atomic origin '${{ needs.pre-setup.outputs.git-tag }}' | |
727 | ||
728 | publish-github-release: | |
729 | name: >- | |
730 | Publish a tag and GitHub release for | |
731 | ${{ needs.pre-setup.outputs.git-tag }} | |
732 | needs: | |
733 | - post-release-repo-update | |
734 | - pre-setup # transitive, for accessing settings | |
735 | runs-on: ubuntu-latest | |
736 | ||
737 | permissions: | |
738 | contents: write | |
739 | discussions: write | |
740 | ||
741 | steps: | |
742 | - name: Fetch the src snapshot | |
743 | uses: actions/checkout@v3 | |
744 | with: | |
745 | fetch-depth: 1 | |
746 | ref: ${{ github.event.inputs.release-commitish }} | |
747 | ||
748 | - name: Download all the dists | |
749 | uses: actions/download-artifact@v3 | |
750 | with: | |
751 | name: python-package-distributions | |
752 | path: dist/ | |
753 | ||
754 | - name: >- | |
755 | Publish a GitHub Release for | |
756 | ${{ needs.pre-setup.outputs.git-tag }} | |
757 | uses: ncipollo/release-action@58ae73b360456532aafd58ee170c045abbeaee37 | |
758 | with: | |
759 | artifacts: | | |
760 | dist/${{ needs.pre-setup.outputs.sdist-artifact-name }} | |
761 | dist/${{ needs.pre-setup.outputs.wheel-artifact-name }} | |
762 | artifactContentType: raw # Because whl and tgz are of different types | |
763 | # FIXME: Use Towncrier once it is integrated. | |
764 | bodyFile: CHANGES.txt | |
765 | discussionCategory: Announcements | |
766 | name: ${{ needs.pre-setup.outputs.git-tag }} | |
767 | tag: ${{ needs.pre-setup.outputs.git-tag }} |
60 | 60 | # rope |
61 | 61 | .ropeproject |
62 | 62 | |
63 | # pyenv | |
64 | .python-version | |
63 | 65 | |
64 | 66 | tests/fixtures/my.cnf |
65 | 67 | |
66 | .pytest_cache⏎ | |
68 | .pytest_cache |
0 | # .readthedocs.yaml | |
1 | # Read the Docs configuration file | |
2 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details | |
3 | ||
4 | # Required | |
5 | version: 2 | |
6 | ||
7 | build: | |
8 | os: ubuntu-20.04 | |
9 | tools: | |
10 | python: "3.10" | |
11 | ||
12 | sphinx: | |
13 | configuration: docs/conf.py | |
14 | fail_on_warning: false # FIXME | |
15 | ||
16 | formats: | |
17 | ||
18 | - epub | |
19 | ||
20 | python: | |
21 | install: | |
22 | - requirements: requirements-dev.txt | |
23 | - method: pip | |
24 | path: . |
0 | language: python | |
1 | ||
2 | python: | |
3 | - 3.5.3 | |
4 | - 3.6 | |
5 | ||
6 | env: | |
7 | matrix: | |
8 | - PYTHONASYNCIODEBUG=1 | |
9 | - PYTHONASYNCIODEBUG= | |
10 | ||
11 | services: | |
12 | - docker | |
13 | ||
14 | matrix: | |
15 | include: | |
16 | - python: 3.6 | |
17 | env: PYTHONASYNCIODEBUG= | |
18 | addons: | |
19 | mariadb: 5.5 | |
20 | - python: 3.6 | |
21 | env: PYTHONASYNCIODEBUG=1 | |
22 | addons: | |
23 | mariadb: 10.0 | |
24 | - python: 3.6 | |
25 | env: PYTHONASYNCIODEBUG= | |
26 | addons: | |
27 | mariadb: 10.1 | |
28 | - python: 3.6 | |
29 | env: PYTHONASYNCIODEBUG= | |
30 | addons: | |
31 | mysql: 5.7 | |
32 | ||
33 | ||
34 | before_script: | |
35 | - "mysql -e 'SELECT VERSION()'" | |
36 | - "mysql -e 'DROP DATABASE IF EXISTS test_pymysql; create database test_pymysql DEFAULT CHARACTER SET utf8 DEFAULT COLLATE utf8_general_ci;'" | |
37 | - "mysql -e 'DROP DATABASE IF EXISTS test_pymysql2; create database test_pymysql2 DEFAULT CHARACTER SET utf8 DEFAULT COLLATE utf8_general_ci;'" | |
38 | ||
39 | install: | |
40 | - pip install -Ur requirements-dev.txt | |
41 | - pip install . | |
42 | - pip install codecov | |
43 | ||
44 | deploy: | |
45 | provider: pypi | |
46 | user: aio-libs-bot | |
47 | distributions: "sdist bdist_wheel" | |
48 | password: | |
49 | secure: G9vr3UOuK7tJifGEzO1kForcz+DCq1IdtIVKr/e4gPenvENinCFrRRWw206BQf1ba1+EjvRqc/yJFfZrgFUJlxgrtahvFmTCzKiLHCwr2vJsEYyr6JLWKRE81//RKTOykDWwvCAjk6sgV8lYtKL6R5sTtjzLJq8CKsLoKZ97yniPKmNu/+7IJvp+vSOA9gIL+GWbDTP8lmNDLWwphFxq6mm4WQ0VWqwDb0SF3FG/QPGDYU19mPLsLqgf1cxaBuRtb/epRDLLG70M9l9/aBPbtAHdbxY+O+/Fv5RPxfo2xFB7ry7yQEsIGrOwot/TxsZDRwRnfPm8N4OV9AfnKjiF5sBpidRR5kQr2pgFP2xq7LEROOPydMbY+YbSHCRBGCHWsHusjwCCL1veVTZ10EB9j3j0O9C6rAaF6Ssdlfq3kzhbWUItQfIZ/h7C0Z0ucVqFB8uBug7jNxT8hD3pR4ftM6Y94HY4BFOlkmSUH9u7owCeUoV9WQT/QAZHOLswRpp1wjsu2c0zKh3wiiuzCYJ64cD/BQW8rQMp0QiGqsNebqR+3L6yNLLSMpDWp3q/pnVbjsI/yepvpVbpp3PltSfZJL0uUfE0OR+xU67JP4npVS8B/A7aTARcM/ljx7IYNYYf/peXQ6UmBOrR2OgPnRPv5LfG9NGdEy0WpQKCVuSydxk= | |
50 | on: | |
51 | tags: true | |
52 | repo: aio-libs/aiomysql | |
53 | all_branches: true | |
54 | python: 3.6 | |
55 | ||
56 | script: | |
57 | - make cov | |
58 | ||
59 | after_success: | |
60 | - codecov |
0 | 0 | Changes |
1 | 1 | ------- |
2 | 2 | |
3 | 0.1.1 (2022-05-08) | |
4 | ^^^^^^^^^^^^^^^^^^ | |
5 | ||
6 | * Fix SSL connection handshake charset not respecting client configuration #776 | |
7 | ||
8 | 0.1.0 (2022-04-11) | |
9 | ^^^^^^^^^^^^^^^^^^ | |
10 | ||
11 | * Don't send sys.argv[0] as program_name to MySQL server by default #620 | |
12 | ||
13 | * Allow running process as anonymous uid #587 | |
14 | ||
15 | * Fix timed out MySQL 8.0 connections raising InternalError rather than OperationalError #660 | |
16 | ||
17 | * Fix timed out MySQL 8.0 connections being returned from Pool #660 | |
18 | ||
19 | * Ensure connections are properly closed before raising an OperationalError when the server connection is lost #660 | |
20 | ||
21 | * Ensure connections are properly closed before raising an InternalError when packet sequence numbers are out of sync #660 | |
22 | ||
23 | * Unix sockets are now internally considered secure, allowing sha256_password and caching_sha2_password auth methods to be used #695 | |
24 | ||
25 | * Test suite now also tests unix socket connections #696 | |
26 | ||
27 | * Fix SSCursor raising InternalError when last result was not fully retrieved #635 | |
28 | ||
29 | * Remove deprecated no_delay argument #702 | |
30 | ||
31 | * Support PyMySQL up to version 1.0.2 #643 | |
32 | ||
33 | * Bump minimal PyMySQL version to 1.0.0 #713 | |
34 | ||
35 | * Align % formatting in Cursor.executemany() with Cursor.execute(), literal % now needs to be doubled in Cursor.executemany() #714 | |
36 | ||
37 | * Fixed unlimited Pool size not working, this is now working as documented by passing maxsize=0 to create_pool #119 | |
38 | ||
39 | * Added Pool.closed property as present in aiopg #463 | |
40 | ||
41 | * Fixed SQLAlchemy connection context iterator #410 | |
42 | ||
43 | * Fix error packet handling for SSCursor #428 | |
44 | ||
45 | * Required python version is now properly documented in python_requires instead of failing on setup.py execution #731 | |
46 | ||
47 | * Add rsa extras_require depending on PyMySQL[rsa] #557 | |
48 | ||
49 | * Migrate to PEP 517 build system #746 | |
50 | ||
51 | * Self-reported `__version__` now returns version generated by `setuptools-scm` during build, otherwise `'unknown'` #748 | |
52 | ||
53 | * Fix SSCursor raising query timeout error on wrong query #428 | |
54 | ||
55 | ||
56 | 0.0.22 (2021-11-14) | |
57 | ^^^^^^^^^^^^^^^^^^^ | |
58 | ||
59 | * Support python 3.10 #505 | |
60 | ||
61 | ||
62 | 0.0.21 (2020-11-26) | |
63 | ^^^^^^^^^^^^^^^^^^^ | |
64 | ||
65 | * Allow to use custom Cursor subclasses #374 | |
66 | ||
67 | * Fill Connection class with actual client version #388 | |
68 | ||
69 | * Fix legacy __aiter__ methods #403 | |
70 | ||
71 | * Fix & update docs #418 #437 | |
72 | ||
73 | * Ignore pyenv's .python-version file #424 | |
74 | ||
75 | * Replace asyncio.streams.IncompleteReadError with asyncio.IncompleteReadError #460 #454 | |
76 | ||
77 | * Add support for SQLAlchemy default parameters #455 #466 | |
78 | ||
79 | * Update dependencies #485 | |
80 | ||
81 | * Support Python 3.7 & 3.8 #493 | |
82 | ||
83 | ||
3 | 84 | 0.0.20 (2018-12-19) |
4 | 85 | ^^^^^^^^^^^^^^^^^^^ |
5 | 86 |
50 | 50 | |
51 | 51 | There are several ways to make a virtual environment. |
52 | 52 | |
53 | If you like to use *virtualenv* please run:: | |
53 | If you like to use *virtualenv* please run: | |
54 | ||
55 | .. code-block:: sh | |
54 | 56 | |
55 | 57 | $ cd aiomysql |
56 | $ virtualenv --python=`which python3` venv | |
58 | $ virtualenv --python="$(which python3)" venv | |
57 | 59 | |
58 | For standard python *venv*:: | |
60 | For standard python *venv*: | |
61 | ||
62 | .. code-block:: sh | |
59 | 63 | |
60 | 64 | $ cd aiomysql |
61 | 65 | $ python3 -m venv venv |
62 | 66 | |
63 | For *virtualenvwrapper*:: | |
67 | For *virtualenvwrapper*: | |
68 | ||
69 | .. code-block:: sh | |
64 | 70 | |
65 | 71 | $ cd aiomysql |
66 | $ mkvirtualenv --python=`which python3` aiomysql | |
72 | $ mkvirtualenv --python="$(which python3)" aiomysql | |
67 | 73 | |
68 | 74 | There are other tools like *pyvenv* but you know the rule of thumb |
69 | 75 | now: create a python3 virtual environment and activate it. |
70 | 76 | |
71 | After that please install libraries required for development:: | |
77 | After that please install libraries required for development: | |
78 | ||
79 | .. code-block:: sh | |
72 | 80 | |
73 | 81 | $ pip install -r requirements-dev.txt |
74 | 82 | |
82 | 90 | password in `aiomysql/tests/base.py` file or install corresponding environment |
83 | 91 | variables. Tests require two databases to be created before running suite:
84 | 92 | |
85 | :: | |
93 | .. code-block:: sh | |
86 | 94 | |
87 | 95 | $ mysql -u root |
88 | 96 | mysql> CREATE DATABASE test_pymysql DEFAULT CHARACTER SET utf8 DEFAULT COLLATE utf8_general_ci; |
93 | 101 | ----------------------- |
94 | 102 | |
95 | 103 | After all the preconditions are met you can run tests typing the next |
96 | command:: | |
104 | command: | |
105 | ||
106 | .. code-block:: sh | |
97 | 107 | |
98 | 108 | $ make test |
99 | 109 | |
112 | 122 | |
113 | 123 | We are trying hard to have good test coverage; please don't make it worse. |
114 | 124 | |
115 | Use:: | |
125 | Use: | |
126 | ||
127 | .. code-block:: sh | |
116 | 128 | |
117 | 129 | $ make cov |
118 | 130 | |
129 | 141 | |
130 | 142 | We encourage documentation improvements. |
131 | 143 | |
132 | Please before making a Pull Request about documentation changes run:: | |
144 | Please before making a Pull Request about documentation changes run: | |
145 | ||
146 | .. code-block:: sh | |
133 | 147 | |
134 | 148 | $ make doc |
135 | 149 |
0 | include LICENSE | |
1 | include CHANGES.txt | |
2 | include README.rst | |
3 | graft aiomysql | |
4 | global-exclude *.pyc *.swp |
1 | 1 | |
2 | 2 | FLAGS= |
3 | 3 | |
4 | checkrst: | |
5 | python -m twine check --strict dist/* | |
4 | 6 | |
5 | flake: | |
7 | ||
8 | flake:checkrst | |
6 | 9 | flake8 aiomysql tests examples |
7 | 10 | |
8 | 11 | test: flake |
10 | 13 | |
11 | 14 | vtest: |
12 | 15 | py.test -s -v $(FLAGS) ./tests/ |
16 | ||
13 | 17 | |
14 | 18 | cov cover coverage: flake |
15 | 19 | py.test -s -v --cov-report term --cov-report html --cov aiomysql ./tests |
39 | 43 | stop_mysql: |
40 | 44 | docker-compose -f docker-compose.yml stop mysql |
41 | 45 | |
46 | # TODO: this depends on aiomysql being installed, e.g. in a venv. | |
47 | # TODO: maybe this can be solved better. | |
42 | 48 | doc: |
49 | @echo "----------------------------------------------------------------" | |
50 | @echo "Doc builds require installing the aiomysql package in the" | |
51 | @echo "environment. Make sure you've installed your current dev version" | |
52 | @echo "into your environment, e.g. using venv, then run this command in" | |
53 | @echo "the virtual environment." | |
54 | @echo "----------------------------------------------------------------" | |
55 | git fetch --tags --all | |
43 | 56 | make -C docs html |
44 | 57 | @echo "open file://`pwd`/docs/_build/html/index.html" |
45 | 58 |
0 | 0 | aiomysql |
1 | 1 | ======== |
2 | .. image:: https://travis-ci.com/aio-libs/aiomysql.svg?branch=master | |
3 | :target: https://travis-ci.com/aio-libs/aiomysql | |
2 | .. image:: https://github.com/aio-libs/aiomysql/actions/workflows/ci-cd.yml/badge.svg?branch=master | |
3 | :target: https://github.com/aio-libs/aiomysql/actions/workflows/ci-cd.yml | |
4 | 4 | .. image:: https://codecov.io/gh/aio-libs/aiomysql/branch/master/graph/badge.svg |
5 | 5 | :target: https://codecov.io/gh/aio-libs/aiomysql |
6 | 6 | :alt: Code coverage |
27 | 27 | Documentation |
28 | 28 | ------------- |
29 | 29 | https://aiomysql.readthedocs.io/ |
30 | ||
31 | ||
32 | Mailing List | |
33 | ------------ | |
34 | https://groups.google.com/forum/#!forum/aio-libs | |
35 | ||
36 | 30 | |
37 | 31 | Basic Example |
38 | 32 | ------------- |
109 | 103 | Requirements |
110 | 104 | ------------ |
111 | 105 | |
112 | * Python_ 3.5.3+ | |
106 | * Python_ 3.7+ | |
113 | 107 | * PyMySQL_ |
114 | 108 | |
115 | 109 | |
116 | 110 | .. _Python: https://www.python.org |
117 | .. _asyncio: http://docs.python.org/3.4/library/asyncio.html | |
111 | .. _asyncio: http://docs.python.org/3.5/library/asyncio.html | |
118 | 112 | .. _aiopg: https://github.com/aio-libs/aiopg |
119 | 113 | .. _PyMySQL: https://github.com/PyMySQL/PyMySQL |
120 | 114 | .. _Tornado-MySQL: https://github.com/PyMySQL/Tornado-MySQL |
0 | /_scm_version.py |
31 | 31 | from .connection import Connection, connect |
32 | 32 | from .cursors import Cursor, SSCursor, DictCursor, SSDictCursor |
33 | 33 | from .pool import create_pool, Pool |
34 | from ._version import version | |
34 | 35 | |
35 | __version__ = '0.0.20' | |
36 | __version__ = version | |
36 | 37 | |
37 | 38 | __all__ = [ |
38 | 39 |
0 | # This stub file is necessary because `_scm_version.py` | |
1 | # is autogenerated at build time and absent when mypy runs | |
2 | version: str |
14 | 14 | from pymysql.constants import SERVER_STATUS |
15 | 15 | from pymysql.constants import CLIENT |
16 | 16 | from pymysql.constants import COMMAND |
17 | from pymysql.constants import CR | |
17 | 18 | from pymysql.constants import FIELD_TYPE |
18 | from pymysql.util import byte2int, int2byte | |
19 | 19 | from pymysql.converters import (escape_item, encoders, decoders, |
20 | 20 | escape_string, escape_bytes_prefixed, through) |
21 | 21 | from pymysql.err import (Warning, Error, |
27 | 27 | from pymysql.connections import TEXT_TYPES, MAX_PACKET_LEN, DEFAULT_CHARSET |
28 | 28 | from pymysql.connections import _auth |
29 | 29 | |
30 | from pymysql.connections import pack_int24 | |
31 | ||
32 | 30 | from pymysql.connections import MysqlPacket |
33 | 31 | from pymysql.connections import FieldDescriptorPacket |
34 | 32 | from pymysql.connections import EOFPacketWrapper |
35 | 33 | from pymysql.connections import OKPacketWrapper |
36 | 34 | from pymysql.connections import LoadLocalPacketWrapper |
37 | from pymysql.connections import lenenc_int | |
38 | 35 | |
39 | 36 | # from aiomysql.utils import _convert_to_str |
40 | 37 | from .cursors import Cursor |
41 | from .utils import _ConnectionContextManager, _ContextManager | |
38 | from .utils import _pack_int24, _lenenc_int, _ConnectionContextManager, _ContextManager | |
42 | 39 | from .log import logger |
43 | 40 | |
44 | DEFAULT_USER = getpass.getuser() | |
41 | try: | |
42 | DEFAULT_USER = getpass.getuser() | |
43 | except KeyError: | |
44 | DEFAULT_USER = "unknown" | |
45 | 45 | |
46 | 46 | |
47 | 47 | def connect(host="localhost", user=None, password="", |
50 | 50 | read_default_file=None, conv=decoders, use_unicode=None, |
51 | 51 | client_flag=0, cursorclass=Cursor, init_command=None, |
52 | 52 | connect_timeout=None, read_default_group=None, |
53 | no_delay=None, autocommit=False, echo=False, | |
53 | autocommit=False, echo=False, | |
54 | 54 | local_infile=False, loop=None, ssl=None, auth_plugin='', |
55 | 55 | program_name='', server_public_key=None): |
56 | 56 | """See connections.Connection.__init__() for information about |
63 | 63 | init_command=init_command, |
64 | 64 | connect_timeout=connect_timeout, |
65 | 65 | read_default_group=read_default_group, |
66 | no_delay=no_delay, autocommit=autocommit, echo=echo, | |
66 | autocommit=autocommit, echo=echo, | |
67 | 67 | local_infile=local_infile, loop=loop, ssl=ssl, |
68 | 68 | auth_plugin=auth_plugin, program_name=program_name) |
69 | 69 | return _ConnectionContextManager(coro) |
73 | 73 | conn = Connection(*args, **kwargs) |
74 | 74 | await conn._connect() |
75 | 75 | return conn |
76 | ||
77 | ||
78 | async def _open_connection(host=None, port=None, **kwds): | |
79 | """This is based on asyncio.open_connection, allowing us to use a custom | |
80 | StreamReader. | |
81 | ||
82 | `limit` arg has been removed as we don't currently use it. | |
83 | """ | |
84 | loop = asyncio.events.get_running_loop() | |
85 | reader = _StreamReader(loop=loop) | |
86 | protocol = asyncio.StreamReaderProtocol(reader, loop=loop) | |
87 | transport, _ = await loop.create_connection( | |
88 | lambda: protocol, host, port, **kwds) | |
89 | writer = asyncio.StreamWriter(transport, protocol, reader, loop) | |
90 | return reader, writer | |
91 | ||
92 | ||
93 | async def _open_unix_connection(path=None, **kwds): | |
94 | """This is based on asyncio.open_unix_connection, allowing us to use a custom | |
95 | StreamReader. | |
96 | ||
97 | `limit` arg has been removed as we don't currently use it. | |
98 | """ | |
99 | loop = asyncio.events.get_running_loop() | |
100 | ||
101 | reader = _StreamReader(loop=loop) | |
102 | protocol = asyncio.StreamReaderProtocol(reader, loop=loop) | |
103 | transport, _ = await loop.create_unix_connection( | |
104 | lambda: protocol, path, **kwds) | |
105 | writer = asyncio.StreamWriter(transport, protocol, reader, loop) | |
106 | return reader, writer | |
107 | ||
108 | ||
109 | class _StreamReader(asyncio.StreamReader): | |
110 | """This StreamReader exposes whether EOF was received, allowing us to | |
111 | discard the associated connection instead of returning it from the pool | |
112 | when checking free connections in Pool._fill_free_pool(). | |
113 | ||
114 | `limit` arg has been removed as we don't currently use it. | |
115 | """ | |
116 | def __init__(self, loop=None): | |
117 | self._eof_received = False | |
118 | super().__init__(loop=loop) | |
119 | ||
120 | def feed_eof(self) -> None: | |
121 | self._eof_received = True | |
122 | super().feed_eof() | |
123 | ||
124 | @property | |
125 | def eof_received(self): | |
126 | return self._eof_received | |
76 | 127 | |
77 | 128 | |
78 | 129 | class Connection: |
88 | 139 | read_default_file=None, conv=decoders, use_unicode=None, |
89 | 140 | client_flag=0, cursorclass=Cursor, init_command=None, |
90 | 141 | connect_timeout=None, read_default_group=None, |
91 | no_delay=None, autocommit=False, echo=False, | |
142 | autocommit=False, echo=False, | |
92 | 143 | local_infile=False, loop=None, ssl=None, auth_plugin='', |
93 | 144 | program_name='', server_public_key=None): |
94 | 145 | """ |
119 | 170 | when connecting. |
120 | 171 | :param read_default_group: Group to read from in the configuration |
121 | 172 | file. |
122 | :param no_delay: Disable Nagle's algorithm on the socket | |
123 | 173 | :param autocommit: Autocommit mode. None means use server default. |
124 | 174 | (default: False) |
125 | 175 | :param local_infile: boolean to enable the use of LOAD DATA LOCAL |
130 | 180 | when using IAM authentication with Amazon RDS. |
131 | 181 | (default: Server Default) |
132 | 182 | :param program_name: Program name string to provide when |
133 | handshaking with MySQL. (default: sys.argv[0]) | |
183 | handshaking with MySQL. (omitted by default) | |
134 | 184 | :param server_public_key: SHA256 authentication plugin public |
135 | 185 | key value. |
136 | 186 | :param loop: asyncio loop |
155 | 205 | port = int(_config("port", fallback=port)) |
156 | 206 | charset = _config("default-character-set", fallback=charset) |
157 | 207 | |
158 | # pymysql port | |
159 | if no_delay is not None: | |
160 | warnings.warn("no_delay option is deprecated", DeprecationWarning) | |
161 | no_delay = bool(no_delay) | |
162 | else: | |
163 | no_delay = True | |
164 | ||
165 | 208 | self._host = host |
166 | 209 | self._port = port |
167 | 210 | self._user = user or DEFAULT_USER |
168 | 211 | self._password = password or "" |
169 | 212 | self._db = db |
170 | self._no_delay = no_delay | |
171 | 213 | self._echo = echo |
172 | 214 | self._last_usage = self._loop.time() |
173 | 215 | self._client_auth_plugin = auth_plugin |
174 | 216 | self._server_auth_plugin = "" |
175 | 217 | self._auth_plugin_used = "" |
218 | self._secure = False | |
176 | 219 | self.server_public_key = server_public_key |
177 | 220 | self.salt = None |
178 | 221 | |
179 | # TODO somehow import version from __init__.py | |
222 | from . import __version__ | |
180 | 223 | self._connect_attrs = { |
181 | 224 | '_client_name': 'aiomysql', |
182 | 225 | '_pid': str(os.getpid()), |
183 | '_client_version': '0.0.16', | |
226 | '_client_version': __version__, | |
184 | 227 | } |
185 | 228 | if program_name: |
186 | 229 | self._connect_attrs["program_name"] = program_name |
187 | elif sys.argv: | |
188 | self._connect_attrs["program_name"] = sys.argv[0] | |
189 | 230 | |
190 | 231 | self._unix_socket = unix_socket |
191 | 232 | if charset: |
303 | 344 | if self._writer is None: |
304 | 345 | # connection has been closed |
305 | 346 | return |
306 | send_data = struct.pack('<i', 1) + int2byte(COMMAND.COM_QUIT) | |
347 | send_data = struct.pack('<i', 1) + bytes([COMMAND.COM_QUIT]) | |
307 | 348 | self._writer.write(send_data) |
308 | 349 | await self._writer.drain() |
309 | 350 | self.close() |
469 | 510 | |
470 | 511 | async def _connect(self): |
471 | 512 | # TODO: Set close callback |
472 | # raise OperationalError(2006, | |
513 | # raise OperationalError(CR.CR_SERVER_GONE_ERROR, | |
473 | 514 | # "MySQL server has gone away (%r)" % (e,)) |
474 | 515 | try: |
475 | if self._unix_socket and self._host in ('localhost', '127.0.0.1'): | |
516 | if self._unix_socket: | |
476 | 517 | self._reader, self._writer = await \ |
477 | 518 | asyncio.wait_for( |
478 | asyncio.open_unix_connection( | |
479 | self._unix_socket, | |
480 | loop=self._loop), | |
519 | _open_unix_connection( | |
520 | self._unix_socket), | |
481 | 521 | timeout=self.connect_timeout) |
482 | 522 | self.host_info = "Localhost via UNIX socket: " + \ |
483 | 523 | self._unix_socket |
524 | self._secure = True | |
484 | 525 | else: |
485 | 526 | self._reader, self._writer = await \ |
486 | 527 | asyncio.wait_for( |
487 | asyncio.open_connection( | |
528 | _open_connection( | |
488 | 529 | self._host, |
489 | self._port, | |
490 | loop=self._loop), | |
530 | self._port), | |
491 | 531 | timeout=self.connect_timeout) |
492 | 532 | self._set_keep_alive() |
533 | self._set_nodelay(True) | |
493 | 534 | self.host_info = "socket %s:%d" % (self._host, self._port) |
494 | ||
495 | # do not set no delay in case of unix_socket | |
496 | if self._no_delay and not self._unix_socket: | |
497 | self._set_nodelay(True) | |
498 | 535 | |
499 | 536 | self._next_seq_id = 0 |
500 | 537 | |
546 | 583 | """ |
547 | 584 | # Internal note: when you build packet manually and calls |
548 | 585 | # _write_bytes() directly, you should set self._next_seq_id properly. |
549 | data = pack_int24(len(payload)) + int2byte(self._next_seq_id) + payload | |
586 | data = _pack_int24(len(payload)) + bytes([self._next_seq_id]) + payload | |
550 | 587 | self._write_bytes(data) |
551 | 588 | self._next_seq_id = (self._next_seq_id + 1) % 256 |
552 | 589 | |
570 | 607 | # we increment in both write_packet and read_packet. The count |
571 | 608 | # is reset at new COMMAND PHASE. |
572 | 609 | if packet_number != self._next_seq_id: |
610 | self.close() | |
611 | if packet_number == 0: | |
612 | # MySQL 8.0 sends error packet with seqno==0 when shutdown | |
613 | raise OperationalError( | |
614 | CR.CR_SERVER_LOST, | |
615 | "Lost connection to MySQL server during query") | |
616 | ||
573 | 617 | raise InternalError( |
574 | 618 | "Packet sequence number wrong - got %d expected %d" % |
575 | 619 | (packet_number, self._next_seq_id)) |
589 | 633 | break |
590 | 634 | |
591 | 635 | packet = packet_type(buff, self._encoding) |
592 | packet.check_error() | |
636 | if packet.is_error_packet(): | |
637 | if self._result is not None and \ | |
638 | self._result.unbuffered_active is True: | |
639 | self._result.unbuffered_active = False | |
640 | packet.raise_for_error() | |
593 | 641 | return packet |
594 | 642 | |
595 | 643 | async def _read_bytes(self, num_bytes): |
596 | 644 | try: |
597 | 645 | data = await self._reader.readexactly(num_bytes) |
598 | except asyncio.streams.IncompleteReadError as e: | |
646 | except asyncio.IncompleteReadError as e: | |
599 | 647 | msg = "Lost connection to MySQL server during query" |
600 | raise OperationalError(2013, msg) from e | |
648 | self.close() | |
649 | raise OperationalError(CR.CR_SERVER_LOST, msg) from e | |
601 | 650 | except (IOError, OSError) as e: |
602 | 651 | msg = "Lost connection to MySQL server during query (%s)" % (e,) |
603 | raise OperationalError(2013, msg) from e | |
652 | self.close() | |
653 | raise OperationalError(CR.CR_SERVER_LOST, msg) from e | |
604 | 654 | return data |
605 | 655 | |
606 | 656 | def _write_bytes(self, data): |
648 | 698 | if self._result is not None: |
649 | 699 | if self._result.unbuffered_active: |
650 | 700 | warnings.warn("Previous unbuffered result was left incomplete") |
651 | self._result._finish_unbuffered_query() | |
701 | await self._result._finish_unbuffered_query() | |
652 | 702 | while self._result.has_next: |
653 | 703 | await self.next_result() |
654 | 704 | self._result = None |
682 | 732 | if self.user is None: |
683 | 733 | raise ValueError("Did not specify a username") |
684 | 734 | |
685 | if self._ssl_context: | |
686 | # capablities, max packet, charset | |
687 | data = struct.pack('<IIB', self.client_flag, 16777216, 33) | |
688 | data += b'\x00' * (32 - len(data)) | |
689 | ||
690 | self.write_packet(data) | |
735 | charset_id = charset_by_name(self.charset).id | |
736 | data_init = struct.pack('<iIB23s', self.client_flag, MAX_PACKET_LEN, | |
737 | charset_id, b'') | |
738 | ||
739 | if self._ssl_context and self.server_capabilities & CLIENT.SSL: | |
740 | self.write_packet(data_init) | |
691 | 741 | |
692 | 742 | # Stop sending events to data_received |
693 | 743 | self._writer.transport.pause_reading() |
704 | 754 | # TCP connection not at start. Passing in a socket to |
705 | 755 | # open_connection will cause it to negotiate TLS on an existing |
706 | 756 | # connection not initiate a new one. |
707 | self._reader, self._writer = await asyncio.open_connection( | |
708 | sock=raw_sock, ssl=self._ssl_context, loop=self._loop, | |
757 | self._reader, self._writer = await _open_connection( | |
758 | sock=raw_sock, ssl=self._ssl_context, | |
709 | 759 | server_hostname=self._host |
710 | 760 | ) |
711 | 761 | |
712 | charset_id = charset_by_name(self.charset).id | |
762 | self._secure = True | |
763 | ||
713 | 764 | if isinstance(self.user, str): |
714 | 765 | _user = self.user.encode(self.encoding) |
715 | 766 | else: |
716 | 767 | _user = self.user |
717 | ||
718 | data_init = struct.pack('<iIB23s', self.client_flag, MAX_PACKET_LEN, | |
719 | charset_id, b'') | |
720 | 768 | |
721 | 769 | data = data_init + _user + b'\0' |
722 | 770 | |
748 | 796 | authresp = self._password.encode('latin1') + b'\0' |
749 | 797 | |
750 | 798 | if self.server_capabilities & CLIENT.PLUGIN_AUTH_LENENC_CLIENT_DATA: |
751 | data += lenenc_int(len(authresp)) + authresp | |
799 | data += _lenenc_int(len(authresp)) + authresp | |
752 | 800 | elif self.server_capabilities & CLIENT.SECURE_CONNECTION: |
753 | 801 | data += struct.pack('B', len(authresp)) + authresp |
754 | 802 | else: # pragma: no cover |
899 | 947 | |
900 | 948 | logger.debug("caching sha2: Trying full auth...") |
901 | 949 | |
902 | if self._ssl_context: | |
950 | if self._secure: | |
903 | 951 | logger.debug("caching sha2: Sending plain " |
904 | 952 | "password via secure connection") |
905 | 953 | self.write_packet(self._password.encode('latin1') + b'\0') |
930 | 978 | pkt.check_error() |
931 | 979 | |
932 | 980 | async def sha256_password_auth(self, pkt): |
933 | if self._ssl_context: | |
981 | if self._secure: | |
934 | 982 | logger.debug("sha256: Sending plain password") |
935 | 983 | data = self._password.encode('latin1') + b'\0' |
936 | 984 | self.write_packet(data) |
988 | 1036 | packet = await self._read_packet() |
989 | 1037 | data = packet.get_all_data() |
990 | 1038 | # logger.debug(dump_packet(data)) |
991 | self.protocol_version = byte2int(data[i:i + 1]) | |
1039 | self.protocol_version = data[i] | |
992 | 1040 | i += 1 |
993 | 1041 | |
994 | 1042 | server_end = data.find(b'\0', i) |
1194 | 1242 | # in fact, no way to stop MySQL from sending all the data after |
1195 | 1243 | # executing a query, so we just spin, and wait for an EOF packet. |
1196 | 1244 | while self.unbuffered_active: |
1197 | packet = await self.connection._read_packet() | |
1245 | try: | |
1246 | packet = await self.connection._read_packet() | |
1247 | except OperationalError as e: | |
1248 | # TODO: replace these numbers with constants when available | |
1249 | # TODO: in a new PyMySQL release | |
1250 | if e.args[0] in ( | |
1251 | 3024, # ER.QUERY_TIMEOUT | |
1252 | 1969, # ER.STATEMENT_TIMEOUT | |
1253 | ): | |
1254 | # if the query timed out we can simply ignore this error | |
1255 | self.unbuffered_active = False | |
1256 | self.connection = None | |
1257 | return | |
1258 | ||
1259 | raise | |
1260 | ||
1198 | 1261 | if self._check_packet_is_eof(packet): |
1199 | 1262 | self.unbuffered_active = False |
1200 | 1263 | # release reference to kill cyclic reference. |
13 | 13 | # https://github.com/PyMySQL/PyMySQL/blob/master/pymysql/cursors.py#L11-L18 |
14 | 14 | |
15 | 15 | #: Regular expression for :meth:`Cursor.executemany`. |
16 | #: executemany only suports simple bulk insert. | |
16 | #: executemany only supports simple bulk insert. | |
17 | 17 | #: You can use it to load large dataset. |
18 | 18 | RE_INSERT_VALUES = re.compile( |
19 | 19 | r"\s*((?:INSERT|REPLACE)\s.+\sVALUES?\s+)" + |
273 | 273 | |
274 | 274 | m = RE_INSERT_VALUES.match(query) |
275 | 275 | if m: |
276 | q_prefix = m.group(1) | |
276 | q_prefix = m.group(1) % () | |
277 | 277 | q_values = m.group(2).rstrip() |
278 | 278 | q_postfix = m.group(3) or '' |
279 | 279 | assert q_values[0] == '(' and q_values[-1] == ')' |
0 | # copied from aiopg | |
0 | # based on aiopg pool | |
1 | 1 | # https://github.com/aio-libs/aiopg/blob/master/aiopg/pool.py |
2 | 2 | |
3 | 3 | import asyncio |
35 | 35 | def __init__(self, minsize, maxsize, echo, pool_recycle, loop, **kwargs): |
36 | 36 | if minsize < 0: |
37 | 37 | raise ValueError("minsize should be zero or greater") |
38 | if maxsize < minsize: | |
38 | if maxsize < minsize and maxsize != 0: | |
39 | 39 | raise ValueError("maxsize should be not less than minsize") |
40 | 40 | self._minsize = minsize |
41 | 41 | self._loop = loop |
42 | 42 | self._conn_kwargs = kwargs |
43 | 43 | self._acquiring = 0 |
44 | self._free = collections.deque(maxlen=maxsize) | |
45 | self._cond = asyncio.Condition(loop=loop) | |
44 | self._free = collections.deque(maxlen=maxsize or None) | |
45 | self._cond = asyncio.Condition() | |
46 | 46 | self._used = set() |
47 | 47 | self._terminated = set() |
48 | 48 | self._closing = False |
77 | 77 | conn = self._free.popleft() |
78 | 78 | await conn.ensure_closed() |
79 | 79 | self._cond.notify() |
80 | ||
81 | @property | |
82 | def closed(self): | |
83 | """ | |
84 | The readonly property that returns ``True`` if the pool is closed. | |
85 | """ | |
86 | return self._closed | |
80 | 87 | |
81 | 88 | def close(self): |
82 | 89 | """Close pool. |
142 | 149 | await self._cond.wait() |
143 | 150 | |
144 | 151 | async def _fill_free_pool(self, override_min): |
145 | # iterate over free connections and remove timeouted ones | |
152 | # iterate over free connections and remove timed out ones | |
146 | 153 | free_size = len(self._free) |
147 | 154 | n = 0 |
148 | 155 | while n < free_size: |
149 | 156 | conn = self._free[-1] |
150 | 157 | if conn._reader.at_eof() or conn._reader.exception(): |
158 | self._free.pop() | |
159 | conn.close() | |
160 | ||
161 | # On MySQL 8.0 a timed out connection sends an error packet before | |
162 | # closing the connection, preventing us from relying on at_eof(). | |
163 | # This relies on our custom StreamReader, as eof_received is not | |
164 | # present in asyncio.StreamReader. | |
165 | elif conn._reader.eof_received: | |
151 | 166 | self._free.pop() |
152 | 167 | conn.close() |
153 | 168 | |
173 | 188 | if self._free: |
174 | 189 | return |
175 | 190 | |
176 | if override_min and self.size < self.maxsize: | |
191 | if override_min and (not self.maxsize or self.size < self.maxsize): | |
177 | 192 | self._acquiring += 1 |
178 | 193 | try: |
179 | 194 | conn = await connect(echo=self._echo, loop=self._loop, |
48 | 48 | ) |
49 | 49 | |
50 | 50 | In the case that a plain SQL string is passed, a tuple or |
51 | individual values in \*multiparams may be passed:: | |
51 | individual values in *multiparams may be passed:: | |
52 | 52 | |
53 | 53 | await conn.execute( |
54 | 54 | "INSERT INTO table (id, value) VALUES (%d, %s)", |
5 | 5 | from .connection import SAConnection |
6 | 6 | from .exc import InvalidRequestError, ArgumentError |
7 | 7 | from ..utils import _PoolContextManager, _PoolAcquireContextManager |
8 | from ..cursors import Cursor | |
8 | from ..cursors import ( | |
9 | Cursor, DeserializationCursor, DictCursor, SSCursor, SSDictCursor) | |
9 | 10 | |
10 | 11 | |
11 | 12 | try: |
12 | 13 | from sqlalchemy.dialects.mysql.pymysql import MySQLDialect_pymysql |
14 | from sqlalchemy.dialects.mysql.mysqldb import MySQLCompiler_mysqldb | |
13 | 15 | except ImportError: # pragma: no cover |
14 | 16 | raise ImportError('aiomysql.sa requires sqlalchemy') |
15 | 17 | |
16 | 18 | |
19 | class MySQLCompiler_pymysql(MySQLCompiler_mysqldb): | |
20 | def construct_params(self, params=None, _group_number=None, _check=True): | |
21 | pd = super().construct_params(params, _group_number, _check) | |
22 | ||
23 | for column in self.prefetch: | |
24 | pd[column.key] = self._exec_default(column.default) | |
25 | ||
26 | return pd | |
27 | ||
28 | def _exec_default(self, default): | |
29 | if default.is_callable: | |
30 | return default.arg(self.dialect) | |
31 | else: | |
32 | return default.arg | |
33 | ||
34 | ||
17 | 35 | _dialect = MySQLDialect_pymysql(paramstyle='pyformat') |
36 | _dialect.statement_compiler = MySQLCompiler_pymysql | |
18 | 37 | _dialect.default_paramstyle = 'pyformat' |
19 | 38 | |
20 | 39 | |
25 | 44 | |
26 | 45 | Returns Engine instance with embedded connection pool. |
27 | 46 | |
28 | The pool has *minsize* opened connections to PostgreSQL server. | |
47 | The pool has *minsize* opened connections to MySQL server. | |
29 | 48 | """ |
49 | deprecated_cursor_classes = [ | |
50 | DeserializationCursor, DictCursor, SSCursor, SSDictCursor, | |
51 | ] | |
52 | ||
53 | cursorclass = kwargs.get('cursorclass', Cursor) | |
54 | if not issubclass(cursorclass, Cursor) or any( | |
55 | issubclass(cursorclass, cursor_class) | |
56 | for cursor_class in deprecated_cursor_classes | |
57 | ): | |
58 | raise ArgumentError('SQLAlchemy engine does not support ' | |
59 | 'this cursor class') | |
60 | ||
30 | 61 | coro = _create_engine(minsize=minsize, maxsize=maxsize, loop=loop, |
31 | 62 | dialect=dialect, pool_recycle=pool_recycle, |
32 | 63 | compiled_cache=compiled_cache, **kwargs) |
33 | compatible_cursor_classes = [Cursor] | |
34 | # Without provided kwarg, default is default cursor from Connection class | |
35 | if kwargs.get('cursorclass', Cursor) not in compatible_cursor_classes: | |
36 | raise ArgumentError('SQLAlchemy engine does not support ' | |
37 | 'this cursor class') | |
38 | 64 | return _EngineContextManager(coro) |
39 | 65 | |
40 | 66 |
446 | 446 | else: |
447 | 447 | return None |
448 | 448 | |
449 | async def __aiter__(self): | |
449 | def __aiter__(self): | |
450 | 450 | return self |
451 | 451 | |
452 | 452 | async def __anext__(self): |
0 | 0 | from collections.abc import Coroutine |
1 | ||
2 | import struct | |
3 | ||
4 | ||
5 | def _pack_int24(n): | |
6 | return struct.pack("<I", n)[:3] | |
7 | ||
8 | ||
9 | def _lenenc_int(i): | |
10 | if i < 0: | |
11 | raise ValueError( | |
12 | "Encoding %d is less than 0 - no representation in LengthEncodedInteger" % i | |
13 | ) | |
14 | elif i < 0xFB: | |
15 | return bytes([i]) | |
16 | elif i < (1 << 16): | |
17 | return b"\xfc" + struct.pack("<H", i) | |
18 | elif i < (1 << 24): | |
19 | return b"\xfd" + struct.pack("<I", i)[:3] | |
20 | elif i < (1 << 64): | |
21 | return b"\xfe" + struct.pack("<Q", i) | |
22 | else: | |
23 | raise ValueError( | |
24 | "Encoding %x is larger than %x - no representation in LengthEncodedInteger" | |
25 | % (i, (1 << 64)) | |
26 | ) | |
1 | 27 | |
2 | 28 | |
3 | 29 | class _ContextManager(Coroutine): |
69 | 95 | |
70 | 96 | |
71 | 97 | class _SAConnectionContextManager(_ContextManager): |
72 | async def __aiter__(self): | |
73 | result = await self._coro | |
74 | return result | |
98 | def __aiter__(self): | |
99 | return self | |
100 | ||
101 | async def __anext__(self): | |
102 | if self._obj is None: | |
103 | self._obj = await self._coro | |
104 | ||
105 | try: | |
106 | return await self._obj.__anext__() | |
107 | except StopAsyncIteration: | |
108 | await self._obj.close() | |
109 | self._obj = None | |
110 | raise | |
75 | 111 | |
76 | 112 | |
77 | 113 | class _TransactionContextManager(_ContextManager): |
29 | 29 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom |
30 | 30 | # ones. |
31 | 31 | |
32 | import re, os.path | |
33 | ||
34 | def get_release(): | |
35 | regexp = re.compile(r"^__version__\W*=\W*'([\d.abrc]+)'") | |
36 | here = os.path.dirname(__file__) | |
37 | root = os.path.dirname(here) | |
38 | init_py = os.path.join(root, 'aiomysql', '__init__.py') | |
39 | with open(init_py) as f: | |
40 | for line in f: | |
41 | match = regexp.match(line) | |
42 | if match is not None: | |
43 | return match.group(1) | |
44 | else: | |
45 | raise RuntimeError('Cannot find version in aiomysql/__init__.py') | |
46 | ||
47 | ||
48 | def get_version(release): | |
49 | parts = release.split('.') | |
50 | return '.'.join(parts[:2]) | |
32 | from aiomysql import __version__ | |
33 | ||
51 | 34 | |
52 | 35 | extensions = [ |
53 | 36 | 'sphinx.ext.autodoc', |
81 | 64 | # |version| and |release|, also used in various other places throughout the |
82 | 65 | # built documents. |
83 | 66 | # |
84 | release = get_release() | |
85 | version = get_version(release) | |
67 | release = __version__ | |
68 | version = '.'.join(__version__.split('.')[:2]) | |
86 | 69 | |
87 | 70 | # The language for content autogenerated by Sphinx. Refer to documentation |
88 | 71 | # for a list of supported languages. |
22 | 22 | |
23 | 23 | loop = asyncio.get_event_loop() |
24 | 24 | |
25 | @asyncio.coroutine | |
26 | def test_example(): | |
27 | conn = yield from aiomysql.connect(host='127.0.0.1', port=3306, | |
28 | user='root', password='', db='mysql', | |
29 | loop=loop) | |
30 | ||
31 | cur = yield from conn.cursor() | |
32 | yield from cur.execute("SELECT Host,User FROM user") | |
25 | async def test_example(): | |
26 | conn = await aiomysql.connect(host='127.0.0.1', port=3306, | |
27 | user='root', password='', db='mysql', | |
28 | loop=loop) | |
29 | ||
30 | cur = await conn.cursor() | |
31 | await cur.execute("SELECT Host,User FROM user") | |
33 | 32 | print(cur.description) |
34 | r = yield from cur.fetchall() | |
33 | r = await cur.fetchall() | |
35 | 34 | print(r) |
36 | yield from cur.close() | |
35 | await cur.close() | |
37 | 36 | conn.close() |
38 | 37 | |
39 | 38 | loop.run_until_complete(test_example()) |
45 | 44 | read_default_file=None, conv=decoders, use_unicode=None, |
46 | 45 | client_flag=0, cursorclass=Cursor, init_command=None, |
47 | 46 | connect_timeout=None, read_default_group=None, |
48 | no_delay=False, autocommit=False, echo=False, | |
47 | autocommit=False, echo=False | |
49 | 48 | ssl=None, auth_plugin='', program_name='', |
50 | 49 | server_public_key=None, loop=None) |
51 | 50 | |
79 | 78 | when connecting. |
80 | 79 | :param str read_default_group: Group to read from in the configuration |
81 | 80 | file. |
82 | :param bool no_delay: disable Nagle's algorithm on the socket | |
83 | 81 | :param autocommit: Autocommit mode. None means use server default. |
84 | 82 | (default: ``False``) |
85 | 83 | :param ssl: Optional SSL Context to force SSL |
88 | 86 | when using IAM authentication with Amazon RDS. |
89 | 87 | (default: Server Default) |
90 | 88 | :param program_name: Program name string to provide when |
91 | handshaking with MySQL. (default: sys.argv[0]) | |
89 | handshaking with MySQL. (omitted by default) | |
90 | ||
91 | .. versionchanged:: 1.0 | |
92 | ``sys.argv[0]`` is no longer passed by default | |
92 | 93 | :param server_public_key: SHA256 authenticaiton plugin public key value. |
93 | 94 | :param loop: asyncio event loop instance or ``None`` for default one. |
94 | 95 | :returns: :class:`Connection` instance. |
25 | 25 | |
26 | 26 | loop = asyncio.get_event_loop() |
27 | 27 | |
28 | @asyncio.coroutine | |
29 | def test_example(): | |
30 | conn = yield from aiomysql.connect(host='127.0.0.1', port=3306, | |
31 | user='root', password='', | |
32 | db='mysql', loop=loop) | |
28 | async def test_example(): | |
29 | conn = await aiomysql.connect(host='127.0.0.1', port=3306, | |
30 | user='root', password='', | |
31 | db='mysql', loop=loop) | |
33 | 32 | |
34 | 33 | # create default cursor |
35 | cursor = yield from conn.cursor() | |
34 | cursor = await conn.cursor() | |
36 | 35 | |
37 | 36 | # execute sql query |
38 | yield from cursor.execute("SELECT Host, User FROM user") | |
37 | await cursor.execute("SELECT Host, User FROM user") | |
39 | 38 | |
40 | 39 | # fetch all results |
41 | r = yield from cursor.fetchall() | |
40 | r = await cursor.fetchall() | |
42 | 41 | |
43 | 42 | # detach cursor from connection |
44 | yield from cursor.close() | |
43 | await cursor.close() | |
45 | 44 | |
46 | 45 | # close connection |
47 | 46 | conn.close() |
136 | 135 | |
137 | 136 | For example, getting all rows where id is 5:: |
138 | 137 | |
139 | yield from cursor.execute("SELECT * FROM t1 WHERE id=%s", (5,)) | |
138 | await cursor.execute("SELECT * FROM t1 WHERE id=%s", (5,)) | |
140 | 139 | |
141 | 140 | :param str query: sql statement |
142 | 141 | :param list args: tuple or list of arguments for sql query |
156 | 155 | ] |
157 | 156 | stmt = "INSERT INTO employees (name, phone) |
158 | 157 | VALUES ('%s','%s')" |
159 | yield from cursor.executemany(stmt, data) | |
158 | await cursor.executemany(stmt, data) | |
160 | 159 | |
161 | 160 | `INSERT` statements are optimized by batching the data, that is |
162 | 161 | using the MySQL multiple rows syntax. |
182 | 181 | query using :meth:`Cursor.execute()` to get any OUT or INOUT values. |
183 | 182 | Basic usage example:: |
184 | 183 | |
185 | conn = yield from aiomysql.connect(host='127.0.0.1', port=3306, | |
186 | user='root', password='', | |
187 | db='mysql', loop=self.loop) | |
188 | ||
189 | cur = yield from conn.cursor() | |
190 | yield from cur.execute("""CREATE PROCEDURE myinc(p1 INT) | |
191 | BEGIN | |
192 | SELECT p1 + 1; | |
193 | END | |
194 | """) | |
195 | ||
196 | yield from cur.callproc('myinc', [1]) | |
197 | (ret, ) = yield from cur.fetchone() | |
184 | conn = await aiomysql.connect(host='127.0.0.1', port=3306, | |
185 | user='root', password='', | |
186 | db='mysql', loop=self.loop) | |
187 | ||
188 | cur = await conn.cursor() | |
189 | await cur.execute("""CREATE PROCEDURE myinc(p1 INT) | |
190 | BEGIN | |
191 | SELECT p1 + 1; | |
192 | END | |
193 | """) | |
194 | ||
195 | await cur.callproc('myinc', [1]) | |
196 | (ret, ) = await cur.fetchone() | |
198 | 197 | assert 2, ret |
199 | 198 | |
200 | yield from cur.close() | |
199 | await cur.close() | |
201 | 200 | conn.close() |
202 | 201 | |
203 | 202 | Compatibility warning: The act of calling a stored procedure |
228 | 227 | due to the specified number of rows not being available, fewer rows |
229 | 228 | may be returned :: |
230 | 229 | |
231 | cursor = yield from connection.cursor() | |
232 | yield from cursor.execute("SELECT * FROM test;") | |
230 | cursor = await connection.cursor() | |
231 | await cursor.execute("SELECT * FROM test;") | |
233 | 232 | r = cursor.fetchmany(2) |
234 | 233 | print(r) |
235 | 234 | # [(1, 100, "abc'def"), (2, None, 'dada')] |
236 | r = yield from cursor.fetchmany(2) | |
235 | r = await cursor.fetchmany(2) | |
237 | 236 | print(r) |
238 | 237 | # [(3, 42, 'bar')] |
239 | r = yield from cursor.fetchmany(2) | |
238 | r = await cursor.fetchmany(2) | |
240 | 239 | print(r) |
241 | 240 | # [] |
242 | 241 | |
247 | 246 | |
248 | 247 | :ref:`Coroutine <coroutine>` returns all rows of a query result set:: |
249 | 248 | |
250 | yield from cursor.execute("SELECT * FROM test;") | |
251 | r = yield from cursor.fetchall() | |
249 | await cursor.execute("SELECT * FROM test;") | |
250 | r = await cursor.fetchall() | |
252 | 251 | print(r) |
253 | 252 | # [(1, 100, "abc'def"), (2, None, 'dada'), (3, 42, 'bar')] |
254 | 253 | |
273 | 272 | probably to catch both exceptions in your code:: |
274 | 273 | |
275 | 274 | try: |
276 | yield from cur.scroll(1000 * 1000) | |
275 | await cur.scroll(1000 * 1000) | |
277 | 276 | except (ProgrammingError, IndexError), exc: |
278 | 277 | deal_with_it(exc) |
279 | 278 | |
291 | 290 | |
292 | 291 | loop = asyncio.get_event_loop() |
293 | 292 | |
294 | @asyncio.coroutine | |
295 | def test_example(): | |
296 | conn = yield from aiomysql.connect(host='127.0.0.1', port=3306, | |
297 | user='root', password='', | |
298 | db='mysql', loop=loop) | |
293 | async def test_example(): | |
294 | conn = await aiomysql.connect(host='127.0.0.1', port=3306, | |
295 | user='root', password='', | |
296 | db='mysql', loop=loop) | |
299 | 297 | |
300 | 298 | # create dict cursor |
301 | cursor = yield from conn.cursor(aiomysql.DictCursor) | |
299 | cursor = await conn.cursor(aiomysql.DictCursor) | |
302 | 300 | |
303 | 301 | # execute sql query |
304 | yield from cursor.execute( | |
302 | await cursor.execute( | |
305 | 303 | "SELECT * from people where name='bob'") |
306 | 304 | |
307 | 305 | # fetch all results |
308 | r = yield from cursor.fetchone() | |
306 | r = await cursor.fetchone() | |
309 | 307 | print(r) |
310 | 308 | # {'age': 20, 'DOB': datetime.datetime(1990, 2, 6, 23, 4, 56), |
311 | 309 | # 'name': 'bob'} |
331 | 329 | |
332 | 330 | loop = asyncio.get_event_loop() |
333 | 331 | |
334 | @asyncio.coroutine | |
335 | def test_example(): | |
336 | conn = yield from aiomysql.connect(host='127.0.0.1', port=3306, | |
337 | user='root', password='', | |
338 | db='mysql', loop=loop) | |
332 | async def test_example(): | |
333 | conn = await aiomysql.connect(host='127.0.0.1', port=3306, | |
334 | user='root', password='', | |
335 | db='mysql', loop=loop) | |
339 | 336 | |
340 | 337 | # create your dict cursor |
341 | cursor = yield from conn.cursor(AttrDictCursor) | |
338 | cursor = await conn.cursor(AttrDictCursor) | |
342 | 339 | |
343 | 340 | # execute sql query |
344 | yield from cursor.execute( | |
341 | await cursor.execute( | |
345 | 342 | "SELECT * from people where name='bob'") |
346 | 343 | |
347 | 344 | # fetch all results |
348 | r = yield from cursor.fetchone() | |
345 | r = await cursor.fetchone() | |
349 | 346 | print(r) |
350 | 347 | # {'age': 20, 'DOB': datetime.datetime(1990, 2, 6, 23, 4, 56), |
351 | 348 | # 'name': 'bob'} |
6 | 6 | ==================================== |
7 | 7 | |
8 | 8 | .. _GitHub: https://github.com/aio-libs/aiomysql |
9 | .. _asyncio: http://docs.python.org/3.4/library/asyncio.html | |
9 | .. _asyncio: http://docs.python.org/3.5/library/asyncio.html | |
10 | 10 | .. _aiopg: https://github.com/aio-libs/aiopg |
11 | 11 | .. _Tornado-MySQL: https://github.com/PyMySQL/Tornado-MySQL |
12 | 12 | .. _aio-libs: https://github.com/aio-libs |
18 | 18 | same api, look and feel. |
19 | 19 | |
20 | 20 | Internally **aiomysql** is copy of PyMySQL, underlying io calls switched |
21 | to async, basically ``yield from`` and ``asyncio.coroutine`` added in | |
21 | to async, basically ``await`` and ``async def coroutine`` added in | |
22 | 22 | proper places. :term:`sqlalchemy` support ported from aiopg_. |
23 | 23 | |
24 | 24 | |
35 | 35 | ------ |
36 | 36 | |
37 | 37 | **aiomysql** based on :term:`PyMySQL` , and provides same api, you just need |
38 | to use ``yield from conn.f()`` instead of just call ``conn.f()`` for | |
38 | to use ``await conn.f()`` instead of just call ``conn.f()`` for | |
39 | 39 | every method. |
40 | 40 | |
41 | 41 | Properties are unchanged, so ``conn.prop`` is correct as well as |
50 | 50 | |
51 | 51 | loop = asyncio.get_event_loop() |
52 | 52 | |
53 | @asyncio.coroutine | |
54 | def test_example(): | |
55 | conn = yield from aiomysql.connect(host='127.0.0.1', port=3306, | |
53 | ||
54 | async def test_example(): | |
55 | conn = await aiomysql.connect(host='127.0.0.1', port=3306, | |
56 | 56 | user='root', password='', db='mysql', |
57 | 57 | loop=loop) |
58 | 58 | |
59 | cur = yield from conn.cursor() | |
60 | yield from cur.execute("SELECT Host,User FROM user") | |
59 | cur = await conn.cursor() | |
60 | await cur.execute("SELECT Host,User FROM user") | |
61 | 61 | print(cur.description) |
62 | r = yield from cur.fetchall() | |
62 | r = await cur.fetchall() | |
63 | 63 | print(r) |
64 | yield from cur.close() | |
64 | await cur.close() | |
65 | 65 | conn.close() |
66 | 66 | |
67 | 67 | loop.run_until_complete(test_example()) |
95 | 95 | <https://github.com/aio-libs/aiomysql/issues>`_ if you have found a bug |
96 | 96 | or have some suggestion for library improvement. |
97 | 97 | |
98 | The library uses `Travis <https://travis-ci.org/aio-libs/aiomysql>`_ for | |
99 | Continious Integration and `Coveralls | |
100 | <https://coveralls.io/r/aio-libs/aiomysql?branch=master>`_ for | |
98 | The library uses `GitHub Actions | |
99 | <https://github.com/aio-libs/aiomysql/actions>`_ for Continuous Integration | |
100 | and `Codecov <https://app.codecov.io/gh/aio-libs/aiomysql/branch/master>`_ for | |
101 | 101 | coverage reports. |
102 | 102 | |
103 | 103 | |
104 | 104 | Dependencies |
105 | 105 | ------------ |
106 | 106 | |
107 | - Python 3.3 and :mod:`asyncio` or Python 3.4+ | |
107 | - Python 3.7+ | |
108 | 108 | - :term:`PyMySQL` |
109 | 109 | - aiomysql.sa requires :term:`sqlalchemy`. |
110 | 110 |
0 | @ECHO OFF | |
1 | ||
2 | REM Command file for Sphinx documentation | |
3 | ||
4 | if "%SPHINXBUILD%" == "" ( | |
5 | set SPHINXBUILD=sphinx-build | |
6 | ) | |
7 | set BUILDDIR=_build | |
8 | set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . | |
9 | set I18NSPHINXOPTS=%SPHINXOPTS% . | |
10 | if NOT "%PAPER%" == "" ( | |
11 | set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% | |
12 | set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% | |
13 | ) | |
14 | ||
15 | if "%1" == "" goto help | |
16 | ||
17 | if "%1" == "help" ( | |
18 | :help | |
19 | echo.Please use `make ^<target^>` where ^<target^> is one of | |
20 | echo. html to make standalone HTML files | |
21 | echo. dirhtml to make HTML files named index.html in directories | |
22 | echo. singlehtml to make a single large HTML file | |
23 | echo. pickle to make pickle files | |
24 | echo. json to make JSON files | |
25 | echo. htmlhelp to make HTML files and a HTML help project | |
26 | echo. qthelp to make HTML files and a qthelp project | |
27 | echo. devhelp to make HTML files and a Devhelp project | |
28 | echo. epub to make an epub | |
29 | echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter | |
30 | echo. text to make text files | |
31 | echo. man to make manual pages | |
32 | echo. texinfo to make Texinfo files | |
33 | echo. gettext to make PO message catalogs | |
34 | echo. changes to make an overview over all changed/added/deprecated items | |
35 | echo. xml to make Docutils-native XML files | |
36 | echo. pseudoxml to make pseudoxml-XML files for display purposes | |
37 | echo. linkcheck to check all external links for integrity | |
38 | echo. doctest to run all doctests embedded in the documentation if enabled | |
39 | goto end | |
40 | ) | |
41 | ||
42 | if "%1" == "clean" ( | |
43 | for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i | |
44 | del /q /s %BUILDDIR%\* | |
45 | goto end | |
46 | ) | |
47 | ||
48 | ||
49 | %SPHINXBUILD% 2> nul | |
50 | if errorlevel 9009 ( | |
51 | echo. | |
52 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx | |
53 | echo.installed, then set the SPHINXBUILD environment variable to point | |
54 | echo.to the full path of the 'sphinx-build' executable. Alternatively you | |
55 | echo.may add the Sphinx directory to PATH. | |
56 | echo. | |
57 | echo.If you don't have Sphinx installed, grab it from | |
58 | echo.http://sphinx-doc.org/ | |
59 | exit /b 1 | |
60 | ) | |
61 | ||
62 | if "%1" == "html" ( | |
63 | %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html | |
64 | if errorlevel 1 exit /b 1 | |
65 | echo. | |
66 | echo.Build finished. The HTML pages are in %BUILDDIR%/html. | |
67 | goto end | |
68 | ) | |
69 | ||
70 | if "%1" == "dirhtml" ( | |
71 | %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml | |
72 | if errorlevel 1 exit /b 1 | |
73 | echo. | |
74 | echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. | |
75 | goto end | |
76 | ) | |
77 | ||
78 | if "%1" == "singlehtml" ( | |
79 | %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml | |
80 | if errorlevel 1 exit /b 1 | |
81 | echo. | |
82 | echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. | |
83 | goto end | |
84 | ) | |
85 | ||
86 | if "%1" == "pickle" ( | |
87 | %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle | |
88 | if errorlevel 1 exit /b 1 | |
89 | echo. | |
90 | echo.Build finished; now you can process the pickle files. | |
91 | goto end | |
92 | ) | |
93 | ||
94 | if "%1" == "json" ( | |
95 | %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json | |
96 | if errorlevel 1 exit /b 1 | |
97 | echo. | |
98 | echo.Build finished; now you can process the JSON files. | |
99 | goto end | |
100 | ) | |
101 | ||
102 | if "%1" == "htmlhelp" ( | |
103 | %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp | |
104 | if errorlevel 1 exit /b 1 | |
105 | echo. | |
106 | echo.Build finished; now you can run HTML Help Workshop with the ^ | |
107 | .hhp project file in %BUILDDIR%/htmlhelp. | |
108 | goto end | |
109 | ) | |
110 | ||
111 | if "%1" == "qthelp" ( | |
112 | %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp | |
113 | if errorlevel 1 exit /b 1 | |
114 | echo. | |
115 | echo.Build finished; now you can run "qcollectiongenerator" with the ^ | |
116 | .qhcp project file in %BUILDDIR%/qthelp, like this: | |
117 | echo.^> qcollectiongenerator %BUILDDIR%\qthelp\aiomysql.qhcp | |
118 | echo.To view the help file: | |
119 | echo.^> assistant -collectionFile %BUILDDIR%\qthelp\aiomysql.ghc | |
120 | goto end | |
121 | ) | |
122 | ||
123 | if "%1" == "devhelp" ( | |
124 | %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp | |
125 | if errorlevel 1 exit /b 1 | |
126 | echo. | |
127 | echo.Build finished. | |
128 | goto end | |
129 | ) | |
130 | ||
131 | if "%1" == "epub" ( | |
132 | %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub | |
133 | if errorlevel 1 exit /b 1 | |
134 | echo. | |
135 | echo.Build finished. The epub file is in %BUILDDIR%/epub. | |
136 | goto end | |
137 | ) | |
138 | ||
139 | if "%1" == "latex" ( | |
140 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex | |
141 | if errorlevel 1 exit /b 1 | |
142 | echo. | |
143 | echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. | |
144 | goto end | |
145 | ) | |
146 | ||
147 | if "%1" == "latexpdf" ( | |
148 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex | |
149 | cd %BUILDDIR%/latex | |
150 | make all-pdf | |
151 | cd %BUILDDIR%/.. | |
152 | echo. | |
153 | echo.Build finished; the PDF files are in %BUILDDIR%/latex. | |
154 | goto end | |
155 | ) | |
156 | ||
157 | if "%1" == "latexpdfja" ( | |
158 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex | |
159 | cd %BUILDDIR%/latex | |
160 | make all-pdf-ja | |
161 | cd %BUILDDIR%/.. | |
162 | echo. | |
163 | echo.Build finished; the PDF files are in %BUILDDIR%/latex. | |
164 | goto end | |
165 | ) | |
166 | ||
167 | if "%1" == "text" ( | |
168 | %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text | |
169 | if errorlevel 1 exit /b 1 | |
170 | echo. | |
171 | echo.Build finished. The text files are in %BUILDDIR%/text. | |
172 | goto end | |
173 | ) | |
174 | ||
175 | if "%1" == "man" ( | |
176 | %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man | |
177 | if errorlevel 1 exit /b 1 | |
178 | echo. | |
179 | echo.Build finished. The manual pages are in %BUILDDIR%/man. | |
180 | goto end | |
181 | ) | |
182 | ||
183 | if "%1" == "texinfo" ( | |
184 | %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo | |
185 | if errorlevel 1 exit /b 1 | |
186 | echo. | |
187 | echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. | |
188 | goto end | |
189 | ) | |
190 | ||
191 | if "%1" == "gettext" ( | |
192 | %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale | |
193 | if errorlevel 1 exit /b 1 | |
194 | echo. | |
195 | echo.Build finished. The message catalogs are in %BUILDDIR%/locale. | |
196 | goto end | |
197 | ) | |
198 | ||
199 | if "%1" == "changes" ( | |
200 | %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes | |
201 | if errorlevel 1 exit /b 1 | |
202 | echo. | |
203 | echo.The overview file is in %BUILDDIR%/changes. | |
204 | goto end | |
205 | ) | |
206 | ||
207 | if "%1" == "linkcheck" ( | |
208 | %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck | |
209 | if errorlevel 1 exit /b 1 | |
210 | echo. | |
211 | echo.Link check complete; look for any errors in the above output ^ | |
212 | or in %BUILDDIR%/linkcheck/output.txt. | |
213 | goto end | |
214 | ) | |
215 | ||
216 | if "%1" == "doctest" ( | |
217 | %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest | |
218 | if errorlevel 1 exit /b 1 | |
219 | echo. | |
220 | echo.Testing of doctests in the sources finished, look at the ^ | |
221 | results in %BUILDDIR%/doctest/output.txt. | |
222 | goto end | |
223 | ) | |
224 | ||
225 | if "%1" == "xml" ( | |
226 | %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml | |
227 | if errorlevel 1 exit /b 1 | |
228 | echo. | |
229 | echo.Build finished. The XML files are in %BUILDDIR%/xml. | |
230 | goto end | |
231 | ) | |
232 | ||
233 | if "%1" == "pseudoxml" ( | |
234 | %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml | |
235 | if errorlevel 1 exit /b 1 | |
236 | echo. | |
237 | echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. | |
238 | goto end | |
239 | ) | |
240 | ||
241 | :end | |
0 | @ECHO OFF | |
1 | ||
2 | REM Command file for Sphinx documentation | |
3 | ||
4 | if "%SPHINXBUILD%" == "" ( | |
5 | set SPHINXBUILD=sphinx-build | |
6 | ) | |
7 | set BUILDDIR=_build | |
8 | set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . | |
9 | set I18NSPHINXOPTS=%SPHINXOPTS% . | |
10 | if NOT "%PAPER%" == "" ( | |
11 | set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% | |
12 | set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% | |
13 | ) | |
14 | ||
15 | if "%1" == "" goto help | |
16 | ||
17 | if "%1" == "help" ( | |
18 | :help | |
19 | echo.Please use `make ^<target^>` where ^<target^> is one of | |
20 | echo. html to make standalone HTML files | |
21 | echo. dirhtml to make HTML files named index.html in directories | |
22 | echo. singlehtml to make a single large HTML file | |
23 | echo. pickle to make pickle files | |
24 | echo. json to make JSON files | |
25 | echo. htmlhelp to make HTML files and a HTML help project | |
26 | echo. qthelp to make HTML files and a qthelp project | |
27 | echo. devhelp to make HTML files and a Devhelp project | |
28 | echo. epub to make an epub | |
29 | echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter | |
30 | echo. text to make text files | |
31 | echo. man to make manual pages | |
32 | echo. texinfo to make Texinfo files | |
33 | echo. gettext to make PO message catalogs | |
34 | echo. changes to make an overview over all changed/added/deprecated items | |
35 | echo. xml to make Docutils-native XML files | |
36 | echo. pseudoxml to make pseudoxml-XML files for display purposes | |
37 | echo. linkcheck to check all external links for integrity | |
38 | echo. doctest to run all doctests embedded in the documentation if enabled | |
39 | goto end | |
40 | ) | |
41 | ||
42 | if "%1" == "clean" ( | |
43 | for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i | |
44 | del /q /s %BUILDDIR%\* | |
45 | goto end | |
46 | ) | |
47 | ||
48 | ||
49 | %SPHINXBUILD% 2> nul | |
50 | if errorlevel 9009 ( | |
51 | echo. | |
52 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx | |
53 | echo.installed, then set the SPHINXBUILD environment variable to point | |
54 | echo.to the full path of the 'sphinx-build' executable. Alternatively you | |
55 | echo.may add the Sphinx directory to PATH. | |
56 | echo. | |
57 | echo.If you don't have Sphinx installed, grab it from | |
58 | echo.http://sphinx-doc.org/ | |
59 | exit /b 1 | |
60 | ) | |
61 | ||
62 | if "%1" == "html" ( | |
63 | %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html | |
64 | if errorlevel 1 exit /b 1 | |
65 | echo. | |
66 | echo.Build finished. The HTML pages are in %BUILDDIR%/html. | |
67 | goto end | |
68 | ) | |
69 | ||
70 | if "%1" == "dirhtml" ( | |
71 | %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml | |
72 | if errorlevel 1 exit /b 1 | |
73 | echo. | |
74 | echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. | |
75 | goto end | |
76 | ) | |
77 | ||
78 | if "%1" == "singlehtml" ( | |
79 | %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml | |
80 | if errorlevel 1 exit /b 1 | |
81 | echo. | |
82 | echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. | |
83 | goto end | |
84 | ) | |
85 | ||
86 | if "%1" == "pickle" ( | |
87 | %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle | |
88 | if errorlevel 1 exit /b 1 | |
89 | echo. | |
90 | echo.Build finished; now you can process the pickle files. | |
91 | goto end | |
92 | ) | |
93 | ||
94 | if "%1" == "json" ( | |
95 | %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json | |
96 | if errorlevel 1 exit /b 1 | |
97 | echo. | |
98 | echo.Build finished; now you can process the JSON files. | |
99 | goto end | |
100 | ) | |
101 | ||
102 | if "%1" == "htmlhelp" ( | |
103 | %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp | |
104 | if errorlevel 1 exit /b 1 | |
105 | echo. | |
106 | echo.Build finished; now you can run HTML Help Workshop with the ^ | |
107 | .hhp project file in %BUILDDIR%/htmlhelp. | |
108 | goto end | |
109 | ) | |
110 | ||
111 | if "%1" == "qthelp" ( | |
112 | %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp | |
113 | if errorlevel 1 exit /b 1 | |
114 | echo. | |
115 | echo.Build finished; now you can run "qcollectiongenerator" with the ^ | |
116 | .qhcp project file in %BUILDDIR%/qthelp, like this: | |
117 | echo.^> qcollectiongenerator %BUILDDIR%\qthelp\aiomysql.qhcp | |
118 | echo.To view the help file: | |
119 | echo.^> assistant -collectionFile %BUILDDIR%\qthelp\aiomysql.ghc | |
120 | goto end | |
121 | ) | |
122 | ||
123 | if "%1" == "devhelp" ( | |
124 | %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp | |
125 | if errorlevel 1 exit /b 1 | |
126 | echo. | |
127 | echo.Build finished. | |
128 | goto end | |
129 | ) | |
130 | ||
131 | if "%1" == "epub" ( | |
132 | %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub | |
133 | if errorlevel 1 exit /b 1 | |
134 | echo. | |
135 | echo.Build finished. The epub file is in %BUILDDIR%/epub. | |
136 | goto end | |
137 | ) | |
138 | ||
139 | if "%1" == "latex" ( | |
140 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex | |
141 | if errorlevel 1 exit /b 1 | |
142 | echo. | |
143 | echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. | |
144 | goto end | |
145 | ) | |
146 | ||
147 | if "%1" == "latexpdf" ( | |
148 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex | |
149 | cd %BUILDDIR%/latex | |
150 | make all-pdf | |
151 | cd %BUILDDIR%/.. | |
152 | echo. | |
153 | echo.Build finished; the PDF files are in %BUILDDIR%/latex. | |
154 | goto end | |
155 | ) | |
156 | ||
157 | if "%1" == "latexpdfja" ( | |
158 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex | |
159 | cd %BUILDDIR%/latex | |
160 | make all-pdf-ja | |
161 | cd %BUILDDIR%/.. | |
162 | echo. | |
163 | echo.Build finished; the PDF files are in %BUILDDIR%/latex. | |
164 | goto end | |
165 | ) | |
166 | ||
167 | if "%1" == "text" ( | |
168 | %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text | |
169 | if errorlevel 1 exit /b 1 | |
170 | echo. | |
171 | echo.Build finished. The text files are in %BUILDDIR%/text. | |
172 | goto end | |
173 | ) | |
174 | ||
175 | if "%1" == "man" ( | |
176 | %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man | |
177 | if errorlevel 1 exit /b 1 | |
178 | echo. | |
179 | echo.Build finished. The manual pages are in %BUILDDIR%/man. | |
180 | goto end | |
181 | ) | |
182 | ||
183 | if "%1" == "texinfo" ( | |
184 | %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo | |
185 | if errorlevel 1 exit /b 1 | |
186 | echo. | |
187 | echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. | |
188 | goto end | |
189 | ) | |
190 | ||
191 | if "%1" == "gettext" ( | |
192 | %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale | |
193 | if errorlevel 1 exit /b 1 | |
194 | echo. | |
195 | echo.Build finished. The message catalogs are in %BUILDDIR%/locale. | |
196 | goto end | |
197 | ) | |
198 | ||
199 | if "%1" == "changes" ( | |
200 | %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes | |
201 | if errorlevel 1 exit /b 1 | |
202 | echo. | |
203 | echo.The overview file is in %BUILDDIR%/changes. | |
204 | goto end | |
205 | ) | |
206 | ||
207 | if "%1" == "linkcheck" ( | |
208 | %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck | |
209 | if errorlevel 1 exit /b 1 | |
210 | echo. | |
211 | echo.Link check complete; look for any errors in the above output ^ | |
212 | or in %BUILDDIR%/linkcheck/output.txt. | |
213 | goto end | |
214 | ) | |
215 | ||
216 | if "%1" == "doctest" ( | |
217 | %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest | |
218 | if errorlevel 1 exit /b 1 | |
219 | echo. | |
220 | echo.Testing of doctests in the sources finished, look at the ^ | |
221 | results in %BUILDDIR%/doctest/output.txt. | |
222 | goto end | |
223 | ) | |
224 | ||
225 | if "%1" == "xml" ( | |
226 | %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml | |
227 | if errorlevel 1 exit /b 1 | |
228 | echo. | |
229 | echo.Build finished. The XML files are in %BUILDDIR%/xml. | |
230 | goto end | |
231 | ) | |
232 | ||
233 | if "%1" == "pseudoxml" ( | |
234 | %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml | |
235 | if errorlevel 1 exit /b 1 | |
236 | echo. | |
237 | echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. | |
238 | goto end | |
239 | ) | |
240 | ||
241 | :end |
13 | 13 | |
14 | 14 | loop = asyncio.get_event_loop() |
15 | 15 | |
16 | @asyncio.coroutine | |
17 | def go() | |
18 | pool = yield from aiomysql.create_pool(host='127.0.0.1', port=3306, | |
19 | user='root', password='', | |
20 | db='mysql', loop=loop, autocommit=False) | |
16 | async def go(): | |
17 | pool = await aiomysql.create_pool(host='127.0.0.1', port=3306, | |
18 | user='root', password='', | |
19 | db='mysql', loop=loop, autocommit=False) | |
21 | 20 | |
22 | with (yield from pool) as conn: | |
23 | cur = yield from conn.cursor() | |
24 | yield from cur.execute("SELECT 10") | |
21 | async with pool.acquire() as conn: | |
22 | cur = await conn.cursor() | |
23 | await cur.execute("SELECT 10") | |
25 | 24 | # print(cur.description) |
26 | (r,) = yield from cur.fetchone() | |
27 | assert r == 10 | |
25 | (r,) = await cur.fetchone() | |
26 | assert r == 10 | |
28 | 27 | pool.close() |
29 | yield from pool.wait_closed() | |
28 | await pool.wait_closed() | |
30 | 29 | |
31 | 30 | loop.run_until_complete(go()) |
32 | 31 | |
44 | 43 | :param kwargs: The function accepts all parameters that |
45 | 44 | :func:`aiomysql.connect` does plus optional keyword-only parameters |
46 | 45 | *loop*, *minsize*, *maxsize*. |
46 | :param float pool_recycle: number of seconds after which connection is | |
47 | recycled, helps to deal with stale connections in pool, default | |
48 | value is -1, means recycling logic is disabled. | |
47 | 49 | :returns: :class:`Pool` instance. |
48 | 50 | |
49 | 51 | |
61 | 63 | |
62 | 64 | The most important way to use it is getting connection in *with statement*:: |
63 | 65 | |
64 | with (yield from pool) as conn: | |
65 | cur = yield from conn.cursor() | |
66 | async with pool.acquire() as conn: | |
67 | cur = await conn.cursor() | |
66 | 68 | |
67 | 69 | |
68 | 70 | See also :meth:`Pool.acquire` and :meth:`Pool.release` for acquring |
29 | 29 | |
30 | 30 | metadata = sa.MetaData() |
31 | 31 | |
32 | tbl = sa.Table('tbl', metadata, | |
33 | sa.Column('id', sa.Integer, primary_key=True), | |
34 | sa.Column('val', sa.String(255))) | |
35 | ||
36 | ||
37 | @asyncio.coroutine | |
38 | def go(): | |
39 | engine = yield from create_engine(user='root', | |
40 | db='test_pymysql', | |
41 | host='127.0.0.1', | |
42 | password='') | |
43 | ||
44 | with (yield from engine) as conn: | |
45 | yield from conn.execute(tbl.insert().values(val='abc')) | |
46 | ||
47 | res = yield from conn.execute(tbl.select()) | |
48 | for row in res: | |
49 | print(row.id, row.val) | |
50 | ||
51 | await conn.commit() | |
52 | ||
53 | asyncio.get_event_loop().run_until_complete(go()) | |
32 | tbl = sa.Table( | |
33 | "tbl", | |
34 | metadata, | |
35 | sa.Column("id", sa.Integer, primary_key=True), | |
36 | sa.Column("val", sa.String(255)), | |
37 | ) | |
38 | ||
39 | ||
40 | async def go(): | |
41 | engine = await create_engine( | |
42 | user="root", | |
43 | db="test_pymysql", | |
44 | host="127.0.0.1", | |
45 | password="", | |
46 | ) | |
47 | ||
48 | async with engine.acquire() as conn: | |
49 | async with conn.begin() as transaction: | |
50 | await conn.execute(tbl.insert().values(val="abc")) | |
51 | await transaction.commit() | |
52 | ||
53 | res = await conn.execute(tbl.select()) | |
54 | async for row in res: | |
55 | print(row.id, row.val) | |
56 | ||
57 | engine.close() | |
58 | await engine.wait_closed() | |
59 | ||
60 | ||
61 | asyncio.run(go()) | |
54 | 62 | |
55 | 63 | |
56 | 64 | So you can execute SQL query built by |
201 | 209 | to be used in the execution. Typically, the format is either a |
202 | 210 | dictionary passed to \*multiparams:: |
203 | 211 | |
204 | yield from conn.execute( | |
212 | await conn.execute( | |
205 | 213 | table.insert(), |
206 | 214 | {"id":1, "value":"v1"} |
207 | 215 | ) |
208 | 216 | |
209 | 217 | ...or individual key/values interpreted by \**params:: |
210 | 218 | |
211 | yield from conn.execute( | |
219 | await conn.execute( | |
212 | 220 | table.insert(), id=1, value="v1" |
213 | 221 | ) |
214 | 222 | |
215 | 223 | In the case that a plain SQL string is passed, a tuple or |
216 | 224 | individual values in \*multiparams may be passed:: |
217 | 225 | |
218 | yield from conn.execute( | |
226 | await conn.execute( | |
219 | 227 | "INSERT INTO table (id, value) VALUES (%d, %s)", |
220 | 228 | (1, "v1") |
221 | 229 | ) |
222 | 230 | |
223 | yield from conn.execute( | |
231 | await conn.execute( | |
224 | 232 | "INSERT INTO table (id, value) VALUES (%s, %s)", |
225 | 233 | 1, "v1" |
226 | 234 | ) |
256 | 264 | an emulated transaction within the scope of the enclosing |
257 | 265 | transaction, that is:: |
258 | 266 | |
259 | trans = yield from conn.begin() # outermost transaction | |
260 | trans2 = yield from conn.begin() # "inner" | |
261 | yield from trans2.commit() # does nothing | |
262 | yield from trans.commit() # actually commits | |
267 | trans = await conn.begin() # outermost transaction | |
268 | trans2 = await conn.begin() # "inner" | |
269 | await trans2.commit() # does nothing | |
270 | await trans.commit() # actually commits | |
263 | 271 | |
264 | 272 | Calls to :meth:`.Transaction.commit` only have an effect |
265 | 273 | when invoked via the outermost :class:`.Transaction` object, though the |
363 | 371 | case-sensitive column name, or by :class:`sqlalchemy.schema.Column`` |
364 | 372 | object. e.g.:: |
365 | 373 | |
366 | for row in (yield from conn.execute(...)): | |
374 | async for row in conn.execute(...): | |
367 | 375 | col1 = row[0] # access via integer position |
368 | 376 | col2 = row['col2'] # access via name |
369 | 377 | col3 = row[mytable.c.mycol] # access via Column object. |
530 | 538 | calling the :meth:`SAConnection.begin` method of |
531 | 539 | :class:`SAConnection`:: |
532 | 540 | |
533 | with (yield from engine) as conn: | |
534 | trans = yield from conn.begin() | |
541 | async with engine.acquire() as conn: | |
542 | trans = await conn.begin() | |
535 | 543 | try: |
536 | yield from conn.execute("insert into x (a, b) values (1, 2)") | |
544 | await conn.execute("insert into x (a, b) values (1, 2)") | |
537 | 545 | except Exception: |
538 | yield from trans.rollback() | |
546 | await trans.rollback() | |
539 | 547 | else: |
540 | yield from trans.commit() | |
548 | await trans.commit() | |
541 | 549 | |
542 | 550 | The object provides :meth:`.rollback` and :meth:`.commit` |
543 | 551 | methods in order to control transaction boundaries. |
5 | 5 | Python database access modules all have similar interfaces, described by the |
6 | 6 | :term:`DBAPI`. Most relational databases use the same synchronous interface, |
7 | 7 | *aiomysql* tries to provide same api you just need |
8 | to use ``yield from conn.f()`` instead of just call ``conn.f()`` for | |
8 | to use ``await conn.f()`` instead of just call ``conn.f()`` for | |
9 | 9 | every method. |
10 | 10 | |
11 | 11 | Installation |
28 | 28 | |
29 | 29 | loop = asyncio.get_event_loop() |
30 | 30 | |
31 | @asyncio.coroutine | |
32 | def test_example(): | |
33 | conn = yield from aiomysql.connect(host='127.0.0.1', port=3306, | |
31 | async def test_example(): | |
32 | conn = await aiomysql.connect(host='127.0.0.1', port=3306, | |
34 | 33 | user='root', password='', db='mysql', |
35 | 34 | loop=loop) |
36 | 35 | |
37 | cur = yield from conn.cursor() | |
38 | yield from cur.execute("SELECT Host,User FROM user") | |
36 | cur = await conn.cursor() | |
37 | await cur.execute("SELECT Host,User FROM user") | |
39 | 38 | print(cur.description) |
40 | r = yield from cur.fetchall() | |
39 | r = await cur.fetchall() | |
41 | 40 | print(r) |
42 | yield from cur.close() | |
41 | await cur.close() | |
43 | 42 | conn.close() |
44 | 43 | |
45 | 44 | loop.run_until_complete(test_example()) |
59 | 58 | ``SELECT Host,User FROM user;`` statement, which returns a list of `host` and |
60 | 59 | `user` from :term:`MySQL` system table ``user``:: |
61 | 60 | |
62 | cur = yield from conn.cursor() | |
63 | yield from cur.execute("SELECT Host,User FROM user") | |
61 | cur = await conn.cursor() | |
62 | await cur.execute("SELECT Host,User FROM user") | |
64 | 63 | print(cur.description) |
65 | r = yield from cur.fetchall() | |
64 | r = await cur.fetchall() | |
66 | 65 | |
67 | 66 | The cursor object's :meth:`Cursor.execute()` method sends the query the server |
68 | 67 | and :meth:`Cursor.fetchall()` retrieves rows. |
71 | 70 | connection object's :meth:`Connection.close()` method to disconnect |
72 | 71 | from the server:: |
73 | 72 | |
74 | yield from cur.close() | |
73 | await cur.close() | |
75 | 74 | conn.close() |
76 | 75 | |
77 | 76 | After that, ``conn`` becomes invalid and should not be used to access the |
21 | 21 | await cur.execute("INSERT INTO music_style VALUES(3,'power metal');") |
22 | 22 | await conn.commit() |
23 | 23 | |
24 | # insert 3 row by one long query using *executemane* method | |
24 | # insert 3 row by one long query using *executemany* method | |
25 | 25 | data = [(4, 'gothic metal'), (5, 'doom metal'), (6, 'post metal')] |
26 | 26 | await cur.executemany( |
27 | 27 | "INSERT INTO music_style (id, name)" |
6 | 6 | |
7 | 7 | @asyncio.coroutine |
8 | 8 | def test_example(): |
9 | conn = yield from aiomysql.connect(host='127.0.0.1', port=3306, | |
10 | user='root', password='', db='mysql', | |
11 | loop=loop) | |
9 | conn = yield from aiomysql.connect( | |
10 | host='127.0.0.1', | |
11 | port=3306, | |
12 | user='root', | |
13 | password='', | |
14 | db='mysql', | |
15 | loop=loop | |
16 | ) | |
12 | 17 | |
13 | 18 | cur = yield from conn.cursor() |
14 | 19 | yield from cur.execute("SELECT Host,User FROM user") |
6 | 6 | |
7 | 7 | @asyncio.coroutine |
8 | 8 | def test_example(): |
9 | pool = yield from aiomysql.create_pool(host='127.0.0.1', port=3306, | |
10 | user='root', password='', | |
11 | db='mysql', loop=loop) | |
12 | with (yield from pool) as conn: | |
13 | cur = yield from conn.cursor() | |
14 | yield from cur.execute("SELECT 10") | |
15 | # print(cur.description) | |
16 | (r,) = yield from cur.fetchone() | |
17 | assert r == 10 | |
18 | pool.close() | |
19 | yield from pool.wait_closed() | |
9 | pool = yield from aiomysql.create_pool( | |
10 | host='127.0.0.1', | |
11 | port=3306, | |
12 | user='root', | |
13 | password='', | |
14 | db='mysql', | |
15 | loop=loop | |
16 | ) | |
17 | with (yield from pool) as conn: | |
18 | cur = yield from conn.cursor() | |
19 | yield from cur.execute("SELECT 10") | |
20 | # print(cur.description) | |
21 | (r,) = yield from cur.fetchone() | |
22 | assert r == 10 | |
23 | pool.close() | |
24 | yield from pool.wait_closed() | |
20 | 25 | |
21 | 26 | |
22 | 27 | loop.run_until_complete(test_example()) |
0 | [build-system] | |
1 | requires = [ | |
2 | # Essentials | |
3 | "setuptools >= 42", | |
4 | ||
5 | # Plugins | |
6 | "setuptools_scm[toml] >= 6.4", | |
7 | "setuptools_scm_git_archive >= 1.1", | |
8 | ] | |
9 | build-backend = "setuptools.build_meta" | |
10 | ||
11 | [tool.setuptools_scm] | |
12 | write_to = "aiomysql/_scm_version.py" |
0 | coverage==4.5.1 | |
1 | flake8==3.5.0 | |
2 | ipdb==0.11 | |
3 | ipython==7.0.1 | |
4 | pytest==3.9.1 | |
5 | pytest-cov==2.6.0 | |
6 | pytest-sugar==0.9.1 | |
7 | PyMySQL>=0.9,<=0.9.2 | |
8 | docker==3.5.1 | |
9 | sphinx==1.8.1 | |
10 | sphinxcontrib-asyncio==0.2.0 | |
11 | sqlalchemy==1.2.12 | |
12 | uvloop==0.11.2; python_version >= '3.5' | |
0 | coverage==6.3.2 | |
1 | flake8==4.0.1 | |
2 | ipdb==0.13.9 | |
3 | pytest==7.1.2 | |
4 | pytest-cov==3.0.0 | |
5 | pytest-sugar==0.9.4 | |
6 | PyMySQL==1.0.2 | |
7 | sphinx>=1.8.1, <4.5.1 | |
8 | sphinxcontrib-asyncio==0.3.0 | |
9 | SQLAlchemy==1.3.24 | |
10 | uvloop==0.16.0; python_version < '3.11' | |
11 | twine==4.0.0 |
0 | [metadata] | |
1 | name = aiomysql | |
2 | version = attr: aiomysql.__version__ | |
3 | url = https://github.com/aio-libs/aiomysql | |
4 | download_url = https://pypi.python.org/pypi/aiomysql | |
5 | project_urls = | |
6 | CI: GitHub = https://github.com/aio-libs/aiomysql/actions | |
7 | Docs: RTD = https://aiomysql.readthedocs.io/ | |
8 | GitHub: repo = https://github.com/aio-libs/aiomysql | |
9 | GitHub: issues = https://github.com/aio-libs/aiomysql/issues | |
10 | GitHub: discussions = https://github.com/aio-libs/aiomysql/discussions | |
11 | description = MySQL driver for asyncio. | |
12 | long_description = file: README.rst, CHANGES.txt | |
13 | long_description_content_type = text/x-rst | |
14 | author = Nikolay Novik | |
15 | author_email = nickolainovik@gmail.com | |
16 | classifiers = | |
17 | License :: OSI Approved :: MIT License | |
18 | Intended Audience :: Developers | |
19 | Programming Language :: Python :: 3 | |
20 | Programming Language :: Python :: 3.7 | |
21 | Programming Language :: Python :: 3.8 | |
22 | Programming Language :: Python :: 3.9 | |
23 | Programming Language :: Python :: 3.10 | |
24 | Operating System :: POSIX | |
25 | Environment :: Web Environment | |
26 | Development Status :: 3 - Alpha | |
27 | Topic :: Database | |
28 | Topic :: Database :: Front-Ends | |
29 | Framework :: AsyncIO | |
30 | license = MIT | |
31 | keywords = | |
32 | mysql | |
33 | mariadb | |
34 | asyncio | |
35 | aiomysql | |
36 | platforms = | |
37 | POSIX | |
38 | ||
39 | [options] | |
40 | python_requires = >=3.7 | |
41 | include_package_data = True | |
42 | ||
43 | packages = find: | |
44 | ||
45 | # runtime requirements | |
46 | install_requires = | |
47 | PyMySQL>=1.0 | |
48 | ||
49 | [options.extras_require] | |
50 | sa = | |
51 | sqlalchemy>=1.0,<1.4 | |
52 | rsa = | |
53 | PyMySQL[rsa]>=1.0 | |
54 | ||
55 | [options.packages.find] | |
56 | exclude = | |
57 | tests | |
58 | tests.* |
0 | import os | |
1 | import re | |
2 | import sys | |
3 | from setuptools import setup, find_packages | |
4 | ||
5 | ||
6 | install_requires = ['PyMySQL>=0.9,<=0.9.2'] | |
7 | ||
8 | PY_VER = sys.version_info | |
9 | ||
10 | ||
11 | if not PY_VER >= (3, 5, 3): | |
12 | raise RuntimeError("aiomysql doesn't support Python earlier than 3.5.3") | |
13 | ||
14 | ||
15 | def read(f): | |
16 | return open(os.path.join(os.path.dirname(__file__), f)).read().strip() | |
17 | ||
18 | ||
19 | extras_require = {'sa': ['sqlalchemy>=1.0'], } | |
20 | ||
21 | ||
22 | def read_version(): | |
23 | regexp = re.compile(r"^__version__\W*=\W*'([\d.abrc]+)'") | |
24 | init_py = os.path.join(os.path.dirname(__file__), | |
25 | 'aiomysql', '__init__.py') | |
26 | with open(init_py) as f: | |
27 | for line in f: | |
28 | match = regexp.match(line) | |
29 | if match is not None: | |
30 | return match.group(1) | |
31 | else: | |
32 | raise RuntimeError('Cannot find version in aiomysql/__init__.py') | |
33 | ||
34 | ||
35 | classifiers = [ | |
36 | 'License :: OSI Approved :: MIT License', | |
37 | 'Intended Audience :: Developers', | |
38 | 'Programming Language :: Python :: 3', | |
39 | 'Programming Language :: Python :: 3.5', | |
40 | 'Programming Language :: Python :: 3.6', | |
41 | 'Operating System :: POSIX', | |
42 | 'Environment :: Web Environment', | |
43 | 'Development Status :: 3 - Alpha', | |
44 | 'Topic :: Database', | |
45 | 'Topic :: Database :: Front-Ends', | |
46 | 'Framework :: AsyncIO', | |
47 | ] | |
48 | ||
49 | ||
50 | setup(name='aiomysql', | |
51 | version=read_version(), | |
52 | description=('MySQL driver for asyncio.'), | |
53 | long_description='\n\n'.join((read('README.rst'), read('CHANGES.txt'))), | |
54 | classifiers=classifiers, | |
55 | platforms=['POSIX'], | |
56 | author="Nikolay Novik", | |
57 | author_email="nickolainovik@gmail.com", | |
58 | url='https://github.com/aio-libs/aiomysql', | |
59 | download_url='https://pypi.python.org/pypi/aiomysql', | |
60 | license='MIT', | |
61 | packages=find_packages(exclude=['tests', 'tests.*']), | |
62 | install_requires=install_requires, | |
63 | extras_require=extras_require, | |
64 | include_package_data=True) |
0 | 0 | import asyncio |
1 | 1 | import unittest |
2 | ||
3 | from functools import wraps | |
4 | ||
5 | ||
6 | def run_until_complete(fun): | |
7 | if not asyncio.iscoroutinefunction(fun): | |
8 | fun = asyncio.coroutine(fun) | |
9 | ||
10 | @wraps(fun) | |
11 | def wrapper(test, *args, **kw): | |
12 | loop = test.loop | |
13 | ret = loop.run_until_complete( | |
14 | asyncio.wait_for(fun(test, *args, **kw), 15, loop=loop)) | |
15 | return ret | |
16 | return wrapper | |
17 | 2 | |
18 | 3 | |
19 | 4 | class BaseTest(unittest.TestCase): |
0 | import asyncio | |
1 | 0 | import os |
2 | 1 | import aiomysql |
3 | 2 | from tests._testutils import BaseTest |
5 | 4 | |
6 | 5 | class AIOPyMySQLTestCase(BaseTest): |
7 | 6 | |
8 | @asyncio.coroutine | |
9 | def _connect_all(self): | |
10 | conn1 = yield from aiomysql.connect(loop=self.loop, host=self.host, | |
11 | port=self.port, user=self.user, | |
12 | db=self.db, | |
13 | password=self.password, | |
14 | use_unicode=True, echo=True) | |
15 | conn2 = yield from aiomysql.connect(loop=self.loop, host=self.host, | |
16 | port=self.port, user=self.user, | |
17 | db=self.other_db, | |
18 | password=self.password, | |
19 | use_unicode=False, echo=True) | |
20 | conn3 = yield from aiomysql.connect(loop=self.loop, host=self.host, | |
21 | port=self.port, user=self.user, | |
22 | db=self.db, | |
23 | password=self.password, | |
24 | use_unicode=True, echo=True, | |
25 | local_infile=True) | |
7 | async def _connect_all(self): | |
8 | conn1 = await aiomysql.connect(loop=self.loop, host=self.host, | |
9 | port=self.port, user=self.user, | |
10 | db=self.db, | |
11 | password=self.password, | |
12 | use_unicode=True, echo=True) | |
13 | conn2 = await aiomysql.connect(loop=self.loop, host=self.host, | |
14 | port=self.port, user=self.user, | |
15 | db=self.other_db, | |
16 | password=self.password, | |
17 | use_unicode=False, echo=True) | |
18 | conn3 = await aiomysql.connect(loop=self.loop, host=self.host, | |
19 | port=self.port, user=self.user, | |
20 | db=self.db, | |
21 | password=self.password, | |
22 | use_unicode=True, echo=True, | |
23 | local_infile=True) | |
26 | 24 | |
27 | 25 | self.connections = [conn1, conn2, conn3] |
28 | 26 | |
44 | 42 | self.doCleanups() |
45 | 43 | super(AIOPyMySQLTestCase, self).tearDown() |
46 | 44 | |
47 | @asyncio.coroutine | |
48 | def connect(self, host=None, user=None, password=None, | |
49 | db=None, use_unicode=True, no_delay=None, port=None, **kwargs): | |
45 | async def connect(self, host=None, user=None, password=None, | |
46 | db=None, use_unicode=True, port=None, | |
47 | **kwargs): | |
50 | 48 | if host is None: |
51 | 49 | host = self.host |
52 | 50 | if user is None: |
57 | 55 | db = self.db |
58 | 56 | if port is None: |
59 | 57 | port = self.port |
60 | conn = yield from aiomysql.connect(loop=self.loop, host=host, | |
61 | user=user, password=password, | |
62 | db=db, use_unicode=use_unicode, | |
63 | no_delay=no_delay, port=port, | |
64 | **kwargs) | |
58 | conn = await aiomysql.connect(loop=self.loop, host=host, | |
59 | user=user, password=password, | |
60 | db=db, use_unicode=use_unicode, | |
61 | port=port, | |
62 | **kwargs) | |
65 | 63 | self.addCleanup(conn.close) |
66 | 64 | return conn |
67 | 65 | |
68 | @asyncio.coroutine | |
69 | def create_pool(self, host=None, user=None, password=None, | |
70 | db=None, use_unicode=True, no_delay=None, | |
71 | port=None, **kwargs): | |
66 | async def create_pool(self, host=None, user=None, password=None, | |
67 | db=None, use_unicode=True, | |
68 | port=None, **kwargs): | |
72 | 69 | if host is None: |
73 | 70 | host = self.host |
74 | 71 | if user is None: |
79 | 76 | db = self.db |
80 | 77 | if port is None: |
81 | 78 | port = self.port |
82 | pool = yield from aiomysql.create_pool(loop=self.loop, host=host, | |
83 | user=user, password=password, | |
84 | db=db, use_unicode=use_unicode, | |
85 | no_delay=no_delay, port=port, | |
86 | **kwargs) | |
79 | pool = await aiomysql.create_pool(loop=self.loop, host=host, | |
80 | user=user, password=password, | |
81 | db=db, use_unicode=use_unicode, | |
82 | port=port, | |
83 | **kwargs) | |
87 | 84 | self.addCleanup(pool.close) |
88 | 85 | return pool |
0 | 0 | import asyncio |
1 | 1 | import gc |
2 | 2 | import os |
3 | import re | |
3 | 4 | import ssl |
4 | import socket | |
5 | 5 | import sys |
6 | import time | |
7 | import uuid | |
8 | ||
9 | from docker import APIClient | |
10 | 6 | |
11 | 7 | import aiomysql |
12 | 8 | import pymysql |
13 | 9 | import pytest |
14 | 10 | |
15 | 11 | |
16 | PY_35 = sys.version_info >= (3, 5) | |
17 | if PY_35: | |
12 | # version gate can be removed once uvloop supports python 3.11 | |
13 | # https://github.com/MagicStack/uvloop/issues/450 | |
14 | # https://github.com/MagicStack/uvloop/pull/459 | |
15 | PY_311 = sys.version_info >= (3, 11) | |
16 | if PY_311: | |
17 | uvloop = None | |
18 | else: | |
18 | 19 | import uvloop |
19 | else: | |
20 | uvloop = None | |
21 | ||
22 | ||
23 | @pytest.fixture(scope='session') | |
24 | def unused_port(): | |
25 | def f(): | |
26 | with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: | |
27 | s.bind(('127.0.0.1', 0)) | |
28 | return s.getsockname()[1] | |
29 | return f | |
20 | ||
21 | ||
22 | @pytest.fixture | |
23 | def disable_gc(): | |
24 | gc_enabled = gc.isenabled() | |
25 | if gc_enabled: | |
26 | gc.disable() | |
27 | gc.collect() | |
28 | yield | |
29 | if gc_enabled: | |
30 | gc.collect() | |
31 | gc.enable() | |
30 | 32 | |
31 | 33 | |
32 | 34 | def pytest_generate_tests(metafunc): |
34 | 36 | loop_type = ['asyncio', 'uvloop'] if uvloop else ['asyncio'] |
35 | 37 | metafunc.parametrize("loop_type", loop_type) |
36 | 38 | |
37 | if 'mysql_tag' in metafunc.fixturenames: | |
38 | tags = set(metafunc.config.option.mysql_tag) | |
39 | if not tags: | |
40 | tags = ['5.6', '8.0'] | |
41 | elif 'all' in tags: | |
42 | tags = ['5.6', '5.7', '8.0'] | |
43 | else: | |
44 | tags = list(tags) | |
45 | metafunc.parametrize("mysql_tag", tags, scope='session') | |
46 | ||
47 | ||
48 | # This is here unless someone fixes the generate_tests bit | |
49 | @pytest.yield_fixture(scope='session') | |
50 | def mysql_tag(): | |
51 | return '5.6' | |
52 | ||
53 | ||
54 | @pytest.yield_fixture | |
39 | if "mysql_address" in metafunc.fixturenames: | |
40 | mysql_addresses = [] | |
41 | ids = [] | |
42 | ||
43 | opt_mysql_unix_socket = \ | |
44 | list(metafunc.config.getoption("mysql_unix_socket")) | |
45 | for i in range(len(opt_mysql_unix_socket)): | |
46 | if "=" in opt_mysql_unix_socket[i]: | |
47 | label, path = opt_mysql_unix_socket[i].split("=", 1) | |
48 | mysql_addresses.append(path) | |
49 | ids.append(label) | |
50 | else: | |
51 | mysql_addresses.append(opt_mysql_unix_socket[i]) | |
52 | ids.append("unix{}".format(i)) | |
53 | ||
54 | opt_mysql_address = list(metafunc.config.getoption("mysql_address")) | |
55 | for i in range(len(opt_mysql_address)): | |
56 | if "=" in opt_mysql_address[i]: | |
57 | label, addr = opt_mysql_address[i].split("=", 1) | |
58 | ids.append(label) | |
59 | else: | |
60 | addr = opt_mysql_address[i] | |
61 | ids.append("tcp{}".format(i)) | |
62 | ||
63 | if ":" in addr: | |
64 | addr = addr.split(":", 1) | |
65 | mysql_addresses.append((addr[0], int(addr[1]))) | |
66 | else: | |
67 | mysql_addresses.append((addr, 3306)) | |
68 | ||
69 | # default to connecting to localhost | |
70 | if len(mysql_addresses) == 0: | |
71 | mysql_addresses = [("127.0.0.1", 3306)] | |
72 | ids = ["tcp-local"] | |
73 | ||
74 | assert len(mysql_addresses) == len(set(mysql_addresses)), \ | |
75 | "mysql targets are not unique" | |
76 | assert len(ids) == len(set(ids)), \ | |
77 | "mysql target names are not unique" | |
78 | ||
79 | metafunc.parametrize("mysql_address", | |
80 | mysql_addresses, | |
81 | ids=ids, | |
82 | scope="session", | |
83 | ) | |
84 | ||
85 | ||
86 | @pytest.fixture | |
55 | 87 | def loop(request, loop_type): |
56 | 88 | loop = asyncio.new_event_loop() |
57 | 89 | asyncio.set_event_loop(None) |
76 | 108 | if collector.funcnamefilter(name): |
77 | 109 | if not callable(obj): |
78 | 110 | return |
79 | item = pytest.Function(name, parent=collector) | |
111 | item = pytest.Function.from_parent(collector, name=name) | |
80 | 112 | if 'run_loop' in item.keywords: |
81 | 113 | return list(collector._genfunctions(name, obj)) |
82 | 114 | |
102 | 134 | item.fixturenames.append('loop') |
103 | 135 | |
104 | 136 | |
105 | def pytest_ignore_collect(path, config): | |
106 | if 'pep492' in str(path): | |
107 | if sys.version_info < (3, 5, 0): | |
108 | return True | |
137 | def pytest_configure(config): | |
138 | config.addinivalue_line( | |
139 | "markers", | |
140 | "run_loop" | |
141 | ) | |
142 | config.addinivalue_line( | |
143 | "markers", | |
144 | "mysql_version(db, version): run only on specific database versions" | |
145 | ) | |
109 | 146 | |
110 | 147 | |
111 | 148 | def pytest_addoption(parser): |
112 | parser.addoption("--mysql_tag", action="append", default=[], | |
113 | help=("MySQL server versions. " | |
114 | "May be used several times. " | |
115 | "Available values: 5.6, 5.7, 8.0, all")) | |
116 | parser.addoption("--no-pull", action="store_true", default=False, | |
117 | help="Don't perform docker images pulling") | |
118 | ||
119 | ||
120 | @pytest.fixture | |
121 | def mysql_params(): | |
122 | params = {"host": os.environ.get('MYSQL_HOST', 'localhost'), | |
123 | "port": int(os.environ.get('MYSQL_PORT', 3306)), | |
124 | "user": os.environ.get('MYSQL_USER', 'root'), | |
149 | parser.addoption( | |
150 | "--mysql-address", | |
151 | action="append", | |
152 | default=[], | |
153 | help="list of addresses to connect to: [name=]host[:port]", | |
154 | ) | |
155 | parser.addoption( | |
156 | "--mysql-unix-socket", | |
157 | action="append", | |
158 | default=[], | |
159 | help="list of unix sockets to connect to: [name=]/path/to/socket", | |
160 | ) | |
161 | ||
162 | ||
163 | @pytest.fixture | |
164 | def mysql_params(mysql_server): | |
165 | params = {**mysql_server['conn_params'], | |
125 | 166 | "db": os.environ.get('MYSQL_DB', 'test_pymysql'), |
126 | "password": os.environ.get('MYSQL_PASSWORD', ''), | |
127 | 167 | "local_infile": True, |
128 | 168 | "use_unicode": True, |
129 | 169 | } |
131 | 171 | |
132 | 172 | |
133 | 173 | # TODO: fix this workaround |
134 | @asyncio.coroutine | |
135 | def _cursor_wrapper(conn): | |
136 | cur = yield from conn.cursor() | |
137 | return cur | |
138 | ||
139 | ||
140 | @pytest.yield_fixture | |
174 | async def _cursor_wrapper(conn): | |
175 | return await conn.cursor() | |
176 | ||
177 | ||
178 | @pytest.fixture | |
141 | 179 | def cursor(connection, loop): |
142 | 180 | cur = loop.run_until_complete(_cursor_wrapper(connection)) |
143 | 181 | yield cur |
144 | 182 | loop.run_until_complete(cur.close()) |
145 | 183 | |
146 | 184 | |
147 | @pytest.yield_fixture | |
185 | @pytest.fixture | |
148 | 186 | def connection(mysql_params, loop): |
149 | 187 | coro = aiomysql.connect(loop=loop, **mysql_params) |
150 | 188 | conn = loop.run_until_complete(coro) |
152 | 190 | loop.run_until_complete(conn.ensure_closed()) |
153 | 191 | |
154 | 192 | |
155 | @pytest.yield_fixture | |
193 | @pytest.fixture | |
156 | 194 | def connection_creator(mysql_params, loop): |
157 | 195 | connections = [] |
158 | 196 | |
159 | @asyncio.coroutine | |
160 | def f(**kw): | |
197 | async def f(**kw): | |
161 | 198 | conn_kw = mysql_params.copy() |
162 | 199 | conn_kw.update(kw) |
163 | 200 | _loop = conn_kw.pop('loop', loop) |
164 | conn = yield from aiomysql.connect(loop=_loop, **conn_kw) | |
201 | conn = await aiomysql.connect(loop=_loop, **conn_kw) | |
165 | 202 | connections.append(conn) |
166 | 203 | return conn |
167 | 204 | |
168 | 205 | yield f |
169 | 206 | |
170 | 207 | for conn in connections: |
171 | loop.run_until_complete(conn.ensure_closed()) | |
172 | ||
173 | ||
174 | @pytest.yield_fixture | |
208 | try: | |
209 | loop.run_until_complete(conn.ensure_closed()) | |
210 | except ConnectionResetError: | |
211 | pass | |
212 | ||
213 | ||
214 | @pytest.fixture | |
175 | 215 | def pool_creator(mysql_params, loop): |
176 | 216 | pools = [] |
177 | 217 | |
178 | @asyncio.coroutine | |
179 | def f(**kw): | |
218 | async def f(**kw): | |
180 | 219 | conn_kw = mysql_params.copy() |
181 | 220 | conn_kw.update(kw) |
182 | 221 | _loop = conn_kw.pop('loop', loop) |
183 | pool = yield from aiomysql.create_pool(loop=_loop, **conn_kw) | |
222 | pool = await aiomysql.create_pool(loop=_loop, **conn_kw) | |
184 | 223 | pools.append(pool) |
185 | 224 | return pool |
186 | 225 | |
191 | 230 | loop.run_until_complete(pool.wait_closed()) |
192 | 231 | |
193 | 232 | |
194 | @pytest.yield_fixture | |
233 | @pytest.fixture | |
195 | 234 | def table_cleanup(loop, connection): |
196 | 235 | table_list = [] |
197 | 236 | cursor = loop.run_until_complete(_cursor_wrapper(connection)) |
207 | 246 | |
208 | 247 | |
209 | 248 | @pytest.fixture(scope='session') |
210 | def session_id(): | |
211 | """Unique session identifier, random string.""" | |
212 | return str(uuid.uuid4()) | |
213 | ||
214 | ||
215 | @pytest.fixture(scope='session') | |
216 | def docker(): | |
217 | return APIClient(version='auto') | |
218 | ||
219 | ||
220 | @pytest.fixture(autouse=True) | |
221 | def ensure_mysql_verison(request, mysql_tag): | |
222 | if request.node.get_marker('mysql_verison'): | |
223 | if request.node.get_marker('mysql_verison').args[0] != mysql_tag: | |
224 | pytest.skip('Not applicable for ' | |
225 | 'MySQL version: {0}'.format(mysql_tag)) | |
226 | ||
227 | ||
228 | @pytest.fixture(scope='session') | |
229 | def mysql_server(unused_port, docker, session_id, mysql_tag, request): | |
230 | print('\nSTARTUP CONTAINER - {0}\n'.format(mysql_tag)) | |
231 | ||
232 | if not request.config.option.no_pull: | |
233 | docker.pull('mysql:{}'.format(mysql_tag)) | |
234 | ||
235 | # bound IPs do not work on OSX | |
236 | host = "127.0.0.1" | |
237 | host_port = unused_port() | |
238 | ||
239 | # As TLS is optional, might as well always configure it | |
240 | ssl_directory = os.path.join(os.path.dirname(__file__), | |
241 | 'ssl_resources', 'ssl') | |
242 | ca_file = os.path.join(ssl_directory, 'ca.pem') | |
243 | tls_cnf = os.path.join(os.path.dirname(__file__), | |
244 | 'ssl_resources', 'tls.cnf') | |
245 | ||
246 | ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) | |
247 | ctx.check_hostname = False | |
248 | ctx.load_verify_locations(cafile=ca_file) | |
249 | # ctx.verify_mode = ssl.CERT_NONE | |
250 | ||
251 | container_args = dict( | |
252 | image='mysql:{}'.format(mysql_tag), | |
253 | name='aiomysql-test-server-{}-{}'.format(mysql_tag, session_id), | |
254 | ports=[3306], | |
255 | detach=True, | |
256 | host_config=docker.create_host_config( | |
257 | port_bindings={3306: (host, host_port)}, | |
258 | binds={ | |
259 | ssl_directory: {'bind': '/etc/mysql/ssl', 'mode': 'ro'}, | |
260 | tls_cnf: {'bind': '/etc/mysql/conf.d/tls.cnf', 'mode': 'ro'}, | |
261 | } | |
262 | ), | |
263 | environment={'MYSQL_ROOT_PASSWORD': 'rootpw'} | |
264 | ) | |
265 | ||
266 | container = docker.create_container(**container_args) | |
249 | def mysql_server(mysql_address): | |
250 | unix_socket = type(mysql_address) is str | |
251 | ||
252 | if not unix_socket: | |
253 | ssl_directory = os.path.join(os.path.dirname(__file__), | |
254 | 'ssl_resources', 'ssl') | |
255 | ca_file = os.path.join(ssl_directory, 'ca.pem') | |
256 | ||
257 | ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2) | |
258 | ctx.check_hostname = False | |
259 | ctx.load_verify_locations(cafile=ca_file) | |
260 | # ctx.verify_mode = ssl.CERT_NONE | |
261 | ||
262 | server_params = { | |
263 | 'user': 'root', | |
264 | 'password': os.environ.get("MYSQL_ROOT_PASSWORD"), | |
265 | } | |
266 | ||
267 | if unix_socket: | |
268 | server_params["unix_socket"] = mysql_address | |
269 | else: | |
270 | server_params["host"] = mysql_address[0] | |
271 | server_params["port"] = mysql_address[1] | |
272 | server_params["ssl"] = ctx | |
267 | 273 | |
268 | 274 | try: |
269 | docker.start(container=container['Id']) | |
270 | ||
271 | # MySQL restarts at least 4 times in the container before its ready | |
272 | time.sleep(10) | |
273 | ||
274 | server_params = { | |
275 | 'host': host, | |
276 | 'port': host_port, | |
277 | 'user': 'root', | |
278 | 'password': 'rootpw', | |
279 | 'ssl': ctx | |
280 | } | |
281 | delay = 0.001 | |
282 | for i in range(100): | |
283 | try: | |
284 | connection = pymysql.connect( | |
285 | db='mysql', | |
286 | charset='utf8mb4', | |
287 | cursorclass=pymysql.cursors.DictCursor, | |
288 | **server_params) | |
289 | ||
290 | with connection.cursor() as cursor: | |
291 | cursor.execute("SHOW VARIABLES LIKE '%ssl%';") | |
292 | ||
293 | result = cursor.fetchall() | |
294 | result = {item['Variable_name']: | |
295 | item['Value'] for item in result} | |
296 | ||
297 | assert result['have_ssl'] == "YES", \ | |
298 | "SSL Not Enabled on docker'd MySQL" | |
299 | ||
300 | cursor.execute("SHOW STATUS LIKE 'Ssl_version%'") | |
301 | ||
302 | result = cursor.fetchone() | |
303 | # As we connected with TLS, it should start with that :D | |
304 | assert result['Value'].startswith('TLS'), \ | |
305 | "Not connected to the database with TLS" | |
306 | ||
307 | # Create Databases | |
308 | cursor.execute('CREATE DATABASE test_pymysql ' | |
309 | 'DEFAULT CHARACTER SET utf8 ' | |
310 | 'DEFAULT COLLATE utf8_general_ci;') | |
311 | cursor.execute('CREATE DATABASE test_pymysql2 ' | |
312 | 'DEFAULT CHARACTER SET utf8 ' | |
313 | 'DEFAULT COLLATE utf8_general_ci;') | |
314 | ||
315 | # Do MySQL8+ Specific Setup | |
316 | if mysql_tag in ('8.0',): | |
317 | # Create Users to test SHA256 | |
318 | cursor.execute('CREATE USER user_sha256 ' | |
319 | 'IDENTIFIED WITH "sha256_password" ' | |
320 | 'BY "pass_sha256"') | |
321 | cursor.execute('CREATE USER nopass_sha256 ' | |
322 | 'IDENTIFIED WITH "sha256_password"') | |
323 | cursor.execute('CREATE USER user_caching_sha2 ' | |
324 | 'IDENTIFIED ' | |
325 | 'WITH "caching_sha2_password" ' | |
326 | 'BY "pass_caching_sha2"') | |
327 | cursor.execute('CREATE USER nopass_caching_sha2 ' | |
328 | 'IDENTIFIED ' | |
329 | 'WITH "caching_sha2_password" ' | |
330 | 'PASSWORD EXPIRE NEVER') | |
331 | cursor.execute('FLUSH PRIVILEGES') | |
332 | ||
333 | break | |
334 | except Exception as err: | |
335 | time.sleep(delay) | |
336 | delay *= 2 | |
337 | else: | |
338 | pytest.fail("Cannot start MySQL server") | |
339 | ||
340 | container['host'] = host | |
341 | container['port'] = host_port | |
342 | container['conn_params'] = server_params | |
343 | ||
344 | yield container | |
345 | finally: | |
346 | print('\nTEARDOWN CONTAINER - {0}\n'.format(mysql_tag)) | |
347 | docker.kill(container=container['Id']) | |
348 | docker.remove_container(container['Id']) | |
275 | connection = pymysql.connect( | |
276 | db='mysql', | |
277 | charset='utf8mb4', | |
278 | cursorclass=pymysql.cursors.DictCursor, | |
279 | **server_params) | |
280 | ||
281 | with connection.cursor() as cursor: | |
282 | cursor.execute("SELECT VERSION() AS version") | |
283 | server_version = cursor.fetchone()["version"] | |
284 | server_version_tuple = tuple( | |
285 | (int(dig) if dig is not None else 0) | |
286 | for dig in | |
287 | re.match(r"^(\d+)\.(\d+)(?:\.(\d+))?", server_version).group(1, 2, 3) | |
288 | ) | |
289 | server_version_tuple_short = (server_version_tuple[0], | |
290 | server_version_tuple[1]) | |
291 | if server_version_tuple_short in [(5, 7), (8, 0)]: | |
292 | db_type = "mysql" | |
293 | elif server_version_tuple[0] == 10: | |
294 | db_type = "mariadb" | |
295 | else: | |
296 | pytest.fail("Unable to determine database type from {!r}" | |
297 | .format(server_version_tuple)) | |
298 | ||
299 | if not unix_socket: | |
300 | cursor.execute("SHOW VARIABLES LIKE '%ssl%';") | |
301 | ||
302 | result = cursor.fetchall() | |
303 | result = {item['Variable_name']: | |
304 | item['Value'] for item in result} | |
305 | ||
306 | assert result['have_ssl'] == "YES", \ | |
307 | "SSL Not Enabled on MySQL" | |
308 | ||
309 | cursor.execute("SHOW STATUS LIKE 'Ssl_version%'") | |
310 | ||
311 | result = cursor.fetchone() | |
312 | # As we connected with TLS, it should start with that :D | |
313 | assert result['Value'].startswith('TLS'), \ | |
314 | "Not connected to the database with TLS" | |
315 | ||
316 | # Drop possibly existing old databases | |
317 | cursor.execute('DROP DATABASE IF EXISTS test_pymysql;') | |
318 | cursor.execute('DROP DATABASE IF EXISTS test_pymysql2;') | |
319 | ||
320 | # Create Databases | |
321 | cursor.execute('CREATE DATABASE test_pymysql ' | |
322 | 'DEFAULT CHARACTER SET utf8 ' | |
323 | 'DEFAULT COLLATE utf8_general_ci;') | |
324 | cursor.execute('CREATE DATABASE test_pymysql2 ' | |
325 | 'DEFAULT CHARACTER SET utf8 ' | |
326 | 'DEFAULT COLLATE utf8_general_ci;') | |
327 | ||
328 | # Do MySQL8+ Specific Setup | |
329 | if db_type == "mysql" and server_version_tuple_short == (8, 0): | |
330 | # Drop existing users | |
331 | cursor.execute('DROP USER IF EXISTS user_sha256;') | |
332 | cursor.execute('DROP USER IF EXISTS nopass_sha256;') | |
333 | cursor.execute('DROP USER IF EXISTS user_caching_sha2;') | |
334 | cursor.execute('DROP USER IF EXISTS nopass_caching_sha2;') | |
335 | ||
336 | # Create Users to test SHA256 | |
337 | cursor.execute('CREATE USER user_sha256 ' | |
338 | 'IDENTIFIED WITH "sha256_password" ' | |
339 | 'BY "pass_sha256"') | |
340 | cursor.execute('CREATE USER nopass_sha256 ' | |
341 | 'IDENTIFIED WITH "sha256_password"') | |
342 | cursor.execute('CREATE USER user_caching_sha2 ' | |
343 | 'IDENTIFIED ' | |
344 | 'WITH "caching_sha2_password" ' | |
345 | 'BY "pass_caching_sha2"') | |
346 | cursor.execute('CREATE USER nopass_caching_sha2 ' | |
347 | 'IDENTIFIED ' | |
348 | 'WITH "caching_sha2_password" ' | |
349 | 'PASSWORD EXPIRE NEVER') | |
350 | cursor.execute('FLUSH PRIVILEGES') | |
351 | except Exception: | |
352 | pytest.fail("Cannot initialize MySQL environment") | |
353 | ||
354 | return { | |
355 | "conn_params": server_params, | |
356 | "server_version": server_version, | |
357 | "server_version_tuple": server_version_tuple, | |
358 | "server_version_tuple_short": server_version_tuple_short, | |
359 | "db_type": db_type, | |
360 | } |
0 | # | |
1 | # The MySQL database server configuration file. | |
2 | # | |
3 | [client] | |
4 | user = {user} | |
5 | port = {port} | |
6 | host = {host} | |
7 | password = {password} | |
8 | database = {db} | |
9 | default-character-set = utf8 | |
10 | ||
11 | [client_with_unix_socket] | |
12 | user = {user} | |
13 | port = {port} | |
14 | host = {host} | |
15 | password = {password} | |
16 | database = {db} | |
17 | default-character-set = utf8 |
0 | # | |
1 | # The MySQL database server configuration file. | |
2 | # | |
3 | [client] | |
4 | user = {user} | |
5 | port = {port} | |
6 | host = {host} | |
7 | password = {password} | |
8 | database = {db} | |
9 | socket = /var/run/mysqld/mysqld.sock | |
10 | default-character-set = utf8 | |
11 | ||
12 | [client_with_unix_socket] | |
13 | user = {user} | |
14 | password = {password} | |
15 | database = {db} | |
16 | socket = /var/run/mysqld/mysqld.sock | |
17 | default-character-set = utf8 |
0 | # | |
1 | # The MySQL database server configuration file. | |
2 | # | |
3 | [client] | |
4 | user = {user} | |
5 | socket = {unix_socket} | |
6 | password = {password} | |
7 | database = {db} | |
8 | default-character-set = utf8 | |
9 | ||
10 | [client_with_unix_socket] | |
11 | user = {user} | |
12 | socket = {unix_socket} | |
13 | password = {password} | |
14 | database = {db} | |
15 | default-character-set = utf8 |
0 | import asyncio | |
0 | import pytest | |
1 | from sqlalchemy import bindparam | |
2 | from sqlalchemy import MetaData, Table, Column, Integer, String | |
3 | ||
1 | 4 | from aiomysql import sa |
2 | from sqlalchemy import bindparam | |
3 | 5 | |
4 | import os | |
5 | import unittest | |
6 | ||
7 | from sqlalchemy import MetaData, Table, Column, Integer, String | |
8 | 6 | |
9 | 7 | meta = MetaData() |
10 | 8 | tbl = Table('sa_tbl_cache_test', meta, |
13 | 11 | Column('val', String(255))) |
14 | 12 | |
15 | 13 | |
16 | class TestCompiledCache(unittest.TestCase): | |
17 | def setUp(self): | |
18 | self.loop = asyncio.new_event_loop() | |
19 | asyncio.set_event_loop(None) | |
20 | self.host = os.environ.get('MYSQL_HOST', 'localhost') | |
21 | self.port = int(os.environ.get('MYSQL_PORT', 3306)) | |
22 | self.user = os.environ.get('MYSQL_USER', 'root') | |
23 | self.db = os.environ.get('MYSQL_DB', 'test_pymysql') | |
24 | self.password = os.environ.get('MYSQL_PASSWORD', '') | |
25 | self.engine = self.loop.run_until_complete(self.make_engine()) | |
26 | self.loop.run_until_complete(self.start()) | |
14 | @pytest.fixture() | |
15 | def make_engine(mysql_params, connection): | |
16 | async def _make_engine(**kwargs): | |
17 | if "unix_socket" in mysql_params: | |
18 | conn_args = {"unix_socket": mysql_params["unix_socket"]} | |
19 | else: | |
20 | conn_args = { | |
21 | "host": mysql_params['host'], | |
22 | "port": mysql_params['port'], | |
23 | } | |
27 | 24 | |
28 | def tearDown(self): | |
29 | self.engine.terminate() | |
30 | self.loop.run_until_complete(self.engine.wait_closed()) | |
31 | self.loop.close() | |
25 | return (await sa.create_engine(db=mysql_params['db'], | |
26 | user=mysql_params['user'], | |
27 | password=mysql_params['password'], | |
28 | minsize=10, | |
29 | **conn_args, | |
30 | **kwargs)) | |
32 | 31 | |
33 | async def make_engine(self, **kwargs): | |
34 | return (await sa.create_engine(db=self.db, | |
35 | user=self.user, | |
36 | password=self.password, | |
37 | host=self.host, | |
38 | port=self.port, | |
39 | loop=self.loop, | |
40 | minsize=10, | |
41 | **kwargs)) | |
32 | return _make_engine | |
42 | 33 | |
43 | async def start(self): | |
44 | async with self.engine.acquire() as conn: | |
45 | tx = await conn.begin() | |
46 | await conn.execute("DROP TABLE IF EXISTS " | |
47 | "sa_tbl_cache_test") | |
48 | await conn.execute("CREATE TABLE sa_tbl_cache_test" | |
49 | "(id serial, val varchar(255))") | |
50 | await conn.execute(tbl.insert().values(val='some_val_1')) | |
51 | await conn.execute(tbl.insert().values(val='some_val_2')) | |
52 | await conn.execute(tbl.insert().values(val='some_val_3')) | |
53 | await tx.commit() | |
54 | 34 | |
55 | def test_cache(self): | |
56 | async def go(): | |
57 | cache = dict() | |
58 | engine = await self.make_engine(compiled_cache=cache) | |
59 | async with engine.acquire() as conn: | |
60 | # check select with params not added to cache | |
61 | q = tbl.select().where(tbl.c.val == 'some_val_1') | |
62 | cursor = await conn.execute(q) | |
63 | row = await cursor.fetchone() | |
64 | self.assertEqual('some_val_1', row.val) | |
65 | self.assertEqual(0, len(cache)) | |
35 | async def start(engine): | |
36 | async with engine.acquire() as conn: | |
37 | tx = await conn.begin() | |
38 | await conn.execute("DROP TABLE IF EXISTS " | |
39 | "sa_tbl_cache_test") | |
40 | await conn.execute("CREATE TABLE sa_tbl_cache_test" | |
41 | "(id serial, val varchar(255))") | |
42 | await conn.execute(tbl.insert().values(val='some_val_1')) | |
43 | await conn.execute(tbl.insert().values(val='some_val_2')) | |
44 | await conn.execute(tbl.insert().values(val='some_val_3')) | |
45 | await tx.commit() | |
66 | 46 | |
67 | # check select with bound params added to cache | |
68 | select_by_val = tbl.select().where( | |
69 | tbl.c.val == bindparam('value') | |
70 | ) | |
71 | cursor = await conn.execute( | |
72 | select_by_val, {'value': 'some_val_3'} | |
73 | ) | |
74 | row = await cursor.fetchone() | |
75 | self.assertEqual('some_val_3', row.val) | |
76 | self.assertEqual(1, len(cache)) | |
77 | 47 | |
78 | cursor = await conn.execute( | |
79 | select_by_val, value='some_val_2' | |
80 | ) | |
81 | row = await cursor.fetchone() | |
82 | self.assertEqual('some_val_2', row.val) | |
83 | self.assertEqual(1, len(cache)) | |
48 | @pytest.mark.run_loop | |
49 | async def test_dialect(make_engine): | |
50 | cache = dict() | |
51 | engine = await make_engine(compiled_cache=cache) | |
52 | await start(engine) | |
84 | 53 | |
85 | select_all = tbl.select() | |
86 | cursor = await conn.execute(select_all) | |
87 | rows = await cursor.fetchall() | |
88 | self.assertEqual(3, len(rows)) | |
89 | self.assertEqual(2, len(cache)) | |
54 | async with engine.acquire() as conn: | |
55 | # check select with params not added to cache | |
56 | q = tbl.select().where(tbl.c.val == 'some_val_1') | |
57 | cursor = await conn.execute(q) | |
58 | row = await cursor.fetchone() | |
59 | assert 'some_val_1' == row.val | |
60 | assert 0 == len(cache) | |
90 | 61 | |
91 | # check insert with bound params not added to cache | |
92 | await conn.execute(tbl.insert().values(val='some_val_4')) | |
93 | self.assertEqual(2, len(cache)) | |
62 | # check select with bound params added to cache | |
63 | select_by_val = tbl.select().where( | |
64 | tbl.c.val == bindparam('value') | |
65 | ) | |
66 | cursor = await conn.execute( | |
67 | select_by_val, {'value': 'some_val_3'} | |
68 | ) | |
69 | row = await cursor.fetchone() | |
70 | assert 'some_val_3' == row.val | |
71 | assert 1 == len(cache) | |
94 | 72 | |
95 | # check insert with bound params added to cache | |
96 | q = tbl.insert().values(val=bindparam('value')) | |
97 | await conn.execute(q, value='some_val_5') | |
98 | self.assertEqual(3, len(cache)) | |
73 | cursor = await conn.execute( | |
74 | select_by_val, value='some_val_2' | |
75 | ) | |
76 | row = await cursor.fetchone() | |
77 | assert 'some_val_2' == row.val | |
78 | assert 1 == len(cache) | |
99 | 79 | |
100 | await conn.execute(q, value='some_val_6') | |
101 | self.assertEqual(3, len(cache)) | |
80 | select_all = tbl.select() | |
81 | cursor = await conn.execute(select_all) | |
82 | rows = await cursor.fetchall() | |
83 | assert 3 == len(rows) | |
84 | assert 2 == len(cache) | |
102 | 85 | |
103 | await conn.execute(q, {'value': 'some_val_7'}) | |
104 | self.assertEqual(3, len(cache)) | |
86 | # check insert with bound params not added to cache | |
87 | await conn.execute(tbl.insert().values(val='some_val_4')) | |
88 | assert 2 == len(cache) | |
105 | 89 | |
106 | cursor = await conn.execute(select_all) | |
107 | rows = await cursor.fetchall() | |
108 | self.assertEqual(7, len(rows)) | |
109 | self.assertEqual(3, len(cache)) | |
90 | # check insert with bound params added to cache | |
91 | q = tbl.insert().values(val=bindparam('value')) | |
92 | await conn.execute(q, value='some_val_5') | |
93 | assert 3 == len(cache) | |
110 | 94 | |
111 | # check update with params not added to cache | |
112 | q = tbl.update().where( | |
113 | tbl.c.val == 'some_val_1' | |
114 | ).values(val='updated_val_1') | |
115 | await conn.execute(q) | |
116 | self.assertEqual(3, len(cache)) | |
117 | cursor = await conn.execute( | |
118 | select_by_val, value='updated_val_1' | |
119 | ) | |
120 | row = await cursor.fetchone() | |
121 | self.assertEqual('updated_val_1', row.val) | |
95 | await conn.execute(q, value='some_val_6') | |
96 | assert 3 == len(cache) | |
122 | 97 | |
123 | # check update with bound params added to cache | |
124 | q = tbl.update().where( | |
125 | tbl.c.val == bindparam('value') | |
126 | ).values(val=bindparam('update')) | |
127 | await conn.execute( | |
128 | q, value='some_val_2', update='updated_val_2' | |
129 | ) | |
130 | self.assertEqual(4, len(cache)) | |
131 | cursor = await conn.execute( | |
132 | select_by_val, value='updated_val_2' | |
133 | ) | |
134 | row = await cursor.fetchone() | |
135 | self.assertEqual('updated_val_2', row.val) | |
98 | await conn.execute(q, {'value': 'some_val_7'}) | |
99 | assert 3 == len(cache) | |
136 | 100 | |
137 | self.loop.run_until_complete(go()) | |
101 | cursor = await conn.execute(select_all) | |
102 | rows = await cursor.fetchall() | |
103 | assert 7 == len(rows) | |
104 | assert 3 == len(cache) | |
105 | ||
106 | # check update with params not added to cache | |
107 | q = tbl.update().where( | |
108 | tbl.c.val == 'some_val_1' | |
109 | ).values(val='updated_val_1') | |
110 | await conn.execute(q) | |
111 | assert 3 == len(cache) | |
112 | cursor = await conn.execute( | |
113 | select_by_val, value='updated_val_1' | |
114 | ) | |
115 | row = await cursor.fetchone() | |
116 | assert 'updated_val_1' == row.val | |
117 | ||
118 | # check update with bound params added to cache | |
119 | q = tbl.update().where( | |
120 | tbl.c.val == bindparam('value') | |
121 | ).values(val=bindparam('update')) | |
122 | await conn.execute( | |
123 | q, value='some_val_2', update='updated_val_2' | |
124 | ) | |
125 | assert 4 == len(cache) | |
126 | cursor = await conn.execute( | |
127 | select_by_val, value='updated_val_2' | |
128 | ) | |
129 | row = await cursor.fetchone() | |
130 | assert 'updated_val_2' == row.val |
0 | import asyncio | |
1 | import aiomysql | |
2 | from aiomysql import connect, sa, Cursor | |
3 | ||
4 | import os | |
5 | import unittest | |
6 | 0 | from unittest import mock |
7 | 1 | |
2 | import pytest | |
8 | 3 | from sqlalchemy import MetaData, Table, Column, Integer, String |
9 | 4 | from sqlalchemy.schema import DropTable, CreateTable |
10 | 5 | from sqlalchemy.sql.expression import bindparam |
11 | 6 | |
7 | import aiomysql | |
8 | from aiomysql import sa, Cursor | |
12 | 9 | |
13 | 10 | meta = MetaData() |
14 | 11 | tbl = Table('sa_tbl', meta, |
17 | 14 | Column('name', String(255))) |
18 | 15 | |
19 | 16 | |
20 | class TestSAConnection(unittest.TestCase): | |
21 | def setUp(self): | |
22 | self.loop = asyncio.new_event_loop() | |
23 | asyncio.set_event_loop(None) | |
24 | self.host = os.environ.get('MYSQL_HOST', 'localhost') | |
25 | self.port = int(os.environ.get('MYSQL_PORT', 3306)) | |
26 | self.user = os.environ.get('MYSQL_USER', 'root') | |
27 | self.db = os.environ.get('MYSQL_DB', 'test_pymysql') | |
28 | self.password = os.environ.get('MYSQL_PASSWORD', '') | |
29 | ||
30 | def tearDown(self): | |
31 | self.loop.close() | |
32 | ||
33 | async def connect(self, **kwargs): | |
34 | conn = await connect(db=self.db, | |
35 | user=self.user, | |
36 | password=self.password, | |
37 | host=self.host, | |
38 | loop=self.loop, | |
39 | port=self.port, | |
40 | **kwargs) | |
17 | @pytest.fixture() | |
18 | def sa_connect(connection_creator): | |
19 | async def connect(**kwargs): | |
20 | conn = await connection_creator(**kwargs) | |
41 | 21 | await conn.autocommit(True) |
42 | 22 | cur = await conn.cursor() |
43 | 23 | await cur.execute("DROP TABLE IF EXISTS sa_tbl") |
51 | 31 | engine = mock.Mock() |
52 | 32 | engine.dialect = sa.engine._dialect |
53 | 33 | return sa.SAConnection(conn, engine) |
54 | ||
55 | def test_execute_text_select(self): | |
56 | async def go(): | |
57 | conn = await self.connect() | |
58 | res = await conn.execute("SELECT * FROM sa_tbl;") | |
59 | self.assertIsInstance(res.cursor, Cursor) | |
60 | self.assertEqual(('id', 'name'), res.keys()) | |
61 | rows = await res.fetchall() | |
62 | self.assertTrue(res.closed) | |
63 | self.assertIsNone(res.cursor) | |
64 | self.assertEqual(1, len(rows)) | |
65 | row = rows[0] | |
66 | self.assertEqual(1, row[0]) | |
67 | self.assertEqual(1, row['id']) | |
68 | self.assertEqual(1, row.id) | |
69 | self.assertEqual('first', row[1]) | |
70 | self.assertEqual('first', row['name']) | |
71 | self.assertEqual('first', row.name) | |
72 | # TODO: fix this | |
73 | await conn._connection.commit() | |
74 | self.loop.run_until_complete(go()) | |
75 | ||
76 | def test_execute_sa_select(self): | |
77 | async def go(): | |
78 | conn = await self.connect() | |
79 | res = await conn.execute(tbl.select()) | |
80 | self.assertIsInstance(res.cursor, Cursor) | |
81 | self.assertEqual(('id', 'name'), res.keys()) | |
82 | rows = await res.fetchall() | |
83 | self.assertTrue(res.closed) | |
84 | self.assertIsNone(res.cursor) | |
85 | self.assertTrue(res.returns_rows) | |
86 | ||
87 | self.assertEqual(1, len(rows)) | |
88 | row = rows[0] | |
89 | self.assertEqual(1, row[0]) | |
90 | self.assertEqual(1, row['id']) | |
91 | self.assertEqual(1, row.id) | |
92 | self.assertEqual('first', row[1]) | |
93 | self.assertEqual('first', row['name']) | |
94 | self.assertEqual('first', row.name) | |
95 | # TODO: fix this | |
96 | await conn._connection.commit() | |
97 | ||
98 | self.loop.run_until_complete(go()) | |
99 | ||
100 | def test_execute_sa_insert_with_dict(self): | |
101 | async def go(): | |
102 | conn = await self.connect() | |
103 | await conn.execute(tbl.insert(), {"id": 2, "name": "second"}) | |
104 | ||
105 | res = await conn.execute(tbl.select()) | |
106 | rows = await res.fetchall() | |
107 | self.assertEqual(2, len(rows)) | |
108 | self.assertEqual((1, 'first'), rows[0]) | |
109 | self.assertEqual((2, 'second'), rows[1]) | |
110 | ||
111 | self.loop.run_until_complete(go()) | |
112 | ||
113 | def test_execute_sa_insert_with_tuple(self): | |
114 | async def go(): | |
115 | conn = await self.connect() | |
116 | await conn.execute(tbl.insert(), (2, "second")) | |
117 | ||
118 | res = await conn.execute(tbl.select()) | |
119 | rows = await res.fetchall() | |
120 | self.assertEqual(2, len(rows)) | |
121 | self.assertEqual((1, 'first'), rows[0]) | |
122 | self.assertEqual((2, 'second'), rows[1]) | |
123 | ||
124 | self.loop.run_until_complete(go()) | |
125 | ||
126 | def test_execute_sa_insert_named_params(self): | |
127 | async def go(): | |
128 | conn = await self.connect() | |
129 | await conn.execute(tbl.insert(), id=2, name="second") | |
130 | ||
131 | res = await conn.execute(tbl.select()) | |
132 | rows = await res.fetchall() | |
133 | self.assertEqual(2, len(rows)) | |
134 | self.assertEqual((1, 'first'), rows[0]) | |
135 | self.assertEqual((2, 'second'), rows[1]) | |
136 | ||
137 | self.loop.run_until_complete(go()) | |
138 | ||
139 | def test_execute_sa_insert_positional_params(self): | |
140 | async def go(): | |
141 | conn = await self.connect() | |
142 | await conn.execute(tbl.insert(), 2, "second") | |
143 | ||
144 | res = await conn.execute(tbl.select()) | |
145 | rows = await res.fetchall() | |
146 | self.assertEqual(2, len(rows)) | |
147 | self.assertEqual((1, 'first'), rows[0]) | |
148 | self.assertEqual((2, 'second'), rows[1]) | |
149 | ||
150 | self.loop.run_until_complete(go()) | |
151 | ||
152 | def test_scalar(self): | |
153 | async def go(): | |
154 | conn = await self.connect() | |
155 | res = await conn.scalar(tbl.count()) | |
156 | self.assertEqual(1, res) | |
157 | ||
158 | self.loop.run_until_complete(go()) | |
159 | ||
160 | def test_scalar_None(self): | |
161 | async def go(): | |
162 | conn = await self.connect() | |
163 | await conn.execute(tbl.delete()) | |
164 | res = await conn.scalar(tbl.select()) | |
165 | self.assertIsNone(res) | |
166 | # TODO: fix this | |
167 | await conn._connection.commit() | |
168 | ||
169 | self.loop.run_until_complete(go()) | |
170 | ||
171 | def test_row_proxy(self): | |
172 | async def go(): | |
173 | conn = await self.connect() | |
174 | res = await conn.execute(tbl.select()) | |
175 | rows = await res.fetchall() | |
176 | row = rows[0] | |
177 | row2 = await (await conn.execute(tbl.select())).first() | |
178 | self.assertEqual(2, len(row)) | |
179 | self.assertEqual(['id', 'name'], list(row)) | |
180 | self.assertIn('id', row) | |
181 | self.assertNotIn('unknown', row) | |
182 | self.assertEqual('first', row.name) | |
183 | self.assertEqual('first', row[tbl.c.name]) | |
184 | with self.assertRaises(AttributeError): | |
185 | row.unknown | |
186 | self.assertEqual("(1, 'first')", repr(row)) | |
187 | self.assertEqual((1, 'first'), row.as_tuple()) | |
188 | self.assertNotEqual((555, 'other'), row.as_tuple()) | |
189 | self.assertEqual(row2, row) | |
190 | self.assertFalse(row2 != row) | |
191 | self.assertNotEqual(5, row) | |
192 | # TODO: fix this | |
193 | await conn._connection.commit() | |
194 | ||
195 | self.loop.run_until_complete(go()) | |
196 | ||
197 | def test_insert(self): | |
198 | async def go(): | |
199 | conn = await self.connect() | |
200 | res = await conn.execute(tbl.insert().values(name='second')) | |
201 | self.assertEqual(1, res.rowcount) | |
202 | self.assertEqual(2, res.lastrowid) | |
203 | ||
204 | self.loop.run_until_complete(go()) | |
205 | ||
206 | def test_raw_insert(self): | |
207 | async def go(): | |
208 | conn = await self.connect() | |
209 | await conn.execute( | |
210 | "INSERT INTO sa_tbl (name) VALUES ('third')") | |
211 | res = await conn.execute(tbl.select()) | |
212 | self.assertEqual(2, res.rowcount) | |
213 | self.assertEqual(('id', 'name'), res.keys()) | |
214 | self.assertTrue(res.returns_rows) | |
215 | ||
216 | rows = await res.fetchall() | |
217 | self.assertEqual(2, len(rows)) | |
218 | self.assertEqual(2, rows[1].id) | |
219 | self.loop.run_until_complete(go()) | |
220 | ||
221 | def test_raw_insert_with_params(self): | |
222 | async def go(): | |
223 | conn = await self.connect() | |
224 | res = await conn.execute( | |
225 | "INSERT INTO sa_tbl (id, name) VALUES (%s, %s)", | |
226 | 2, 'third') | |
227 | res = await conn.execute(tbl.select()) | |
228 | self.assertEqual(2, res.rowcount) | |
229 | self.assertEqual(('id', 'name'), res.keys()) | |
230 | self.assertTrue(res.returns_rows) | |
231 | ||
232 | rows = await res.fetchall() | |
233 | self.assertEqual(2, len(rows)) | |
234 | self.assertEqual(2, rows[1].id) | |
235 | self.loop.run_until_complete(go()) | |
236 | ||
237 | def test_raw_insert_with_params_dict(self): | |
238 | async def go(): | |
239 | conn = await self.connect() | |
240 | res = await conn.execute( | |
241 | "INSERT INTO sa_tbl (id, name) VALUES (%(id)s, %(name)s)", | |
242 | {'id': 2, 'name': 'third'}) | |
243 | res = await conn.execute(tbl.select()) | |
244 | self.assertEqual(2, res.rowcount) | |
245 | self.assertEqual(('id', 'name'), res.keys()) | |
246 | self.assertTrue(res.returns_rows) | |
247 | ||
248 | rows = await res.fetchall() | |
249 | self.assertEqual(2, len(rows)) | |
250 | self.assertEqual(2, rows[1].id) | |
251 | self.loop.run_until_complete(go()) | |
252 | ||
253 | def test_raw_insert_with_named_params(self): | |
254 | async def go(): | |
255 | conn = await self.connect() | |
256 | res = await conn.execute( | |
257 | "INSERT INTO sa_tbl (id, name) VALUES (%(id)s, %(name)s)", | |
258 | id=2, name='third') | |
259 | res = await conn.execute(tbl.select()) | |
260 | self.assertEqual(2, res.rowcount) | |
261 | self.assertEqual(('id', 'name'), res.keys()) | |
262 | self.assertTrue(res.returns_rows) | |
263 | ||
264 | rows = await res.fetchall() | |
265 | self.assertEqual(2, len(rows)) | |
266 | self.assertEqual(2, rows[1].id) | |
267 | self.loop.run_until_complete(go()) | |
268 | ||
269 | def test_raw_insert_with_executemany(self): | |
270 | async def go(): | |
271 | conn = await self.connect() | |
272 | # with self.assertRaises(sa.ArgumentError): | |
273 | await conn.execute( | |
274 | "INSERT INTO sa_tbl (id, name) VALUES (%(id)s, %(name)s)", | |
275 | [{"id": 2, "name": 'third'}, {"id": 3, "name": 'forth'}]) | |
276 | await conn.execute( | |
277 | tbl.update().where( | |
278 | tbl.c.id == bindparam("id") | |
279 | ).values( | |
280 | {"name": bindparam("name")} | |
281 | ), | |
282 | [ | |
283 | {"id": 2, "name": "t2"}, | |
284 | {"id": 3, "name": "t3"} | |
285 | ] | |
286 | ) | |
287 | with self.assertRaises(sa.ArgumentError): | |
288 | await conn.execute( | |
289 | DropTable(tbl), | |
290 | [{}, {}] | |
291 | ) | |
292 | with self.assertRaises(sa.ArgumentError): | |
293 | await conn.execute( | |
294 | {}, | |
295 | [{}, {}] | |
296 | ) | |
297 | self.loop.run_until_complete(go()) | |
298 | ||
299 | def test_raw_select_with_wildcard(self): | |
300 | async def go(): | |
301 | conn = await self.connect() | |
302 | await conn.execute( | |
303 | 'SELECT * FROM sa_tbl WHERE name LIKE "%test%"') | |
304 | self.loop.run_until_complete(go()) | |
305 | ||
306 | def test_delete(self): | |
307 | async def go(): | |
308 | conn = await self.connect() | |
309 | ||
310 | res = await conn.execute(tbl.delete().where(tbl.c.id == 1)) | |
311 | ||
312 | self.assertEqual((), res.keys()) | |
313 | self.assertEqual(1, res.rowcount) | |
314 | self.assertFalse(res.returns_rows) | |
315 | self.assertTrue(res.closed) | |
316 | self.assertIsNone(res.cursor) | |
317 | ||
318 | self.loop.run_until_complete(go()) | |
319 | ||
320 | def test_double_close(self): | |
321 | async def go(): | |
322 | conn = await self.connect() | |
323 | res = await conn.execute("SELECT 1") | |
324 | await res.close() | |
325 | self.assertTrue(res.closed) | |
326 | self.assertIsNone(res.cursor) | |
327 | await res.close() | |
328 | self.assertTrue(res.closed) | |
329 | self.assertIsNone(res.cursor) | |
330 | ||
331 | self.loop.run_until_complete(go()) | |
332 | ||
333 | @unittest.skip("Find out how to close cursor on __del__ method") | |
334 | def test_weakrefs(self): | |
335 | async def go(): | |
336 | conn = await self.connect() | |
337 | self.assertEqual(0, len(conn._weak_results)) | |
338 | res = await conn.execute("SELECT 1") | |
339 | self.assertEqual(1, len(conn._weak_results)) | |
340 | cur = res.cursor | |
341 | self.assertFalse(cur.closed) | |
342 | # TODO: fix this, how close cursor if result was deleted | |
343 | # yield from cur.close() | |
344 | del res | |
345 | self.assertTrue(cur.closed) | |
346 | self.assertEqual(0, len(conn._weak_results)) | |
347 | ||
348 | self.loop.run_until_complete(go()) | |
349 | ||
350 | def test_fetchall(self): | |
351 | async def go(): | |
352 | conn = await self.connect() | |
353 | await conn.execute(tbl.insert().values(name='second')) | |
354 | ||
355 | res = await conn.execute(tbl.select()) | |
356 | rows = await res.fetchall() | |
357 | self.assertEqual(2, len(rows)) | |
358 | self.assertTrue(res.closed) | |
359 | self.assertTrue(res.returns_rows) | |
360 | self.assertEqual([(1, 'first'), (2, 'second')], rows) | |
361 | ||
362 | self.loop.run_until_complete(go()) | |
363 | ||
364 | def test_fetchall_closed(self): | |
365 | async def go(): | |
366 | conn = await self.connect() | |
367 | await conn.execute(tbl.insert().values(name='second')) | |
368 | ||
369 | res = await conn.execute(tbl.select()) | |
370 | await res.close() | |
371 | with self.assertRaises(sa.ResourceClosedError): | |
372 | await res.fetchall() | |
373 | ||
374 | self.loop.run_until_complete(go()) | |
375 | ||
376 | def test_fetchall_not_returns_rows(self): | |
377 | async def go(): | |
378 | conn = await self.connect() | |
379 | res = await conn.execute(tbl.delete()) | |
380 | with self.assertRaises(sa.ResourceClosedError): | |
381 | await res.fetchall() | |
382 | ||
383 | self.loop.run_until_complete(go()) | |
384 | ||
385 | def test_fetchone_closed(self): | |
386 | async def go(): | |
387 | conn = await self.connect() | |
388 | await conn.execute(tbl.insert().values(name='second')) | |
389 | ||
390 | res = await conn.execute(tbl.select()) | |
391 | await res.close() | |
392 | with self.assertRaises(sa.ResourceClosedError): | |
393 | await res.fetchone() | |
394 | ||
395 | self.loop.run_until_complete(go()) | |
396 | ||
397 | def test_first_not_returns_rows(self): | |
398 | async def go(): | |
399 | conn = await self.connect() | |
400 | res = await conn.execute(tbl.delete()) | |
401 | with self.assertRaises(sa.ResourceClosedError): | |
402 | await res.first() | |
403 | ||
404 | self.loop.run_until_complete(go()) | |
405 | ||
406 | def test_fetchmany(self): | |
407 | async def go(): | |
408 | conn = await self.connect() | |
409 | await conn.execute(tbl.insert().values(name='second')) | |
410 | ||
411 | res = await conn.execute(tbl.select()) | |
412 | rows = await res.fetchmany() | |
413 | self.assertEqual(1, len(rows)) | |
414 | self.assertFalse(res.closed) | |
415 | self.assertTrue(res.returns_rows) | |
416 | self.assertEqual([(1, 'first')], rows) | |
417 | ||
418 | self.loop.run_until_complete(go()) | |
419 | ||
420 | def test_fetchmany_with_size(self): | |
421 | async def go(): | |
422 | conn = await self.connect() | |
423 | await conn.execute(tbl.insert().values(name='second')) | |
424 | ||
425 | res = await conn.execute(tbl.select()) | |
426 | rows = await res.fetchmany(100) | |
427 | self.assertEqual(2, len(rows)) | |
428 | self.assertFalse(res.closed) | |
429 | self.assertTrue(res.returns_rows) | |
430 | self.assertEqual([(1, 'first'), (2, 'second')], rows) | |
431 | ||
432 | self.loop.run_until_complete(go()) | |
433 | ||
434 | def test_fetchmany_closed(self): | |
435 | async def go(): | |
436 | conn = await self.connect() | |
437 | await conn.execute(tbl.insert().values(name='second')) | |
438 | ||
439 | res = await conn.execute(tbl.select()) | |
440 | await res.close() | |
441 | with self.assertRaises(sa.ResourceClosedError): | |
442 | await res.fetchmany() | |
443 | ||
444 | self.loop.run_until_complete(go()) | |
445 | ||
446 | def test_fetchmany_with_size_closed(self): | |
447 | async def go(): | |
448 | conn = await self.connect() | |
449 | await conn.execute(tbl.insert().values(name='second')) | |
450 | ||
451 | res = await conn.execute(tbl.select()) | |
452 | await res.close() | |
453 | with self.assertRaises(sa.ResourceClosedError): | |
454 | await res.fetchmany(5555) | |
455 | ||
456 | self.loop.run_until_complete(go()) | |
457 | ||
458 | def test_fetchmany_not_returns_rows(self): | |
459 | async def go(): | |
460 | conn = await self.connect() | |
461 | res = await conn.execute(tbl.delete()) | |
462 | with self.assertRaises(sa.ResourceClosedError): | |
463 | await res.fetchmany() | |
464 | ||
465 | self.loop.run_until_complete(go()) | |
466 | ||
467 | def test_fetchmany_close_after_last_read(self): | |
468 | async def go(): | |
469 | conn = await self.connect() | |
470 | ||
471 | res = await conn.execute(tbl.select()) | |
472 | rows = await res.fetchmany() | |
473 | self.assertEqual(1, len(rows)) | |
474 | self.assertFalse(res.closed) | |
475 | self.assertTrue(res.returns_rows) | |
476 | self.assertEqual([(1, 'first')], rows) | |
477 | rows2 = await res.fetchmany() | |
478 | self.assertEqual(0, len(rows2)) | |
479 | self.assertTrue(res.closed) | |
480 | ||
481 | self.loop.run_until_complete(go()) | |
482 | ||
483 | def test_create_table(self, **kwargs): | |
484 | async def go(): | |
485 | conn = await self.connect() | |
486 | res = await conn.execute(DropTable(tbl)) | |
487 | with self.assertRaises(sa.ResourceClosedError): | |
488 | await res.fetchmany() | |
489 | ||
490 | with self.assertRaises(aiomysql.ProgrammingError): | |
491 | await conn.execute("SELECT * FROM sa_tbl") | |
492 | ||
493 | res = await conn.execute(CreateTable(tbl)) | |
494 | with self.assertRaises(sa.ResourceClosedError): | |
495 | await res.fetchmany() | |
496 | ||
497 | res = await conn.execute("SELECT * FROM sa_tbl") | |
498 | self.assertEqual(0, len(await res.fetchall())) | |
499 | ||
500 | self.loop.run_until_complete(go()) | |
34 | return connect | |
35 | ||
36 | ||
@pytest.mark.run_loop
async def test_execute_text_select(sa_connect):
    """A raw SQL string produces a result proxy whose rows support
    positional, mapping and attribute access alike."""
    connection = await sa_connect()
    result = await connection.execute("SELECT * FROM sa_tbl;")
    assert isinstance(result.cursor, Cursor)
    assert result.keys() == ('id', 'name')
    fetched = await result.fetchall()
    # fetchall() drains and closes the result proxy.
    assert result.closed
    assert result.cursor is None
    assert len(fetched) == 1
    first_row = fetched[0]
    assert first_row[0] == 1
    assert first_row['id'] == 1
    assert first_row.id == 1
    assert first_row[1] == 'first'
    assert first_row['name'] == 'first'
    assert first_row.name == 'first'
    # TODO: fix this
    await connection._connection.commit()
56 | ||
57 | ||
@pytest.mark.run_loop
async def test_execute_sa_select(sa_connect):
    """A SQLAlchemy select() behaves like the textual form: same keys,
    same row-proxy access styles."""
    connection = await sa_connect()
    result = await connection.execute(tbl.select())
    assert isinstance(result.cursor, Cursor)
    assert result.keys() == ('id', 'name')
    fetched = await result.fetchall()
    assert result.closed
    assert result.cursor is None
    assert result.returns_rows

    assert len(fetched) == 1
    first_row = fetched[0]
    assert first_row[0] == 1
    assert first_row['id'] == 1
    assert first_row.id == 1
    assert first_row[1] == 'first'
    assert first_row['name'] == 'first'
    assert first_row.name == 'first'
    # TODO: fix this
    await connection._connection.commit()
79 | ||
80 | ||
@pytest.mark.run_loop
async def test_execute_sa_insert_with_dict(sa_connect):
    """Insert parameters may be supplied as a single mapping."""
    connection = await sa_connect()
    await connection.execute(tbl.insert(), {"id": 2, "name": "second"})

    result = await connection.execute(tbl.select())
    fetched = await result.fetchall()
    assert len(fetched) == 2
    assert fetched[0] == (1, 'first')
    assert fetched[1] == (2, 'second')
91 | ||
92 | ||
@pytest.mark.run_loop
async def test_execute_sa_insert_with_tuple(sa_connect):
    """Insert parameters may be supplied as a positional tuple."""
    connection = await sa_connect()
    await connection.execute(tbl.insert(), (2, "second"))

    result = await connection.execute(tbl.select())
    fetched = await result.fetchall()
    assert len(fetched) == 2
    assert fetched[0] == (1, 'first')
    assert fetched[1] == (2, 'second')
103 | ||
104 | ||
@pytest.mark.run_loop
async def test_execute_sa_insert_named_params(sa_connect):
    """Insert parameters may be supplied as keyword arguments."""
    connection = await sa_connect()
    await connection.execute(tbl.insert(), id=2, name="second")

    result = await connection.execute(tbl.select())
    fetched = await result.fetchall()
    assert len(fetched) == 2
    assert fetched[0] == (1, 'first')
    assert fetched[1] == (2, 'second')
115 | ||
116 | ||
@pytest.mark.run_loop
async def test_execute_sa_insert_positional_params(sa_connect):
    """Insert parameters may be supplied as bare positional arguments."""
    connection = await sa_connect()
    await connection.execute(tbl.insert(), 2, "second")

    result = await connection.execute(tbl.select())
    fetched = await result.fetchall()
    assert len(fetched) == 2
    assert fetched[0] == (1, 'first')
    assert fetched[1] == (2, 'second')
127 | ||
128 | ||
@pytest.mark.run_loop
async def test_scalar(sa_connect):
    """scalar() returns the first column of the first row.

    NOTE(review): ``Table.count()`` is deprecated in modern SQLAlchemy;
    consider ``select([func.count()]).select_from(tbl)`` on upgrade.
    """
    connection = await sa_connect()
    count = await connection.scalar(tbl.count())
    assert count == 1
134 | ||
135 | ||
@pytest.mark.run_loop
async def test_scalar_None(sa_connect):
    """scalar() yields None when the query matches no rows."""
    connection = await sa_connect()
    await connection.execute(tbl.delete())
    value = await connection.scalar(tbl.select())
    assert value is None
    # TODO: fix this
    await connection._connection.commit()
144 | ||
145 | ||
@pytest.mark.run_loop
async def test_row_proxy(sa_connect):
    """Exercise the row-proxy container protocol: len, iteration,
    membership, item/column/attribute access, repr and equality."""
    connection = await sa_connect()
    result = await connection.execute(tbl.select())
    fetched = await result.fetchall()
    row = fetched[0]
    same_row = await (await connection.execute(tbl.select())).first()
    assert len(row) == 2
    assert list(row) == ['id', 'name']
    assert 'id' in row
    assert 'unknown' not in row
    assert row.name == 'first'
    # Column objects are accepted as keys, too.
    assert row[tbl.c.name] == 'first'
    with pytest.raises(AttributeError):
        row.unknown
    assert repr(row) == "(1, 'first')"
    assert row.as_tuple() == (1, 'first')
    assert row.as_tuple() != (555, 'other')
    assert row == same_row
    assert row != 5
    # TODO: fix this
    await connection._connection.commit()
168 | ||
169 | ||
@pytest.mark.run_loop
async def test_insert(sa_connect):
    """INSERT reports one affected row and the generated primary key."""
    connection = await sa_connect()
    result = await connection.execute(tbl.insert().values(name='second'))
    assert result.rowcount == 1
    assert result.lastrowid == 2
176 | ||
177 | ||
@pytest.mark.run_loop
async def test_raw_insert(sa_connect):
    """A textual INSERT is visible to a following SQLAlchemy SELECT."""
    connection = await sa_connect()
    await connection.execute(
        "INSERT INTO sa_tbl (name) VALUES ('third')")
    result = await connection.execute(tbl.select())
    assert result.rowcount == 2
    assert result.keys() == ('id', 'name')
    assert result.returns_rows

    fetched = await result.fetchall()
    assert len(fetched) == 2
    assert fetched[1].id == 2
191 | ||
192 | ||
@pytest.mark.run_loop
async def test_raw_insert_with_params(sa_connect):
    """Positional %s placeholders in a textual INSERT are bound from
    positional arguments.

    The INSERT's result is never inspected, so it is not kept in a
    local; the previous code bound it to ``res`` only to overwrite it
    with the SELECT's result on the next statement (a dead binding).
    """
    conn = await sa_connect()
    await conn.execute(
        "INSERT INTO sa_tbl (id, name) VALUES (%s, %s)",
        2, 'third')
    res = await conn.execute(tbl.select())
    assert 2 == res.rowcount
    assert ('id', 'name') == res.keys()
    assert res.returns_rows

    rows = await res.fetchall()
    assert 2 == len(rows)
    assert 2 == rows[1].id
207 | ||
208 | ||
@pytest.mark.run_loop
async def test_raw_insert_with_params_dict(sa_connect):
    """Named %(key)s placeholders in a textual INSERT are bound from a
    parameter dict.

    The INSERT's result is never inspected, so it is not kept in a
    local; the previous ``res =`` binding was dead (overwritten by the
    following SELECT before any use).
    """
    conn = await sa_connect()
    await conn.execute(
        "INSERT INTO sa_tbl (id, name) VALUES (%(id)s, %(name)s)",
        {'id': 2, 'name': 'third'})
    res = await conn.execute(tbl.select())
    assert 2 == res.rowcount
    assert ('id', 'name') == res.keys()
    assert res.returns_rows

    rows = await res.fetchall()
    assert 2 == len(rows)
    assert 2 == rows[1].id
223 | ||
224 | ||
@pytest.mark.run_loop
async def test_raw_insert_with_named_params(sa_connect):
    """Named %(key)s placeholders in a textual INSERT are bound from
    keyword arguments.

    The INSERT's result is never inspected, so it is not kept in a
    local; the previous ``res =`` binding was dead (overwritten by the
    following SELECT before any use).
    """
    conn = await sa_connect()
    await conn.execute(
        "INSERT INTO sa_tbl (id, name) VALUES (%(id)s, %(name)s)",
        id=2, name='third')
    res = await conn.execute(tbl.select())
    assert 2 == res.rowcount
    assert ('id', 'name') == res.keys()
    assert res.returns_rows

    rows = await res.fetchall()
    assert 2 == len(rows)
    assert 2 == rows[1].id
239 | ||
240 | ||
@pytest.mark.run_loop
async def test_raw_insert_with_executemany(sa_connect):
    """executemany-style parameter lists work for textual INSERT and
    for bindparam-driven UPDATE, but are rejected for statements that
    take no parameters at all."""
    connection = await sa_connect()
    await connection.execute(
        "INSERT INTO sa_tbl (id, name) VALUES (%(id)s, %(name)s)",
        [{"id": 2, "name": 'third'}, {"id": 3, "name": 'forth'}])
    update_stmt = tbl.update().where(
        tbl.c.id == bindparam("id")
    ).values(
        {"name": bindparam("name")}
    )
    await connection.execute(
        update_stmt,
        [
            {"id": 2, "name": "t2"},
            {"id": 3, "name": "t3"}
        ]
    )
    # Parameter lists make no sense for DDL or non-statements.
    with pytest.raises(sa.ArgumentError):
        await connection.execute(DropTable(tbl), [{}, {}])
    with pytest.raises(sa.ArgumentError):
        await connection.execute({}, [{}, {}])
269 | ||
270 | ||
@pytest.mark.run_loop
async def test_raw_select_with_wildcard(sa_connect):
    """A literal % in a textual query must not be mistaken for a
    parameter placeholder."""
    connection = await sa_connect()
    await connection.execute(
        'SELECT * FROM sa_tbl WHERE name LIKE "%test%"')
276 | ||
277 | ||
@pytest.mark.run_loop
async def test_delete(sa_connect):
    """DELETE produces a row-less result that is closed immediately."""
    connection = await sa_connect()

    result = await connection.execute(tbl.delete().where(tbl.c.id == 1))

    assert result.keys() == ()
    assert result.rowcount == 1
    assert not result.returns_rows
    assert result.closed
    assert result.cursor is None
289 | ||
290 | ||
@pytest.mark.run_loop
async def test_double_close(sa_connect):
    """Closing an already-closed result proxy is a harmless no-op."""
    connection = await sa_connect()
    result = await connection.execute("SELECT 1")
    # Close twice; the state must be identical after each close.
    for _ in range(2):
        await result.close()
        assert result.closed
        assert result.cursor is None
301 | ||
302 | ||
@pytest.mark.run_loop
@pytest.mark.skip("Find out how to close cursor on __del__ method")
async def test_weakrefs(sa_connect):
    """Dropping the last reference to a result should close its cursor
    and remove it from the connection's weak-result registry."""
    connection = await sa_connect()
    assert len(connection._weak_results) == 0
    result = await connection.execute("SELECT 1")
    assert len(connection._weak_results) == 1
    cursor = result.cursor
    assert not cursor.closed
    # TODO: fix this, how close cursor if result was deleted
    # yield from cur.close()
    del result
    assert cursor.closed
    assert len(connection._weak_results) == 0
317 | ||
318 | ||
@pytest.mark.run_loop
async def test_fetchall(sa_connect):
    """fetchall() drains every row and closes the result."""
    connection = await sa_connect()
    await connection.execute(tbl.insert().values(name='second'))

    result = await connection.execute(tbl.select())
    fetched = await result.fetchall()
    assert len(fetched) == 2
    assert result.closed
    assert result.returns_rows
    assert fetched == [(1, 'first'), (2, 'second')]
330 | ||
331 | ||
@pytest.mark.run_loop
async def test_fetchall_closed(sa_connect):
    """fetchall() on an explicitly closed result raises."""
    connection = await sa_connect()
    await connection.execute(tbl.insert().values(name='second'))

    result = await connection.execute(tbl.select())
    await result.close()
    with pytest.raises(sa.ResourceClosedError):
        await result.fetchall()
341 | ||
342 | ||
@pytest.mark.run_loop
async def test_fetchall_not_returns_rows(sa_connect):
    """fetchall() on a row-less (DELETE) result raises."""
    connection = await sa_connect()
    result = await connection.execute(tbl.delete())
    with pytest.raises(sa.ResourceClosedError):
        await result.fetchall()
349 | ||
350 | ||
@pytest.mark.run_loop
async def test_fetchone_closed(sa_connect):
    """fetchone() on an explicitly closed result raises."""
    connection = await sa_connect()
    await connection.execute(tbl.insert().values(name='second'))

    result = await connection.execute(tbl.select())
    await result.close()
    with pytest.raises(sa.ResourceClosedError):
        await result.fetchone()
360 | ||
361 | ||
@pytest.mark.run_loop
async def test_first_not_returns_rows(sa_connect):
    """first() on a row-less (DELETE) result raises."""
    connection = await sa_connect()
    result = await connection.execute(tbl.delete())
    with pytest.raises(sa.ResourceClosedError):
        await result.first()
368 | ||
369 | ||
@pytest.mark.run_loop
async def test_fetchmany(sa_connect):
    """fetchmany() defaults to one row and leaves the result open."""
    connection = await sa_connect()
    await connection.execute(tbl.insert().values(name='second'))

    result = await connection.execute(tbl.select())
    batch = await result.fetchmany()
    assert len(batch) == 1
    assert not result.closed
    assert result.returns_rows
    assert batch == [(1, 'first')]
381 | ||
382 | ||
@pytest.mark.run_loop
async def test_fetchmany_with_size(sa_connect):
    """A size larger than the row count returns every row in one batch
    without closing the result."""
    connection = await sa_connect()
    await connection.execute(tbl.insert().values(name='second'))

    result = await connection.execute(tbl.select())
    batch = await result.fetchmany(100)
    assert len(batch) == 2
    assert not result.closed
    assert result.returns_rows
    assert batch == [(1, 'first'), (2, 'second')]
394 | ||
395 | ||
@pytest.mark.run_loop
async def test_fetchmany_closed(sa_connect):
    """fetchmany() on an explicitly closed result raises."""
    connection = await sa_connect()
    await connection.execute(tbl.insert().values(name='second'))

    result = await connection.execute(tbl.select())
    await result.close()
    with pytest.raises(sa.ResourceClosedError):
        await result.fetchmany()
405 | ||
406 | ||
@pytest.mark.run_loop
async def test_fetchmany_with_size_closed(sa_connect):
    """fetchmany(size) on an explicitly closed result raises, too."""
    connection = await sa_connect()
    await connection.execute(tbl.insert().values(name='second'))

    result = await connection.execute(tbl.select())
    await result.close()
    with pytest.raises(sa.ResourceClosedError):
        await result.fetchmany(5555)
416 | ||
417 | ||
@pytest.mark.run_loop
async def test_fetchmany_not_returns_rows(sa_connect):
    """fetchmany() on a row-less (DELETE) result raises."""
    connection = await sa_connect()
    result = await connection.execute(tbl.delete())
    with pytest.raises(sa.ResourceClosedError):
        await result.fetchmany()
424 | ||
425 | ||
@pytest.mark.run_loop
async def test_fetchmany_close_after_last_read(sa_connect):
    """An empty fetchmany() batch after the last row closes the result."""
    connection = await sa_connect()

    result = await connection.execute(tbl.select())
    batch = await result.fetchmany()
    assert len(batch) == 1
    assert not result.closed
    assert result.returns_rows
    assert batch == [(1, 'first')]
    tail = await result.fetchmany()
    assert len(tail) == 0
    assert result.closed
439 | ||
440 | ||
@pytest.mark.run_loop
async def test_create_table(sa_connect):
    """DDL statements return row-less results, and the freshly
    recreated table is empty."""
    connection = await sa_connect()
    result = await connection.execute(DropTable(tbl))
    with pytest.raises(sa.ResourceClosedError):
        await result.fetchmany()

    # The table is gone until it is recreated below.
    with pytest.raises(aiomysql.ProgrammingError):
        await connection.execute("SELECT * FROM sa_tbl")

    result = await connection.execute(CreateTable(tbl))
    with pytest.raises(sa.ResourceClosedError):
        await result.fetchmany()

    result = await connection.execute("SELECT * FROM sa_tbl")
    assert len(await result.fetchall()) == 0
457 | ||
458 | ||
@pytest.mark.run_loop
async def test_async_iter(sa_connect):
    """A pending execute() can be consumed directly with ``async for``."""
    connection = await sa_connect()
    await connection.execute(tbl.insert().values(name="second"))

    collected = [row async for row in connection.execute(tbl.select())]
    assert collected == [(1, "first"), (2, "second")]
0 | import datetime | |
1 | ||
2 | import pytest | |
3 | from sqlalchemy import MetaData, Table, Column, Integer, String | |
4 | from sqlalchemy import func, DateTime, Boolean | |
5 | ||
6 | from aiomysql import sa | |
7 | ||
meta = MetaData()
# Every column except ``id`` carries a client-side default so the tests
# can insert bare rows and inspect what the defaults produce.
table = Table(
    'sa_tbl_default_test', meta,
    Column('id', Integer, nullable=False, primary_key=True),
    Column('string_length', Integer, default=func.length('qwerty')),
    Column('number', Integer, default=100, nullable=False),
    Column('description', String(255), nullable=False,
           default='default test'),
    Column('created_at', DateTime, default=datetime.datetime.now),
    Column('enabled', Boolean, default=True),
)
19 | ||
20 | ||
@pytest.fixture()
def make_engine(mysql_params, connection):
    """Return an async factory producing sa engines for the test
    database; connects over a unix socket when one is configured."""
    async def _make_engine(**kwargs):
        if "unix_socket" in mysql_params:
            conn_args = {"unix_socket": mysql_params["unix_socket"]}
        else:
            conn_args = {
                "host": mysql_params['host'],
                "port": mysql_params['port'],
            }

        return await sa.create_engine(db=mysql_params['db'],
                                      user=mysql_params['user'],
                                      password=mysql_params['password'],
                                      minsize=10,
                                      **conn_args,
                                      **kwargs)

    return _make_engine
40 | ||
41 | ||
async def start(engine):
    """(Re)create the ``sa_tbl_default_test`` table used by these tests."""
    async with engine.acquire() as connection:
        await connection.execute("DROP TABLE IF EXISTS sa_tbl_default_test")
        await connection.execute("CREATE TABLE sa_tbl_default_test "
                                 "(id integer,"
                                 " string_length integer, "
                                 "number integer,"
                                 " description VARCHAR(255), "
                                 "created_at DATETIME(6), "
                                 "enabled TINYINT)")
52 | ||
53 | ||
@pytest.mark.run_loop
async def test_default_fields(make_engine):
    """Inserting an empty row materializes every client-side column
    default declared on ``table``."""
    engine = await make_engine()
    await start(engine)
    async with engine.acquire() as conn:
        await conn.execute(table.insert().values())
        res = await conn.execute(table.select())
        row = await res.fetchone()
        assert row.string_length == 6
        assert row.number == 100
        assert row.description == 'default test'
        assert row.enabled is True
        # isinstance instead of an exact type() comparison: a datetime
        # subclass from the driver would still be a valid default value.
        assert isinstance(row.created_at, datetime.datetime)
67 | ||
68 | ||
@pytest.mark.run_loop
async def test_default_fields_isnull(make_engine):
    """Explicit NULL/False values override the column defaults while the
    untouched columns still receive theirs."""
    engine = await make_engine()
    await start(engine)
    async with engine.acquire() as conn:
        created_at = None
        enabled = False
        await conn.execute(table.insert().values(
            enabled=enabled,
            created_at=created_at,
        ))

        res = await conn.execute(table.select())
        row = await res.fetchone()
        assert row.number == 100
        assert row.string_length == 6
        assert row.description == 'default test'
        assert row.enabled == enabled
        assert row.created_at == created_at
88 | ||
89 | ||
@pytest.mark.run_loop
async def test_default_fields_edit(make_engine):
    """Explicitly supplied values win over the defaults; columns left
    out (string_length) still get theirs."""
    engine = await make_engine()
    await start(engine)
    async with engine.acquire() as conn:
        created_at = datetime.datetime.now()
        description = 'new descr'
        enabled = False
        number = 111
        await conn.execute(table.insert().values(
            description=description,
            enabled=enabled,
            created_at=created_at,
            number=number,
        ))

        res = await conn.execute(table.select())
        row = await res.fetchone()
        assert row.number == number
        assert row.string_length == 6
        assert row.description == description
        assert row.enabled == enabled
        assert row.created_at == created_at
0 | 0 | import asyncio |
1 | ||
2 | import pytest | |
3 | from sqlalchemy import MetaData, Table, Column, Integer, String | |
4 | ||
1 | 5 | from aiomysql import sa |
2 | # from aiomysql.connection import TIMEOUT | |
3 | ||
4 | import os | |
5 | import unittest | |
6 | ||
7 | from sqlalchemy import MetaData, Table, Column, Integer, String | |
8 | 6 | |
9 | 7 | meta = MetaData() |
10 | 8 | tbl = Table('sa_tbl3', meta, |
13 | 11 | Column('name', String(255))) |
14 | 12 | |
15 | 13 | |
16 | class TestEngine(unittest.TestCase): | |
17 | def setUp(self): | |
18 | self.loop = asyncio.new_event_loop() | |
19 | asyncio.set_event_loop(None) | |
20 | self.host = os.environ.get('MYSQL_HOST', 'localhost') | |
21 | self.port = int(os.environ.get('MYSQL_PORT', 3306)) | |
22 | self.user = os.environ.get('MYSQL_USER', 'root') | |
23 | self.db = os.environ.get('MYSQL_DB', 'test_pymysql') | |
24 | self.password = os.environ.get('MYSQL_PASSWORD', '') | |
25 | self.engine = self.loop.run_until_complete(self.make_engine()) | |
26 | self.loop.run_until_complete(self.start()) | |
@pytest.fixture()
def make_engine(connection, mysql_params):
    """Return an async factory producing sa engines for the engine
    tests; connects over a unix socket when one is configured."""
    async def _make_engine(**kwargs):
        if "unix_socket" in mysql_params:
            conn_args = {"unix_socket": mysql_params["unix_socket"]}
        else:
            conn_args = {
                "host": mysql_params['host'],
                "port": mysql_params['port'],
            }

        return await sa.create_engine(db=mysql_params['db'],
                                      user=mysql_params['user'],
                                      password=mysql_params['password'],
                                      minsize=10,
                                      **conn_args,
                                      **kwargs)
    return _make_engine
32 | 32 | |
33 | async def make_engine(self, use_loop=True, **kwargs): | |
34 | if use_loop: | |
35 | return (await sa.create_engine(db=self.db, | |
36 | user=self.user, | |
37 | password=self.password, | |
38 | host=self.host, | |
39 | port=self.port, | |
40 | loop=self.loop, | |
41 | minsize=10, | |
42 | **kwargs)) | |
43 | else: | |
44 | return (await sa.create_engine(db=self.db, | |
45 | user=self.user, | |
46 | password=self.password, | |
47 | host=self.host, | |
48 | port=self.port, | |
49 | minsize=10, | |
50 | **kwargs)) | |
51 | 33 | |
52 | async def start(self): | |
53 | async with self.engine.acquire() as conn: | |
54 | await conn.execute("DROP TABLE IF EXISTS sa_tbl3") | |
55 | await conn.execute("CREATE TABLE sa_tbl3 " | |
56 | "(id serial, name varchar(255))") | |
async def start(engine):
    """(Re)create the ``sa_tbl3`` table used by the engine tests."""
    async with engine.acquire() as connection:
        await connection.execute("DROP TABLE IF EXISTS sa_tbl3")
        await connection.execute("CREATE TABLE sa_tbl3 "
                                 "(id serial, name varchar(255))")
57 | 39 | |
58 | def test_dialect(self): | |
59 | self.assertEqual(sa.engine._dialect, self.engine.dialect) | |
60 | 40 | |
61 | def test_name(self): | |
62 | self.assertEqual('mysql', self.engine.name) | |
@pytest.mark.run_loop
async def test_dialect(make_engine):
    """The engine exposes the module-wide mysql dialect instance."""
    engine = await make_engine()
    await start(engine)

    assert sa.engine._dialect == engine.dialect
66 | 47 | |
67 | # def test_dsn(self): | |
68 | # self.assertEqual( | |
69 | # 'dbname=aiomysql user=aiomysql password=xxxxxx host=127.0.0.1', | |
70 | # self.engine.dsn) | |
71 | 48 | |
72 | def test_minsize(self): | |
73 | self.assertEqual(10, self.engine.minsize) | |
@pytest.mark.run_loop
async def test_name(make_engine):
    """engine.name reports the database dialect name."""
    engine = await make_engine()
    await start(engine)
    assert engine.name == 'mysql'
74 | 54 | |
75 | def test_maxsize(self): | |
76 | self.assertEqual(10, self.engine.maxsize) | |
77 | 55 | |
78 | def test_size(self): | |
79 | self.assertEqual(10, self.engine.size) | |
@pytest.mark.run_loop
async def test_driver(make_engine):
    """engine.driver reports the underlying DBAPI driver name."""
    engine = await make_engine()
    await start(engine)
    assert engine.driver == 'pymysql'
80 | 61 | |
81 | def test_freesize(self): | |
82 | self.assertEqual(10, self.engine.freesize) | |
62 | # @pytest.mark.run_loop | |
63 | # async def test_dsn(self): | |
64 | # self.assertEqual( | |
65 | # 'dbname=aiomysql user=aiomysql password=xxxxxx host=127.0.0.1', | |
66 | # engine.dsn) | |
83 | 67 | |
84 | def test_make_engine_with_default_loop(self): | |
85 | 68 | |
86 | async def go(): | |
87 | engine = await self.make_engine(use_loop=False) | |
88 | engine.close() | |
89 | await engine.wait_closed() | |
@pytest.mark.run_loop
async def test_minsize(make_engine):
    """The fixture creates the pool with minsize=10."""
    engine = await make_engine()
    await start(engine)
    assert engine.minsize == 10
90 | 74 | |
91 | asyncio.set_event_loop(self.loop) | |
92 | try: | |
93 | self.loop.run_until_complete(go()) | |
94 | finally: | |
95 | asyncio.set_event_loop(None) | |
96 | 75 | |
97 | def test_not_context_manager(self): | |
98 | async def go(): | |
99 | with self.assertRaises(RuntimeError): | |
100 | with self.engine: | |
101 | pass | |
102 | self.loop.run_until_complete(go()) | |
@pytest.mark.run_loop
async def test_maxsize(make_engine):
    """With only minsize given, maxsize matches it."""
    engine = await make_engine()
    await start(engine)
    assert engine.maxsize == 10
103 | 81 | |
104 | def test_release_transacted(self): | |
105 | async def go(): | |
106 | conn = await self.engine.acquire() | |
107 | tr = await conn.begin() | |
108 | with self.assertRaises(sa.InvalidRequestError): | |
109 | self.engine.release(conn) | |
110 | del tr | |
111 | self.loop.run_until_complete(go()) | |
112 | 82 | |
113 | # def test_timeout(self): | |
114 | # self.assertEqual(TIMEOUT, self.engine.timeout) | |
@pytest.mark.run_loop
async def test_size(make_engine):
    """All minsize connections are opened eagerly, so size starts at 10."""
    engine = await make_engine()
    await start(engine)
    assert engine.size == 10
115 | 88 | |
116 | # def test_timeout_override(self): | |
117 | # @asyncio.coroutine | |
118 | # def go(): | |
119 | # timeout = 1 | |
120 | # engine = yield from self.make_engine(timeout=timeout) | |
121 | # self.assertEqual(timeout, engine.timeout) | |
122 | # conn = yield from engine.acquire() | |
123 | # with self.assertRaises(asyncio.TimeoutError): | |
124 | # yield from conn.execute("SELECT pg_sleep(10)") | |
125 | # self.loop.run_until_complete(go()) | |
126 | 89 | |
127 | def test_cannot_acquire_after_closing(self): | |
128 | async def go(): | |
129 | engine = await self.make_engine() | |
130 | engine.close() | |
@pytest.mark.run_loop
async def test_freesize(make_engine):
    """With no connection acquired, every pooled connection is free."""
    engine = await make_engine()
    await start(engine)
    assert engine.freesize == 10
131 | 95 | |
132 | with self.assertRaises(RuntimeError): | |
133 | await engine.acquire() | |
134 | await engine.wait_closed() | |
135 | self.loop.run_until_complete(go()) | |
136 | 96 | |
137 | def test_wait_closed(self): | |
138 | async def go(): | |
139 | engine = await self.make_engine() | |
@pytest.mark.run_loop
async def test_make_engine_with_default_loop(make_engine):
    """Engine creation works without an explicit event loop argument."""
    engine = await make_engine()
    await start(engine)

    engine.close()
    await engine.wait_closed()
145 | 104 | |
146 | ops = [] | |
147 | 105 | |
148 | async def do_release(conn): | |
149 | await asyncio.sleep(0, loop=self.loop) | |
150 | engine.release(conn) | |
151 | ops.append('release') | |
@pytest.mark.run_loop
async def test_not_context_manager(make_engine):
    """A plain ``with engine:`` (no acquire) must be rejected."""
    engine = await make_engine()
    await start(engine)
    with pytest.raises(RuntimeError):
        with engine:
            pass
152 | 113 | |
153 | async def wait_closed(): | |
154 | await engine.wait_closed() | |
155 | ops.append('wait_closed') | |
156 | 114 | |
157 | engine.close() | |
158 | await asyncio.gather(wait_closed(), do_release(c1), | |
159 | do_release(c2), loop=self.loop) | |
160 | self.assertEqual(['release', 'release', 'wait_closed'], ops) | |
161 | self.assertEqual(0, engine.freesize) | |
162 | engine.close() | |
163 | await engine.wait_closed() | |
115 | @pytest.mark.run_loop | |
116 | async def test_release_transacted(make_engine): | |
117 | engine = await make_engine() | |
118 | await start(engine) | |
119 | conn = await engine.acquire() | |
120 | tr = await conn.begin() | |
121 | with pytest.raises(sa.InvalidRequestError): | |
122 | engine.release(conn) | |
123 | del tr | |
164 | 124 | |
165 | self.loop.run_until_complete(go()) | |
166 | 125 | |
167 | def test_terminate_with_acquired_connections(self): | |
126 | @pytest.mark.run_loop | |
127 | async def test_cannot_acquire_after_closing(make_engine): | |
128 | engine = await make_engine() | |
129 | await start(engine) | |
130 | engine.close() | |
168 | 131 | |
169 | async def go(): | |
170 | engine = await self.make_engine() | |
171 | conn = await engine.acquire() | |
172 | engine.terminate() | |
173 | await engine.wait_closed() | |
132 | with pytest.raises(RuntimeError): | |
133 | await engine.acquire() | |
134 | await engine.wait_closed() | |
174 | 135 | |
175 | self.assertTrue(conn.closed) | |
176 | 136 | |
177 | self.loop.run_until_complete(go()) | |
137 | @pytest.mark.run_loop | |
138 | async def test_wait_closed(make_engine): | |
139 | engine = await make_engine() | |
140 | await start(engine) | |
141 | ||
142 | c1 = await engine.acquire() | |
143 | c2 = await engine.acquire() | |
144 | assert 10 == engine.size | |
145 | assert 8 == engine.freesize | |
146 | ||
147 | ops = [] | |
148 | ||
149 | async def do_release(conn): | |
150 | await asyncio.sleep(0) | |
151 | engine.release(conn) | |
152 | ops.append('release') | |
153 | ||
154 | async def wait_closed(): | |
155 | await engine.wait_closed() | |
156 | ops.append('wait_closed') | |
157 | ||
158 | engine.close() | |
159 | await asyncio.gather(wait_closed(), do_release(c1), | |
160 | do_release(c2)) | |
161 | assert ['release', 'release', 'wait_closed'] == ops | |
162 | assert 0 == engine.freesize | |
163 | engine.close() | |
164 | await engine.wait_closed() | |
165 | ||
166 | ||
167 | @pytest.mark.run_loop | |
168 | async def test_terminate_with_acquired_connections(make_engine): | |
169 | engine = await make_engine() | |
170 | await start(engine) | |
171 | ||
172 | conn = await engine.acquire() | |
173 | engine.terminate() | |
174 | await engine.wait_closed() | |
175 | ||
176 | assert conn.closed |
0 | import asyncio | |
1 | from aiomysql import connect, sa | |
2 | 0 | import functools |
3 | ||
4 | import os | |
5 | 1 | import unittest |
6 | 2 | from unittest import mock |
7 | 3 | |
4 | import pytest | |
8 | 5 | from sqlalchemy import MetaData, Table, Column, Integer, String |
6 | ||
7 | from aiomysql import sa | |
8 | ||
9 | 9 | |
10 | 10 | meta = MetaData() |
11 | 11 | tbl = Table('sa_tbl2', meta, |
27 | 27 | return wrapper |
28 | 28 | |
29 | 29 | |
30 | class TestTransaction(unittest.TestCase): | |
31 | def setUp(self): | |
32 | self.loop = asyncio.new_event_loop() | |
33 | asyncio.set_event_loop(None) | |
34 | self.host = os.environ.get('MYSQL_HOST', 'localhost') | |
35 | self.port = int(os.environ.get('MYSQL_PORT', 3306)) | |
36 | self.user = os.environ.get('MYSQL_USER', 'root') | |
37 | self.db = os.environ.get('MYSQL_DB', 'test_pymysql') | |
38 | self.password = os.environ.get('MYSQL_PASSWORD', '') | |
39 | self.loop.run_until_complete(self.start()) | |
40 | ||
41 | def tearDown(self): | |
42 | self.loop.close() | |
43 | ||
44 | async def start(self, **kwargs): | |
45 | conn = await self.connect(**kwargs) | |
46 | await conn.execute("DROP TABLE IF EXISTS sa_tbl2") | |
47 | await conn.execute("CREATE TABLE sa_tbl2 " | |
48 | "(id serial, name varchar(255))") | |
49 | await conn.execute("INSERT INTO sa_tbl2 (name)" | |
50 | "VALUES ('first')") | |
51 | await conn._connection.commit() | |
52 | ||
53 | async def connect(self, **kwargs): | |
54 | conn = await connect(db=self.db, | |
55 | user=self.user, | |
56 | password=self.password, | |
57 | host=self.host, | |
58 | port=self.port, | |
59 | loop=self.loop, | |
60 | **kwargs) | |
30 | async def start(conn): | |
31 | await conn.execute("DROP TABLE IF EXISTS sa_tbl2") | |
32 | await conn.execute("CREATE TABLE sa_tbl2 " | |
33 | "(id serial, name varchar(255))") | |
34 | await conn.execute("INSERT INTO sa_tbl2 (name)" | |
35 | "VALUES ('first')") | |
36 | await conn._connection.commit() | |
37 | ||
38 | ||
39 | @pytest.fixture() | |
40 | def sa_connect(connection, connection_creator): | |
41 | async def _connect(**kwargs): | |
42 | conn = await connection_creator(**kwargs) | |
61 | 43 | # TODO: fix this, should autocommit be enabled by default? |
62 | 44 | await conn.autocommit(True) |
63 | 45 | engine = mock.Mock() |
69 | 51 | |
70 | 52 | ret = sa.SAConnection(conn, engine) |
71 | 53 | return ret |
72 | ||
73 | def test_without_transactions(self): | |
74 | async def go(): | |
75 | conn1 = await self.connect() | |
76 | conn2 = await self.connect() | |
77 | res1 = await conn1.scalar(tbl.count()) | |
78 | self.assertEqual(1, res1) | |
79 | ||
80 | await conn2.execute(tbl.delete()) | |
81 | ||
82 | res2 = await conn1.scalar(tbl.count()) | |
83 | self.assertEqual(0, res2) | |
84 | await conn1.close() | |
85 | await conn2.close() | |
86 | ||
87 | self.loop.run_until_complete(go()) | |
88 | ||
89 | def test_connection_attr(self): | |
90 | async def go(): | |
91 | conn = await self.connect() | |
92 | tr = await conn.begin() | |
93 | self.assertIs(tr.connection, conn) | |
94 | await conn.close() | |
95 | ||
96 | self.loop.run_until_complete(go()) | |
97 | ||
98 | def test_root_transaction(self): | |
99 | async def go(): | |
100 | conn1 = await self.connect() | |
101 | conn2 = await self.connect() | |
102 | ||
103 | tr = await conn1.begin() | |
104 | self.assertTrue(tr.is_active) | |
105 | await conn1.execute(tbl.delete()) | |
106 | ||
107 | res1 = await conn2.scalar(tbl.count()) | |
108 | self.assertEqual(1, res1) | |
109 | ||
110 | await tr.commit() | |
111 | ||
112 | self.assertFalse(tr.is_active) | |
113 | self.assertFalse(conn1.in_transaction) | |
114 | res2 = await conn2.scalar(tbl.count()) | |
115 | self.assertEqual(0, res2) | |
116 | await conn1.close() | |
117 | await conn2.close() | |
118 | ||
119 | self.loop.run_until_complete(go()) | |
120 | ||
121 | def test_root_transaction_rollback(self): | |
122 | async def go(): | |
123 | conn1 = await self.connect() | |
124 | conn2 = await self.connect() | |
125 | ||
126 | tr = await conn1.begin() | |
127 | self.assertTrue(tr.is_active) | |
128 | await conn1.execute(tbl.delete()) | |
129 | ||
130 | res1 = await conn2.scalar(tbl.count()) | |
131 | self.assertEqual(1, res1) | |
132 | ||
133 | await tr.rollback() | |
134 | ||
135 | self.assertFalse(tr.is_active) | |
136 | res2 = await conn2.scalar(tbl.count()) | |
137 | self.assertEqual(1, res2) | |
138 | await conn1.close() | |
139 | await conn2.close() | |
140 | ||
141 | self.loop.run_until_complete(go()) | |
142 | ||
143 | def test_root_transaction_close(self): | |
144 | async def go(): | |
145 | conn1 = await self.connect() | |
146 | conn2 = await self.connect() | |
147 | ||
148 | tr = await conn1.begin() | |
149 | self.assertTrue(tr.is_active) | |
150 | await conn1.execute(tbl.delete()) | |
151 | ||
152 | res1 = await conn2.scalar(tbl.count()) | |
153 | self.assertEqual(1, res1) | |
154 | ||
155 | await tr.close() | |
156 | ||
157 | self.assertFalse(tr.is_active) | |
158 | res2 = await conn2.scalar(tbl.count()) | |
159 | self.assertEqual(1, res2) | |
160 | await conn1.close() | |
161 | await conn2.close() | |
162 | ||
163 | self.loop.run_until_complete(go()) | |
164 | ||
165 | def test_rollback_on_connection_close(self): | |
166 | async def go(): | |
167 | conn1 = await self.connect() | |
168 | conn2 = await self.connect() | |
169 | ||
170 | tr = await conn1.begin() | |
171 | await conn1.execute(tbl.delete()) | |
172 | ||
173 | res1 = await conn2.scalar(tbl.count()) | |
174 | self.assertEqual(1, res1) | |
175 | ||
176 | await conn1.close() | |
177 | ||
178 | res2 = await conn2.scalar(tbl.count()) | |
179 | self.assertEqual(1, res2) | |
180 | del tr | |
181 | await conn1.close() | |
182 | await conn2.close() | |
183 | ||
184 | self.loop.run_until_complete(go()) | |
185 | ||
186 | def test_root_transaction_commit_inactive(self): | |
187 | async def go(): | |
188 | conn = await self.connect() | |
189 | tr = await conn.begin() | |
190 | self.assertTrue(tr.is_active) | |
191 | await tr.commit() | |
192 | self.assertFalse(tr.is_active) | |
193 | with self.assertRaises(sa.InvalidRequestError): | |
194 | await tr.commit() | |
195 | await conn.close() | |
196 | ||
197 | self.loop.run_until_complete(go()) | |
198 | ||
199 | def test_root_transaction_rollback_inactive(self): | |
200 | async def go(): | |
201 | conn = await self.connect() | |
202 | tr = await conn.begin() | |
203 | self.assertTrue(tr.is_active) | |
204 | await tr.rollback() | |
205 | self.assertFalse(tr.is_active) | |
206 | await tr.rollback() | |
207 | self.assertFalse(tr.is_active) | |
208 | await conn.close() | |
209 | ||
210 | self.loop.run_until_complete(go()) | |
211 | ||
212 | def test_root_transaction_double_close(self): | |
213 | async def go(): | |
214 | conn = await self.connect() | |
215 | tr = await conn.begin() | |
216 | self.assertTrue(tr.is_active) | |
217 | await tr.close() | |
218 | self.assertFalse(tr.is_active) | |
219 | await tr.close() | |
220 | self.assertFalse(tr.is_active) | |
221 | await conn.close() | |
222 | ||
223 | self.loop.run_until_complete(go()) | |
224 | ||
225 | def test_inner_transaction_commit(self): | |
226 | async def go(): | |
227 | conn = await self.connect() | |
228 | tr1 = await conn.begin() | |
229 | tr2 = await conn.begin() | |
230 | self.assertTrue(tr2.is_active) | |
231 | ||
232 | await tr2.commit() | |
233 | self.assertFalse(tr2.is_active) | |
234 | self.assertTrue(tr1.is_active) | |
235 | ||
236 | await tr1.commit() | |
237 | self.assertFalse(tr2.is_active) | |
238 | self.assertFalse(tr1.is_active) | |
239 | await conn.close() | |
240 | ||
241 | self.loop.run_until_complete(go()) | |
242 | ||
243 | def test_inner_transaction_rollback(self): | |
244 | async def go(): | |
245 | conn = await self.connect() | |
246 | tr1 = await conn.begin() | |
247 | tr2 = await conn.begin() | |
248 | self.assertTrue(tr2.is_active) | |
249 | await conn.execute(tbl.insert().values(name='aaaa')) | |
250 | ||
251 | await tr2.rollback() | |
252 | self.assertFalse(tr2.is_active) | |
253 | self.assertFalse(tr1.is_active) | |
254 | ||
255 | res = await conn.scalar(tbl.count()) | |
256 | self.assertEqual(1, res) | |
257 | await conn.close() | |
258 | ||
259 | self.loop.run_until_complete(go()) | |
260 | ||
261 | def test_inner_transaction_close(self): | |
262 | async def go(): | |
263 | conn = await self.connect() | |
264 | tr1 = await conn.begin() | |
265 | tr2 = await conn.begin() | |
266 | self.assertTrue(tr2.is_active) | |
267 | await conn.execute(tbl.insert().values(name='aaaa')) | |
268 | ||
269 | await tr2.close() | |
270 | self.assertFalse(tr2.is_active) | |
271 | self.assertTrue(tr1.is_active) | |
272 | await tr1.commit() | |
273 | ||
274 | res = await conn.scalar(tbl.count()) | |
275 | self.assertEqual(2, res) | |
276 | await conn.close() | |
277 | ||
278 | self.loop.run_until_complete(go()) | |
279 | ||
280 | def test_nested_transaction_commit(self): | |
281 | async def go(): | |
282 | conn = await self.connect() | |
283 | tr1 = await conn.begin_nested() | |
284 | tr2 = await conn.begin_nested() | |
285 | self.assertTrue(tr1.is_active) | |
286 | self.assertTrue(tr2.is_active) | |
287 | ||
288 | await conn.execute(tbl.insert().values(name='aaaa')) | |
289 | await tr2.commit() | |
290 | self.assertFalse(tr2.is_active) | |
291 | self.assertTrue(tr1.is_active) | |
292 | ||
293 | res = await conn.scalar(tbl.count()) | |
294 | self.assertEqual(2, res) | |
295 | ||
296 | await tr1.commit() | |
297 | self.assertFalse(tr2.is_active) | |
298 | self.assertFalse(tr1.is_active) | |
299 | ||
300 | res = await conn.scalar(tbl.count()) | |
301 | self.assertEqual(2, res) | |
302 | await conn.close() | |
303 | ||
304 | self.loop.run_until_complete(go()) | |
305 | ||
306 | def test_nested_transaction_commit_twice(self): | |
307 | async def go(): | |
308 | conn = await self.connect() | |
309 | tr1 = await conn.begin_nested() | |
310 | tr2 = await conn.begin_nested() | |
311 | ||
312 | await conn.execute(tbl.insert().values(name='aaaa')) | |
313 | await tr2.commit() | |
314 | self.assertFalse(tr2.is_active) | |
315 | self.assertTrue(tr1.is_active) | |
316 | ||
317 | await tr2.commit() | |
318 | self.assertFalse(tr2.is_active) | |
319 | self.assertTrue(tr1.is_active) | |
320 | ||
321 | res = await conn.scalar(tbl.count()) | |
322 | self.assertEqual(2, res) | |
323 | ||
324 | await tr1.close() | |
325 | await conn.close() | |
326 | ||
327 | self.loop.run_until_complete(go()) | |
328 | ||
329 | def test_nested_transaction_rollback(self): | |
330 | async def go(): | |
331 | conn = await self.connect() | |
332 | tr1 = await conn.begin_nested() | |
333 | tr2 = await conn.begin_nested() | |
334 | self.assertTrue(tr1.is_active) | |
335 | self.assertTrue(tr2.is_active) | |
336 | ||
337 | await conn.execute(tbl.insert().values(name='aaaa')) | |
338 | await tr2.rollback() | |
339 | self.assertFalse(tr2.is_active) | |
340 | self.assertTrue(tr1.is_active) | |
341 | ||
342 | res = await conn.scalar(tbl.count()) | |
343 | self.assertEqual(1, res) | |
344 | ||
345 | await tr1.commit() | |
346 | self.assertFalse(tr2.is_active) | |
347 | self.assertFalse(tr1.is_active) | |
348 | ||
349 | res = await conn.scalar(tbl.count()) | |
350 | self.assertEqual(1, res) | |
351 | await conn.close() | |
352 | ||
353 | self.loop.run_until_complete(go()) | |
354 | ||
355 | def test_nested_transaction_rollback_twice(self): | |
356 | async def go(): | |
357 | conn = await self.connect() | |
358 | tr1 = await conn.begin_nested() | |
359 | tr2 = await conn.begin_nested() | |
360 | ||
361 | await conn.execute(tbl.insert().values(name='aaaa')) | |
362 | await tr2.rollback() | |
363 | self.assertFalse(tr2.is_active) | |
364 | self.assertTrue(tr1.is_active) | |
365 | ||
366 | await tr2.rollback() | |
367 | self.assertFalse(tr2.is_active) | |
368 | self.assertTrue(tr1.is_active) | |
369 | ||
370 | await tr1.commit() | |
371 | res = await conn.scalar(tbl.count()) | |
372 | self.assertEqual(1, res) | |
373 | await conn.close() | |
374 | ||
375 | self.loop.run_until_complete(go()) | |
376 | ||
377 | def test_twophase_transaction_commit(self): | |
378 | async def go(): | |
379 | conn = await self.connect() | |
380 | tr = await conn.begin_twophase('sa_twophase') | |
381 | self.assertEqual(tr.xid, 'sa_twophase') | |
382 | await conn.execute(tbl.insert().values(name='aaaa')) | |
383 | ||
384 | await tr.prepare() | |
385 | self.assertTrue(tr.is_active) | |
386 | ||
387 | await tr.commit() | |
388 | self.assertFalse(tr.is_active) | |
389 | ||
390 | res = await conn.scalar(tbl.count()) | |
391 | self.assertEqual(2, res) | |
392 | await conn.close() | |
393 | ||
394 | self.loop.run_until_complete(go()) | |
395 | ||
396 | def test_twophase_transaction_twice(self): | |
397 | async def go(): | |
398 | conn = await self.connect() | |
399 | tr = await conn.begin_twophase() | |
400 | with self.assertRaises(sa.InvalidRequestError): | |
401 | await conn.begin_twophase() | |
402 | ||
403 | self.assertTrue(tr.is_active) | |
404 | await tr.prepare() | |
405 | await tr.commit() | |
406 | await conn.close() | |
407 | ||
408 | self.loop.run_until_complete(go()) | |
409 | ||
410 | def test_transactions_sequence(self): | |
411 | async def go(): | |
412 | conn = await self.connect() | |
413 | ||
414 | await conn.execute(tbl.delete()) | |
415 | ||
416 | self.assertIsNone(conn._transaction) | |
417 | ||
418 | tr1 = await conn.begin() | |
419 | self.assertIs(tr1, conn._transaction) | |
420 | await conn.execute(tbl.insert().values(name='a')) | |
421 | res1 = await conn.scalar(tbl.count()) | |
422 | self.assertEqual(1, res1) | |
423 | ||
424 | await tr1.commit() | |
425 | self.assertIsNone(conn._transaction) | |
426 | ||
427 | tr2 = await conn.begin() | |
428 | self.assertIs(tr2, conn._transaction) | |
429 | await conn.execute(tbl.insert().values(name='b')) | |
430 | res2 = await conn.scalar(tbl.count()) | |
431 | self.assertEqual(2, res2) | |
432 | await tr2.rollback() | |
433 | self.assertIsNone(conn._transaction) | |
434 | ||
435 | tr3 = await conn.begin() | |
436 | self.assertIs(tr3, conn._transaction) | |
437 | await conn.execute(tbl.insert().values(name='b')) | |
438 | res3 = await conn.scalar(tbl.count()) | |
439 | self.assertEqual(2, res3) | |
440 | await tr3.commit() | |
441 | self.assertIsNone(conn._transaction) | |
442 | await conn.close() | |
443 | ||
444 | self.loop.run_until_complete(go()) | |
54 | return _connect | |
55 | ||
56 | ||
57 | @pytest.mark.run_loop | |
58 | async def test_without_transactions(sa_connect): | |
59 | conn1 = await sa_connect() | |
60 | await start(conn1) | |
61 | ||
62 | conn2 = await sa_connect() | |
63 | res1 = await conn1.scalar(tbl.count()) | |
64 | assert 1 == res1 | |
65 | ||
66 | await conn2.execute(tbl.delete()) | |
67 | ||
68 | res2 = await conn1.scalar(tbl.count()) | |
69 | assert 0 == res2 | |
70 | await conn1.close() | |
71 | await conn2.close() | |
72 | ||
73 | ||
74 | @pytest.mark.run_loop | |
75 | async def test_connection_attr(sa_connect): | |
76 | conn = await sa_connect() | |
77 | await start(conn) | |
78 | tr = await conn.begin() | |
79 | assert tr.connection is conn | |
80 | await conn.close() | |
81 | ||
82 | ||
83 | @pytest.mark.run_loop | |
84 | async def test_root_transaction(sa_connect): | |
85 | conn1 = await sa_connect() | |
86 | await start(conn1) | |
87 | conn2 = await sa_connect() | |
88 | ||
89 | tr = await conn1.begin() | |
90 | assert tr.is_active | |
91 | await conn1.execute(tbl.delete()) | |
92 | ||
93 | res1 = await conn2.scalar(tbl.count()) | |
94 | assert 1 == res1 | |
95 | ||
96 | await tr.commit() | |
97 | ||
98 | assert not tr.is_active | |
99 | assert not conn1.in_transaction | |
100 | res2 = await conn2.scalar(tbl.count()) | |
101 | assert 0 == res2 | |
102 | await conn1.close() | |
103 | await conn2.close() | |
104 | ||
105 | ||
106 | @pytest.mark.run_loop | |
107 | async def test_root_transaction_rollback(sa_connect): | |
108 | conn1 = await sa_connect() | |
109 | await start(conn1) | |
110 | conn2 = await sa_connect() | |
111 | ||
112 | tr = await conn1.begin() | |
113 | assert tr.is_active | |
114 | await conn1.execute(tbl.delete()) | |
115 | ||
116 | res1 = await conn2.scalar(tbl.count()) | |
117 | assert 1 == res1 | |
118 | ||
119 | await tr.rollback() | |
120 | ||
121 | assert not tr.is_active | |
122 | res2 = await conn2.scalar(tbl.count()) | |
123 | assert 1 == res2 | |
124 | await conn1.close() | |
125 | await conn2.close() | |
126 | ||
127 | ||
128 | @pytest.mark.run_loop | |
129 | async def test_root_transaction_close(sa_connect): | |
130 | conn1 = await sa_connect() | |
131 | await start(conn1) | |
132 | conn2 = await sa_connect() | |
133 | ||
134 | tr = await conn1.begin() | |
135 | assert tr.is_active | |
136 | await conn1.execute(tbl.delete()) | |
137 | ||
138 | res1 = await conn2.scalar(tbl.count()) | |
139 | assert 1 == res1 | |
140 | ||
141 | await tr.close() | |
142 | ||
143 | assert not tr.is_active | |
144 | res2 = await conn2.scalar(tbl.count()) | |
145 | assert 1 == res2 | |
146 | await conn1.close() | |
147 | await conn2.close() | |
148 | ||
149 | ||
150 | @pytest.mark.run_loop | |
151 | async def test_rollback_on_connection_close(sa_connect): | |
152 | conn1 = await sa_connect() | |
153 | await start(conn1) | |
154 | conn2 = await sa_connect() | |
155 | ||
156 | tr = await conn1.begin() | |
157 | await conn1.execute(tbl.delete()) | |
158 | ||
159 | res1 = await conn2.scalar(tbl.count()) | |
160 | assert 1 == res1 | |
161 | ||
162 | await conn1.close() | |
163 | ||
164 | res2 = await conn2.scalar(tbl.count()) | |
165 | assert 1 == res2 | |
166 | del tr | |
167 | await conn1.close() | |
168 | await conn2.close() | |
169 | ||
170 | ||
171 | @pytest.mark.run_loop | |
172 | async def test_root_transaction_commit_inactive(sa_connect): | |
173 | conn = await sa_connect() | |
174 | await start(conn) | |
175 | tr = await conn.begin() | |
176 | assert tr.is_active | |
177 | await tr.commit() | |
178 | assert not tr.is_active | |
179 | with pytest.raises(sa.InvalidRequestError): | |
180 | await tr.commit() | |
181 | await conn.close() | |
182 | ||
183 | ||
184 | @pytest.mark.run_loop | |
185 | async def test_root_transaction_rollback_inactive(sa_connect): | |
186 | conn = await sa_connect() | |
187 | await start(conn) | |
188 | tr = await conn.begin() | |
189 | assert tr.is_active | |
190 | await tr.rollback() | |
191 | assert not tr.is_active | |
192 | await tr.rollback() | |
193 | assert not tr.is_active | |
194 | await conn.close() | |
195 | ||
196 | ||
197 | @pytest.mark.run_loop | |
198 | async def test_root_transaction_double_close(sa_connect): | |
199 | conn = await sa_connect() | |
200 | await start(conn) | |
201 | tr = await conn.begin() | |
202 | assert tr.is_active | |
203 | await tr.close() | |
204 | assert not tr.is_active | |
205 | await tr.close() | |
206 | assert not tr.is_active | |
207 | await conn.close() | |
208 | ||
209 | ||
210 | @pytest.mark.run_loop | |
211 | async def test_inner_transaction_commit(sa_connect): | |
212 | conn = await sa_connect() | |
213 | await start(conn) | |
214 | tr1 = await conn.begin() | |
215 | tr2 = await conn.begin() | |
216 | assert tr2.is_active | |
217 | ||
218 | await tr2.commit() | |
219 | assert not tr2.is_active | |
220 | assert tr1.is_active | |
221 | ||
222 | await tr1.commit() | |
223 | assert not tr2.is_active | |
224 | assert not tr1.is_active | |
225 | await conn.close() | |
226 | ||
227 | ||
228 | @pytest.mark.run_loop | |
229 | async def test_inner_transaction_rollback(sa_connect): | |
230 | conn = await sa_connect() | |
231 | await start(conn) | |
232 | tr1 = await conn.begin() | |
233 | tr2 = await conn.begin() | |
234 | assert tr2.is_active | |
235 | await conn.execute(tbl.insert().values(name='aaaa')) | |
236 | ||
237 | await tr2.rollback() | |
238 | assert not tr2.is_active | |
239 | assert not tr1.is_active | |
240 | ||
241 | res = await conn.scalar(tbl.count()) | |
242 | assert 1 == res | |
243 | await conn.close() | |
244 | ||
245 | ||
246 | @pytest.mark.run_loop | |
247 | async def test_inner_transaction_close(sa_connect): | |
248 | conn = await sa_connect() | |
249 | await start(conn) | |
250 | tr1 = await conn.begin() | |
251 | tr2 = await conn.begin() | |
252 | assert tr2.is_active | |
253 | await conn.execute(tbl.insert().values(name='aaaa')) | |
254 | ||
255 | await tr2.close() | |
256 | assert not tr2.is_active | |
257 | assert tr1.is_active | |
258 | await tr1.commit() | |
259 | ||
260 | res = await conn.scalar(tbl.count()) | |
261 | assert 2 == res | |
262 | await conn.close() | |
263 | ||
264 | ||
265 | @pytest.mark.run_loop | |
266 | async def test_nested_transaction_commit(sa_connect): | |
267 | conn = await sa_connect() | |
268 | await start(conn) | |
269 | tr1 = await conn.begin_nested() | |
270 | tr2 = await conn.begin_nested() | |
271 | assert tr1.is_active | |
272 | assert tr2.is_active | |
273 | ||
274 | await conn.execute(tbl.insert().values(name='aaaa')) | |
275 | await tr2.commit() | |
276 | assert not tr2.is_active | |
277 | assert tr1.is_active | |
278 | ||
279 | res = await conn.scalar(tbl.count()) | |
280 | assert 2 == res | |
281 | ||
282 | await tr1.commit() | |
283 | assert not tr2.is_active | |
284 | assert not tr1.is_active | |
285 | ||
286 | res = await conn.scalar(tbl.count()) | |
287 | assert 2 == res | |
288 | await conn.close() | |
289 | ||
290 | ||
291 | @pytest.mark.run_loop | |
292 | async def test_nested_transaction_commit_twice(sa_connect): | |
293 | conn = await sa_connect() | |
294 | await start(conn) | |
295 | tr1 = await conn.begin_nested() | |
296 | tr2 = await conn.begin_nested() | |
297 | ||
298 | await conn.execute(tbl.insert().values(name='aaaa')) | |
299 | await tr2.commit() | |
300 | assert not tr2.is_active | |
301 | assert tr1.is_active | |
302 | ||
303 | await tr2.commit() | |
304 | assert not tr2.is_active | |
305 | assert tr1.is_active | |
306 | ||
307 | res = await conn.scalar(tbl.count()) | |
308 | assert 2 == res | |
309 | ||
310 | await tr1.close() | |
311 | await conn.close() | |
312 | ||
313 | ||
314 | @pytest.mark.run_loop | |
315 | async def test_nested_transaction_rollback(sa_connect): | |
316 | conn = await sa_connect() | |
317 | await start(conn) | |
318 | tr1 = await conn.begin_nested() | |
319 | tr2 = await conn.begin_nested() | |
320 | assert tr1.is_active | |
321 | assert tr2.is_active | |
322 | ||
323 | await conn.execute(tbl.insert().values(name='aaaa')) | |
324 | await tr2.rollback() | |
325 | assert not tr2.is_active | |
326 | assert tr1.is_active | |
327 | ||
328 | res = await conn.scalar(tbl.count()) | |
329 | assert 1 == res | |
330 | ||
331 | await tr1.commit() | |
332 | assert not tr2.is_active | |
333 | assert not tr1.is_active | |
334 | ||
335 | res = await conn.scalar(tbl.count()) | |
336 | assert 1 == res | |
337 | await conn.close() | |
338 | ||
339 | ||
340 | @pytest.mark.run_loop | |
341 | async def test_nested_transaction_rollback_twice(sa_connect): | |
342 | conn = await sa_connect() | |
343 | await start(conn) | |
344 | tr1 = await conn.begin_nested() | |
345 | tr2 = await conn.begin_nested() | |
346 | ||
347 | await conn.execute(tbl.insert().values(name='aaaa')) | |
348 | await tr2.rollback() | |
349 | assert not tr2.is_active | |
350 | assert tr1.is_active | |
351 | ||
352 | await tr2.rollback() | |
353 | assert not tr2.is_active | |
354 | assert tr1.is_active | |
355 | ||
356 | await tr1.commit() | |
357 | res = await conn.scalar(tbl.count()) | |
358 | assert 1 == res | |
359 | await conn.close() | |
360 | ||
361 | ||
362 | @pytest.mark.run_loop | |
363 | async def test_twophase_transaction_commit(sa_connect): | |
364 | conn = await sa_connect() | |
365 | await start(conn) | |
366 | tr = await conn.begin_twophase('sa_twophase') | |
367 | assert tr.xid == 'sa_twophase' | |
368 | await conn.execute(tbl.insert().values(name='aaaa')) | |
369 | ||
370 | await tr.prepare() | |
371 | assert tr.is_active | |
372 | ||
373 | await tr.commit() | |
374 | assert not tr.is_active | |
375 | ||
376 | res = await conn.scalar(tbl.count()) | |
377 | assert 2 == res | |
378 | await conn.close() | |
379 | ||
380 | ||
381 | @pytest.mark.run_loop | |
382 | async def test_twophase_transaction_twice(sa_connect): | |
383 | conn = await sa_connect() | |
384 | await start(conn) | |
385 | tr = await conn.begin_twophase() | |
386 | with pytest.raises(sa.InvalidRequestError): | |
387 | await conn.begin_twophase() | |
388 | ||
389 | assert tr.is_active | |
390 | await tr.prepare() | |
391 | await tr.commit() | |
392 | await conn.close() | |
393 | ||
394 | ||
395 | @pytest.mark.run_loop | |
396 | async def test_transactions_sequence(sa_connect): | |
397 | conn = await sa_connect() | |
398 | await start(conn) | |
399 | ||
400 | await conn.execute(tbl.delete()) | |
401 | ||
402 | assert conn._transaction is None | |
403 | ||
404 | tr1 = await conn.begin() | |
405 | assert tr1 is conn._transaction | |
406 | await conn.execute(tbl.insert().values(name='a')) | |
407 | res1 = await conn.scalar(tbl.count()) | |
408 | assert 1 == res1 | |
409 | ||
410 | await tr1.commit() | |
411 | assert conn._transaction is None | |
412 | ||
413 | tr2 = await conn.begin() | |
414 | assert tr2 is conn._transaction | |
415 | await conn.execute(tbl.insert().values(name='b')) | |
416 | res2 = await conn.scalar(tbl.count()) | |
417 | assert 2 == res2 | |
418 | await tr2.rollback() | |
419 | assert conn._transaction is None | |
420 | ||
421 | tr3 = await conn.begin() | |
422 | assert tr3 is conn._transaction | |
423 | await conn.execute(tbl.insert().values(name='b')) | |
424 | res3 = await conn.scalar(tbl.count()) | |
425 | assert 2 == res3 | |
426 | await tr3.commit() | |
427 | assert conn._transaction is None | |
428 | await conn.close() |
0 | import asyncio | |
1 | from aiomysql import connect, sa | |
2 | 0 | from enum import IntEnum |
3 | ||
4 | import os | |
5 | import unittest | |
6 | 1 | from unittest import mock |
7 | 2 | |
3 | import pytest | |
8 | 4 | from sqlalchemy import MetaData, Table, Column, Integer, TypeDecorator |
5 | ||
6 | from aiomysql import sa | |
9 | 7 | |
10 | 8 | |
11 | 9 | class UserDefinedEnum(IntEnum): |
44 | 42 | Column('val', IntEnumField(enum_class=UserDefinedEnum))) |
45 | 43 | |
46 | 44 | |
47 | class TestSATypes(unittest.TestCase): | |
48 | def setUp(self): | |
49 | self.loop = asyncio.new_event_loop() | |
50 | asyncio.set_event_loop(None) | |
51 | self.host = os.environ.get('MYSQL_HOST', 'localhost') | |
52 | self.port = int(os.environ.get('MYSQL_PORT', 3306)) | |
53 | self.user = os.environ.get('MYSQL_USER', 'root') | |
54 | self.db = os.environ.get('MYSQL_DB', 'test_pymysql') | |
55 | self.password = os.environ.get('MYSQL_PASSWORD', '') | |
56 | ||
57 | def tearDown(self): | |
58 | self.loop.close() | |
59 | ||
60 | async def connect(self, **kwargs): | |
61 | conn = await connect(db=self.db, | |
62 | user=self.user, | |
63 | password=self.password, | |
64 | host=self.host, | |
65 | loop=self.loop, | |
66 | port=self.port, | |
67 | **kwargs) | |
45 | @pytest.fixture() | |
46 | def sa_connect(connection_creator): | |
47 | async def connect(**kwargs): | |
48 | conn = await connection_creator() | |
68 | 49 | await conn.autocommit(True) |
69 | 50 | cur = await conn.cursor() |
70 | 51 | await cur.execute("DROP TABLE IF EXISTS sa_test_type_tbl") |
74 | 55 | engine = mock.Mock() |
75 | 56 | engine.dialect = sa.engine._dialect |
76 | 57 | return sa.SAConnection(conn, engine) |
58 | return connect | |
77 | 59 | |
78 | def test_values(self): | |
79 | async def go(): | |
80 | conn = await self.connect() | |
81 | 60 | |
82 | await conn.execute(tbl.insert().values( | |
83 | val=UserDefinedEnum.Value1) | |
84 | ) | |
85 | result = await conn.execute(tbl.select().where( | |
86 | tbl.c.val == UserDefinedEnum.Value1) | |
87 | ) | |
88 | data = await result.fetchone() | |
89 | self.assertEqual( | |
90 | data['val'], UserDefinedEnum.Value1 | |
91 | ) | |
61 | @pytest.mark.run_loop | |
62 | async def test_values(sa_connect): | |
63 | conn = await sa_connect() | |
92 | 64 | |
93 | self.loop.run_until_complete(go()) | |
65 | await conn.execute(tbl.insert().values( | |
66 | val=UserDefinedEnum.Value1) | |
67 | ) | |
68 | result = await conn.execute(tbl.select().where( | |
69 | tbl.c.val == UserDefinedEnum.Value1) | |
70 | ) | |
71 | data = await result.fetchone() | |
72 | assert data['val'] == UserDefinedEnum.Value1 |
2 | 2 | import aiomysql |
3 | 3 | import pytest |
4 | 4 | |
5 | from aiomysql import sa, create_pool, DictCursor | |
5 | from aiomysql import sa, create_pool, DictCursor, Cursor | |
6 | 6 | from sqlalchemy import MetaData, Table, Column, Integer, String |
7 | 7 | |
8 | 8 | |
130 | 130 | warnings.simplefilter("always") |
131 | 131 | async with pool.get() as conn: |
132 | 132 | pass |
133 | assert issubclass(w[-1].category, DeprecationWarning) | |
133 | # The first warning emitted is expected to be DeprecationWarning: | |
134 | # in the past, we used to check for the last one but this assumption | |
135 | # breaks under Python 3.7 that also emits a `ResourceWarning` when | |
136 | # executed with `PYTHONASYNCIODEBUG=1`. | |
137 | assert issubclass(w[0].category, DeprecationWarning) | |
134 | 138 | assert conn.closed |
135 | 139 | |
136 | 140 | async with create_pool(loop=loop, **mysql_params) as pool: |
148 | 152 | connection = await engine.acquire() |
149 | 153 | assert not connection.closed |
150 | 154 | async with connection: |
151 | ret = [] | |
152 | async for i in connection.execute(tbl.select()): | |
153 | ret.append(i) | |
155 | async with connection.execute(tbl.select()) as cursor: | |
156 | ret = [] | |
157 | async for i in cursor: | |
158 | ret.append(i) | |
154 | 159 | assert [(1, 'a'), (2, 'b'), (3, 'c')] == ret |
155 | 160 | assert connection.closed |
156 | 161 | |
193 | 198 | async def test_create_engine(loop, mysql_params, table): |
194 | 199 | async with sa.create_engine(loop=loop, **mysql_params) as engine: |
195 | 200 | async with engine.acquire() as conn: |
196 | ret = [] | |
197 | async for i in conn.execute(tbl.select()): | |
198 | ret.append(i) | |
199 | assert [(1, 'a'), (2, 'b'), (3, 'c')] == ret | |
201 | async with conn.execute(tbl.select()) as cursor: | |
202 | ret = [] | |
203 | async for i in cursor: | |
204 | ret.append(i) | |
205 | assert [(1, 'a'), (2, 'b'), (3, 'c')] == ret | |
200 | 206 | |
201 | 207 | |
202 | 208 | @pytest.mark.run_loop |
204 | 210 | engine = await sa.create_engine(loop=loop, **mysql_params) |
205 | 211 | async with engine: |
206 | 212 | async with engine.acquire() as conn: |
207 | ret = [] | |
208 | async for i in conn.execute(tbl.select()): | |
209 | ret.append(i) | |
210 | assert [(1, 'a'), (2, 'b'), (3, 'c')] == ret | |
213 | async with conn.execute(tbl.select()) as cursor: | |
214 | ret = [] | |
215 | async for i in cursor: | |
216 | ret.append(i) | |
217 | assert [(1, 'a'), (2, 'b'), (3, 'c')] == ret | |
211 | 218 | |
212 | 219 | |
213 | 220 | @pytest.mark.run_loop |
217 | 224 | async with conn.begin() as tr: |
218 | 225 | async with conn.execute(tbl.select()) as cursor: |
219 | 226 | ret = [] |
220 | async for i in conn.execute(tbl.select()): | |
227 | async for i in cursor: | |
221 | 228 | ret.append(i) |
222 | 229 | assert [(1, 'a'), (2, 'b'), (3, 'c')] == ret |
223 | 230 | assert cursor.closed |
275 | 282 | |
276 | 283 | msg = 'SQLAlchemy engine does not support this cursor class' |
277 | 284 | assert str(ctx.value) == msg |
285 | ||
286 | ||
287 | @pytest.mark.run_loop | |
288 | async def test_compatible_cursor_correct(loop, mysql_params): | |
289 | class SubCursor(Cursor): | |
290 | pass | |
291 | ||
292 | mysql_params['cursorclass'] = SubCursor | |
293 | async with sa.create_engine(loop=loop, **mysql_params) as engine: | |
294 | async with engine.acquire() as conn: | |
295 | # check not raise sa.ArgumentError exception | |
296 | pass | |
297 | assert conn.closed |
3 | 3 | import time |
4 | 4 | |
5 | 5 | import pytest |
6 | from pymysql import util | |
7 | 6 | from pymysql.err import ProgrammingError |
8 | 7 | |
9 | 8 | |
41 | 40 | await cursor.execute( |
42 | 41 | "select b,i,l,f,s,u,bb,d,dt,td,t,st from test_datatypes") |
43 | 42 | r = await cursor.fetchone() |
44 | assert util.int2byte(1) == r[0] | |
43 | assert bytes([1]) == r[0] | |
45 | 44 | # assert v[1:8] == r[1:8]) |
46 | 45 | assert v[1:9] == r[1:9] |
47 | 46 | # mysql throws away microseconds so we need to check datetimes |
102 | 101 | await cursor.execute("INSERT INTO test_string (a) VALUES (%s)", |
103 | 102 | test_value) |
104 | 103 | await cursor.execute("SELECT a FROM test_string") |
104 | r = await cursor.fetchone() | |
105 | assert (test_value,) == r | |
106 | ||
107 | ||
108 | @pytest.mark.run_loop | |
109 | async def test_string_with_emoji(cursor, table_cleanup): | |
110 | await cursor.execute("DROP TABLE IF EXISTS test_string_with_emoji;") | |
111 | await cursor.execute("CREATE TABLE test_string_with_emoji (a text) " | |
112 | "DEFAULT CHARACTER SET=\"utf8mb4\"") | |
113 | test_value = "I am a test string with emoji 😄" | |
114 | table_cleanup('test_string_with_emoji') | |
115 | await cursor.execute("INSERT INTO test_string_with_emoji (a) VALUES (%s)", | |
116 | test_value) | |
117 | await cursor.execute("SELECT a FROM test_string_with_emoji") | |
105 | 118 | r = await cursor.fetchone() |
106 | 119 | assert (test_value,) == r |
107 | 120 | |
238 | 251 | async def test_json(connection_creator, table_cleanup): |
239 | 252 | connection = await connection_creator( |
240 | 253 | charset="utf8mb4", autocommit=True) |
254 | # TODO do better | |
241 | 255 | server_info = connection.get_server_info() |
242 | 256 | if not mysql_server_is(server_info, (5, 7, 0)): |
243 | 257 | raise pytest.skip("JSON type is not supported on MySQL <= 5.6") |
0 | 0 | import asyncio |
1 | 1 | import gc |
2 | 2 | import os |
3 | import sys | |
4 | import unittest | |
3 | ||
4 | import pytest | |
5 | ||
5 | 6 | import aiomysql |
6 | from tests._testutils import run_until_complete | |
7 | from tests.base import AIOPyMySQLTestCase | |
8 | ||
9 | ||
10 | PY_341 = sys.version_info >= (3, 4, 1) | |
11 | ||
12 | ||
13 | class TestConnection(AIOPyMySQLTestCase): | |
14 | ||
15 | def fill_my_cnf(self): | |
16 | tests_root = os.path.abspath(os.path.dirname(__file__)) | |
17 | path1 = os.path.join(tests_root, 'fixtures/my.cnf.tmpl') | |
18 | path2 = os.path.join(tests_root, 'fixtures/my.cnf') | |
19 | with open(path1) as f1: | |
20 | tmpl = f1.read() | |
21 | with open(path2, 'w') as f2: | |
22 | f2.write(tmpl.format_map(self.__dict__)) | |
23 | ||
24 | @run_until_complete | |
25 | def test_connect_timeout(self): | |
26 | # All exceptions are caught and raised as operational errors | |
27 | with self.assertRaises(aiomysql.OperationalError): | |
28 | yield from self.connect(connect_timeout=0.000000000001) | |
29 | ||
30 | @run_until_complete | |
31 | def test_config_file(self): | |
32 | self.fill_my_cnf() | |
33 | tests_root = os.path.abspath(os.path.dirname(__file__)) | |
34 | path = os.path.join(tests_root, 'fixtures/my.cnf') | |
35 | conn = yield from self.connect(read_default_file=path) | |
36 | ||
37 | self.assertEqual(conn.host, self.host) | |
38 | self.assertEqual(conn.port, self.port) | |
39 | self.assertEqual(conn.user, self.user) | |
40 | ||
41 | # make sure connection is working | |
42 | cur = yield from conn.cursor() | |
43 | yield from cur.execute('SELECT 42;') | |
44 | (r, ) = yield from cur.fetchone() | |
45 | self.assertEqual(r, 42) | |
46 | conn.close() | |
47 | ||
48 | @run_until_complete | |
49 | def test_config_file_with_different_group(self): | |
50 | self.fill_my_cnf() | |
51 | # same test with config file but actual settings | |
52 | # located in not default group. | |
53 | tests_root = os.path.abspath(os.path.dirname(__file__)) | |
54 | path = os.path.join(tests_root, 'fixtures/my.cnf') | |
55 | group = 'client_with_unix_socket' | |
56 | conn = yield from self.connect(read_default_file=path, | |
57 | read_default_group=group) | |
58 | ||
59 | self.assertEqual(conn.charset, 'utf8') | |
60 | self.assertEqual(conn.user, 'root') | |
61 | self.assertEqual(conn.unix_socket, '/var/run/mysqld/mysqld.sock') | |
62 | ||
63 | # make sure connection is working | |
64 | cur = yield from conn.cursor() | |
65 | yield from cur.execute('SELECT 42;') | |
66 | (r, ) = yield from cur.fetchone() | |
67 | self.assertEqual(r, 42) | |
68 | conn.close() | |
69 | ||
70 | @run_until_complete | |
71 | def test_connect_using_unix_socket(self): | |
72 | sock = '/var/run/mysqld/mysqld.sock' | |
73 | conn = yield from self.connect(unix_socket=sock) | |
74 | self.assertEqual(conn.unix_socket, sock) | |
75 | ||
76 | cur = yield from conn.cursor() | |
77 | yield from cur.execute('SELECT 42;') | |
78 | (r, ) = yield from cur.fetchone() | |
79 | self.assertEqual(r, 42) | |
80 | conn.close() | |
81 | ||
82 | @run_until_complete | |
83 | def test_utf8mb4(self): | |
84 | """This test requires MySQL >= 5.5""" | |
85 | charset = 'utf8mb4' | |
86 | conn = yield from self.connect(charset=charset) | |
87 | self.assertEqual(conn.charset, charset) | |
88 | conn.close() | |
89 | ||
90 | @run_until_complete | |
91 | def test_largedata(self): | |
92 | """Large query and response (>=16MB)""" | |
93 | cur = yield from self.connections[0].cursor() | |
94 | yield from cur.execute("SELECT @@max_allowed_packet") | |
95 | r = yield from cur.fetchone() | |
96 | if r[0] < 16 * 1024 * 1024 + 10: | |
97 | self.skipTest('Set max_allowed_packet to bigger than 17MB') | |
98 | else: | |
99 | t = 'a' * (16 * 1024 * 1024) | |
100 | yield from cur.execute("SELECT '" + t + "'") | |
101 | r = yield from cur.fetchone() | |
102 | self.assertEqual(r[0], t) | |
103 | ||
104 | @run_until_complete | |
105 | def test_escape_string(self): | |
106 | con = self.connections[0] | |
107 | cur = yield from con.cursor() | |
108 | ||
109 | self.assertEqual(con.escape("foo'bar"), "'foo\\'bar'") | |
110 | # literal is alias for escape | |
111 | self.assertEqual(con.literal("foo'bar"), "'foo\\'bar'") | |
112 | yield from cur.execute("SET sql_mode='NO_BACKSLASH_ESCAPES'") | |
113 | self.assertEqual(con.escape("foo'bar"), "'foo''bar'") | |
114 | ||
115 | @run_until_complete | |
116 | def test_sql_mode_param(self): | |
117 | con = yield from self.connect(sql_mode='NO_BACKSLASH_ESCAPES') | |
118 | self.assertEqual(con.escape("foo'bar"), "'foo''bar'") | |
119 | ||
120 | @run_until_complete | |
121 | def test_init_param(self): | |
122 | init_command = "SET sql_mode='NO_BACKSLASH_ESCAPES';" | |
123 | con = yield from self.connect(init_command=init_command) | |
124 | self.assertEqual(con.escape("foo'bar"), "'foo''bar'") | |
125 | ||
126 | @run_until_complete | |
127 | def test_autocommit(self): | |
128 | con = self.connections[0] | |
129 | self.assertFalse(con.get_autocommit()) | |
130 | ||
131 | cur = yield from con.cursor() | |
132 | yield from cur.execute("SET AUTOCOMMIT=1") | |
133 | self.assertTrue(con.get_autocommit()) | |
134 | ||
135 | yield from con.autocommit(False) | |
136 | self.assertFalse(con.get_autocommit()) | |
137 | yield from cur.execute("SELECT @@AUTOCOMMIT") | |
138 | r = yield from cur.fetchone() | |
139 | self.assertEqual(r[0], 0) | |
140 | ||
141 | @run_until_complete | |
142 | def test_select_db(self): | |
143 | con = self.connections[0] | |
144 | current_db = self.db | |
145 | other_db = self.other_db | |
146 | cur = yield from con.cursor() | |
147 | yield from cur.execute('SELECT database()') | |
148 | r = yield from cur.fetchone() | |
149 | self.assertEqual(r[0], current_db) | |
150 | ||
151 | yield from con.select_db(other_db) | |
152 | yield from cur.execute('SELECT database()') | |
153 | r = yield from cur.fetchone() | |
154 | self.assertEqual(r[0], other_db) | |
155 | ||
156 | @run_until_complete | |
157 | def test_connection_gone_away(self): | |
158 | # test | |
159 | # http://dev.mysql.com/doc/refman/5.0/en/gone-away.html | |
160 | # http://dev.mysql.com/doc/refman/5.0/en/error-messages-client.html | |
161 | # error_cr_server_gone_error | |
162 | conn = yield from self.connect() | |
163 | cur = yield from conn.cursor() | |
164 | yield from cur.execute("SET wait_timeout=1") | |
165 | yield from asyncio.sleep(2, loop=self.loop) | |
166 | with self.assertRaises(aiomysql.OperationalError) as cm: | |
167 | yield from cur.execute("SELECT 1+1") | |
168 | # error occures while reading, not writing because of socket buffer. | |
169 | # self.assertEqual(cm.exception.args[0], 2006) | |
170 | self.assertIn(cm.exception.args[0], (2006, 2013)) | |
171 | conn.close() | |
172 | ||
173 | @run_until_complete | |
174 | def test_connection_info_methods(self): | |
175 | conn = yield from self.connect() | |
176 | # trhead id is int | |
177 | self.assertIsInstance(conn.thread_id(), int) | |
178 | self.assertIn(conn.character_set_name(), ('latin1', 'utf8mb4')) | |
179 | self.assertTrue(str(conn.port) in conn.get_host_info()) | |
180 | self.assertIsInstance(conn.get_server_info(), str) | |
181 | # protocol id is int | |
182 | self.assertIsInstance(conn.get_proto_info(), int) | |
183 | conn.close() | |
184 | ||
185 | @run_until_complete | |
186 | def test_connection_set_charset(self): | |
187 | conn = yield from self.connect() | |
188 | self.assertIn(conn.character_set_name(), ('latin1', 'utf8mb4')) | |
189 | yield from conn.set_charset('utf8') | |
190 | self.assertEqual(conn.character_set_name(), 'utf8') | |
191 | ||
192 | @run_until_complete | |
193 | def test_connection_ping(self): | |
194 | conn = yield from self.connect() | |
195 | yield from conn.ping() | |
196 | self.assertEqual(conn.closed, False) | |
197 | conn.close() | |
198 | yield from conn.ping() | |
199 | self.assertEqual(conn.closed, False) | |
200 | ||
201 | @run_until_complete | |
202 | def test_connection_properties(self): | |
203 | conn = yield from self.connect() | |
204 | self.assertEqual(conn.host, self.host) | |
205 | self.assertEqual(conn.port, self.port) | |
206 | self.assertEqual(conn.user, self.user) | |
207 | self.assertEqual(conn.db, self.db) | |
208 | self.assertEqual(conn.echo, False) | |
209 | conn.close() | |
210 | ||
211 | @run_until_complete | |
212 | def test_connection_double_ensure_closed(self): | |
213 | conn = yield from self.connect() | |
214 | self.assertFalse(conn.closed) | |
215 | yield from conn.ensure_closed() | |
216 | self.assertTrue(conn.closed) | |
217 | yield from conn.ensure_closed() | |
218 | self.assertTrue(conn.closed) | |
219 | ||
220 | @unittest.skipIf(not PY_341, | |
221 | "Python 3.3 doesnt support __del__ calls from GC") | |
222 | @run_until_complete | |
223 | def test___del__(self): | |
224 | conn = yield from aiomysql.connect(loop=self.loop, host=self.host, | |
225 | port=self.port, db=self.db, | |
226 | user=self.user, | |
227 | password=self.password) | |
228 | with self.assertWarns(ResourceWarning): | |
229 | del conn | |
230 | gc.collect() | |
231 | ||
232 | @run_until_complete | |
233 | def test_no_delay_warning(self): | |
234 | with self.assertWarns(DeprecationWarning): | |
235 | conn = yield from self.connect(no_delay=True) | |
236 | conn.close() | |
237 | ||
238 | @run_until_complete | |
239 | def test_no_delay_default_arg(self): | |
240 | conn = yield from self.connect() | |
241 | self.assertTrue(conn._no_delay) | |
242 | conn.close() | |
243 | ||
244 | @run_until_complete | |
245 | def test_previous_cursor_not_closed(self): | |
246 | conn = yield from self.connect() | |
247 | cur1 = yield from conn.cursor() | |
248 | yield from cur1.execute("SELECT 1; SELECT 2") | |
249 | cur2 = yield from conn.cursor() | |
250 | yield from cur2.execute("SELECT 3;") | |
251 | resp = yield from cur2.fetchone() | |
252 | self.assertEqual(resp[0], 3) | |
253 | ||
254 | @run_until_complete | |
255 | def test_commit_during_multi_result(self): | |
256 | conn = yield from self.connect() | |
257 | cur = yield from conn.cursor() | |
258 | yield from cur.execute("SELECT 1; SELECT 2;") | |
259 | yield from conn.commit() | |
260 | yield from cur.execute("SELECT 3;") | |
261 | resp = yield from cur.fetchone() | |
262 | self.assertEqual(resp[0], 3) | |
7 | ||
8 | ||
9 | @pytest.fixture() | |
10 | def fill_my_cnf(mysql_params): | |
11 | tests_root = os.path.abspath(os.path.dirname(__file__)) | |
12 | ||
13 | if "unix_socket" in mysql_params: | |
14 | tmpl_path = "fixtures/my.cnf.unix.tmpl" | |
15 | else: | |
16 | tmpl_path = "fixtures/my.cnf.tcp.tmpl" | |
17 | ||
18 | path1 = os.path.join(tests_root, tmpl_path) | |
19 | path2 = os.path.join(tests_root, 'fixtures/my.cnf') | |
20 | with open(path1) as f1: | |
21 | tmpl = f1.read() | |
22 | with open(path2, 'w') as f2: | |
23 | f2.write(tmpl.format_map(mysql_params)) | |
24 | ||
25 | ||
26 | @pytest.mark.run_loop | |
27 | async def test_connect_timeout(connection_creator): | |
28 | # All exceptions are caught and raised as operational errors | |
29 | with pytest.raises(aiomysql.OperationalError): | |
30 | await connection_creator(connect_timeout=0.000000000001) | |
31 | ||
32 | ||
33 | @pytest.mark.run_loop | |
34 | async def test_config_file(fill_my_cnf, connection_creator, mysql_params): | |
35 | tests_root = os.path.abspath(os.path.dirname(__file__)) | |
36 | path = os.path.join(tests_root, 'fixtures/my.cnf') | |
37 | conn = await connection_creator(read_default_file=path) | |
38 | ||
39 | if "unix_socket" in mysql_params: | |
40 | assert conn.unix_socket == mysql_params["unix_socket"] | |
41 | else: | |
42 | assert conn.host == mysql_params['host'] | |
43 | assert conn.port == mysql_params['port'] | |
44 | assert conn.user, mysql_params['user'] | |
45 | ||
46 | # make sure connection is working | |
47 | cur = await conn.cursor() | |
48 | await cur.execute('SELECT 42;') | |
49 | (r, ) = await cur.fetchone() | |
50 | assert r == 42 | |
51 | conn.close() | |
52 | ||
53 | ||
54 | @pytest.mark.run_loop | |
55 | async def test_config_file_with_different_group(fill_my_cnf, | |
56 | connection_creator, | |
57 | mysql_params): | |
58 | # same test with config file but actual settings | |
59 | # located in not default group. | |
60 | tests_root = os.path.abspath(os.path.dirname(__file__)) | |
61 | path = os.path.join(tests_root, 'fixtures/my.cnf') | |
62 | group = 'client_with_unix_socket' | |
63 | conn = await connection_creator(read_default_file=path, | |
64 | read_default_group=group) | |
65 | ||
66 | assert conn.charset == 'utf8' | |
67 | assert conn.user == 'root' | |
68 | ||
69 | # make sure connection is working | |
70 | cur = await conn.cursor() | |
71 | await cur.execute('SELECT 42;') | |
72 | (r, ) = await cur.fetchone() | |
73 | assert r == 42 | |
74 | conn.close() | |
75 | ||
76 | ||
77 | @pytest.mark.run_loop | |
78 | async def test_utf8mb4(connection_creator): | |
79 | """This test requires MySQL >= 5.5""" | |
80 | charset = 'utf8mb4' | |
81 | conn = await connection_creator(charset=charset) | |
82 | assert conn.charset == charset | |
83 | conn.close() | |
84 | ||
85 | ||
86 | @pytest.mark.run_loop | |
87 | async def test_largedata(connection_creator): | |
88 | """Large query and response (>=16MB)""" | |
89 | conn = await connection_creator() | |
90 | cur = await conn.cursor() | |
91 | await cur.execute("SELECT @@max_allowed_packet") | |
92 | r = await cur.fetchone() | |
93 | if r[0] < 16 * 1024 * 1024 + 10: | |
94 | pytest.skip('Set max_allowed_packet to bigger than 17MB') | |
95 | else: | |
96 | t = 'a' * (16 * 1024 * 1024) | |
97 | await cur.execute("SELECT '" + t + "'") | |
98 | r = await cur.fetchone() | |
99 | assert r[0] == t | |
100 | ||
101 | ||
102 | @pytest.mark.run_loop | |
103 | async def test_escape_string(connection_creator): | |
104 | con = await connection_creator() | |
105 | cur = await con.cursor() | |
106 | ||
107 | assert con.escape("foo'bar") == "'foo\\'bar'" | |
108 | # literal is alias for escape | |
109 | assert con.literal("foo'bar") == "'foo\\'bar'" | |
110 | await cur.execute("SET sql_mode='NO_BACKSLASH_ESCAPES'") | |
111 | assert con.escape("foo'bar") == "'foo''bar'" | |
112 | ||
113 | ||
114 | @pytest.mark.run_loop | |
115 | async def test_sql_mode_param(connection_creator): | |
116 | con = await connection_creator(sql_mode='NO_BACKSLASH_ESCAPES') | |
117 | assert con.escape("foo'bar") == "'foo''bar'" | |
118 | ||
119 | ||
120 | @pytest.mark.run_loop | |
121 | async def test_init_param(connection_creator): | |
122 | init_command = "SET sql_mode='NO_BACKSLASH_ESCAPES';" | |
123 | con = await connection_creator(init_command=init_command) | |
124 | assert con.escape("foo'bar") == "'foo''bar'" | |
125 | ||
126 | ||
127 | @pytest.mark.run_loop | |
128 | async def test_autocommit(connection_creator): | |
129 | con = await connection_creator() | |
130 | assert con.get_autocommit() is False | |
131 | ||
132 | cur = await con.cursor() | |
133 | await cur.execute("SET AUTOCOMMIT=1") | |
134 | assert con.get_autocommit() is True | |
135 | ||
136 | await con.autocommit(False) | |
137 | assert con.get_autocommit() is False | |
138 | await cur.execute("SELECT @@AUTOCOMMIT") | |
139 | r = await cur.fetchone() | |
140 | assert r[0] == 0 | |
141 | ||
142 | ||
143 | @pytest.mark.run_loop | |
144 | async def test_select_db(connection_creator): | |
145 | con = await connection_creator() | |
146 | current_db = 'test_pymysql' | |
147 | other_db = 'test_pymysql2' | |
148 | cur = await con.cursor() | |
149 | await cur.execute('SELECT database()') | |
150 | r = await cur.fetchone() | |
151 | assert r[0] == current_db | |
152 | ||
153 | await con.select_db(other_db) | |
154 | await cur.execute('SELECT database()') | |
155 | r = await cur.fetchone() | |
156 | assert r[0] == other_db | |
157 | ||
158 | ||
159 | @pytest.mark.run_loop | |
160 | async def test_connection_gone_away(connection_creator): | |
161 | # test | |
162 | # http://dev.mysql.com/doc/refman/5.0/en/gone-away.html | |
163 | # http://dev.mysql.com/doc/refman/5.0/en/error-messages-client.html | |
164 | # error_cr_server_gone_error | |
165 | conn = await connection_creator() | |
166 | cur = await conn.cursor() | |
167 | await cur.execute("SET wait_timeout=1") | |
168 | await asyncio.sleep(2) | |
169 | with pytest.raises(aiomysql.OperationalError) as cm: | |
170 | await cur.execute("SELECT 1+1") | |
171 | # error occures while reading, not writing because of socket buffer. | |
172 | # assert cm.exception.args[0] == 2006 | |
173 | assert cm.value.args[0] in (2006, 2013) | |
174 | conn.close() | |
175 | ||
176 | ||
177 | @pytest.mark.run_loop | |
178 | async def test_connection_info_methods(connection_creator, mysql_params): | |
179 | conn = await connection_creator() | |
180 | # trhead id is int | |
181 | assert isinstance(conn.thread_id(), int) | |
182 | assert conn.character_set_name() in ('latin1', 'utf8mb4') | |
183 | if "unix_socket" in mysql_params: | |
184 | assert mysql_params["unix_socket"] in conn.get_host_info() | |
185 | else: | |
186 | assert str(conn.port) in conn.get_host_info() | |
187 | assert isinstance(conn.get_server_info(), str) | |
188 | # protocol id is int | |
189 | assert isinstance(conn.get_proto_info(), int) | |
190 | conn.close() | |
191 | ||
192 | ||
193 | @pytest.mark.run_loop | |
194 | async def test_connection_set_charset(connection_creator): | |
195 | conn = await connection_creator() | |
196 | assert conn.character_set_name(), ('latin1' in 'utf8mb4') | |
197 | await conn.set_charset('utf8') | |
198 | assert conn.character_set_name() == 'utf8' | |
199 | ||
200 | ||
201 | @pytest.mark.run_loop | |
202 | async def test_connection_ping(connection_creator): | |
203 | conn = await connection_creator() | |
204 | await conn.ping() | |
205 | assert conn.closed is False | |
206 | conn.close() | |
207 | await conn.ping() | |
208 | assert conn.closed is False | |
209 | ||
210 | ||
211 | @pytest.mark.run_loop | |
212 | async def test_connection_properties(connection_creator, mysql_params): | |
213 | conn = await connection_creator() | |
214 | if "unix_socket" in mysql_params: | |
215 | assert conn.unix_socket == mysql_params["unix_socket"] | |
216 | else: | |
217 | assert conn.host == mysql_params['host'] | |
218 | assert conn.port == mysql_params['port'] | |
219 | assert conn.user == mysql_params['user'] | |
220 | assert conn.db == mysql_params['db'] | |
221 | assert conn.echo is False | |
222 | conn.close() | |
223 | ||
224 | ||
225 | @pytest.mark.run_loop | |
226 | async def test_connection_double_ensure_closed(connection_creator): | |
227 | conn = await connection_creator() | |
228 | assert conn.closed is False | |
229 | await conn.ensure_closed() | |
230 | assert conn.closed is True | |
231 | await conn.ensure_closed() | |
232 | assert conn.closed is True | |
233 | ||
234 | ||
235 | @pytest.mark.run_loop | |
236 | @pytest.mark.usefixtures("disable_gc") | |
237 | async def test___del__(connection_creator): | |
238 | conn = await connection_creator() | |
239 | with pytest.warns(ResourceWarning): | |
240 | del conn | |
241 | gc.collect() | |
242 | ||
243 | ||
244 | @pytest.mark.run_loop | |
245 | async def test_previous_cursor_not_closed(connection_creator): | |
246 | conn = await connection_creator() | |
247 | cur1 = await conn.cursor() | |
248 | await cur1.execute("SELECT 1; SELECT 2") | |
249 | cur2 = await conn.cursor() | |
250 | await cur2.execute("SELECT 3;") | |
251 | resp = await cur2.fetchone() | |
252 | assert resp[0] == 3 | |
253 | ||
254 | ||
255 | @pytest.mark.run_loop | |
256 | async def test_commit_during_multi_result(connection_creator): | |
257 | conn = await connection_creator() | |
258 | cur = await conn.cursor() | |
259 | await cur.execute("SELECT 1; SELECT 2;") | |
260 | await conn.commit() | |
261 | await cur.execute("SELECT 3;") | |
262 | resp = await cur.fetchone() | |
263 | assert resp[0] == 3 |
0 | 0 | import asyncio |
1 | from tests import base | |
2 | from tests._testutils import run_until_complete | |
3 | ||
4 | from aiomysql import ProgrammingError, Cursor, InterfaceError | |
5 | ||
6 | ||
7 | class TestCursor(base.AIOPyMySQLTestCase): | |
8 | ||
9 | @asyncio.coroutine | |
10 | def _prepare(self, conn): | |
11 | cur = yield from conn.cursor() | |
12 | yield from cur.execute("DROP TABLE IF EXISTS tbl;") | |
13 | ||
14 | yield from cur.execute("""CREATE TABLE tbl ( | |
15 | id MEDIUMINT NOT NULL AUTO_INCREMENT, | |
16 | name VARCHAR(255) NOT NULL, | |
17 | PRIMARY KEY (id));""") | |
18 | ||
19 | for i in [(1, 'a'), (2, 'b'), (3, 'c')]: | |
20 | yield from cur.execute("INSERT INTO tbl VALUES(%s, %s)", i) | |
21 | yield from cur.execute("DROP TABLE IF EXISTS tbl2") | |
22 | yield from cur.execute("""CREATE TABLE tbl2 | |
23 | (id int, name varchar(255))""") | |
24 | yield from conn.commit() | |
25 | ||
26 | @asyncio.coroutine | |
27 | def _prepare_procedure(self, conn): | |
28 | cur = yield from conn.cursor() | |
29 | yield from cur.execute("DROP PROCEDURE IF EXISTS myinc;") | |
30 | yield from cur.execute("""CREATE PROCEDURE myinc(p1 INT) | |
31 | BEGIN | |
32 | SELECT p1 + 1; | |
33 | END | |
34 | """) | |
35 | yield from conn.commit() | |
36 | ||
37 | @run_until_complete | |
38 | def test_description(self): | |
39 | conn = self.connections[0] | |
40 | yield from self._prepare(conn) | |
41 | cur = yield from conn.cursor() | |
42 | self.assertEqual(None, cur.description) | |
43 | yield from cur.execute('SELECT * from tbl;') | |
44 | ||
45 | self.assertEqual(len(cur.description), 2, | |
46 | 'cursor.description describes too many columns') | |
47 | ||
48 | self.assertEqual(len(cur.description[0]), 7, | |
49 | 'cursor.description[x] tuples must have ' | |
50 | '7 elements') | |
51 | ||
52 | self.assertEqual(cur.description[0][0].lower(), 'id', | |
53 | 'cursor.description[x][0] must return column ' | |
54 | 'name') | |
55 | ||
56 | self.assertEqual(cur.description[1][0].lower(), 'name', | |
57 | 'cursor.description[x][0] must return column ' | |
58 | 'name') | |
59 | ||
60 | # Make sure self.description gets reset, cursor should be | |
61 | # set to None in case of none resulting queries like DDL | |
62 | yield from cur.execute('DROP TABLE IF EXISTS foobar;') | |
63 | self.assertEqual(None, cur.description) | |
64 | ||
65 | @run_until_complete | |
66 | def test_cursor_properties(self): | |
67 | conn = self.connections[0] | |
68 | cur = yield from conn.cursor() | |
69 | self.assertIs(cur.connection, conn) | |
70 | cur.setinputsizes() | |
71 | cur.setoutputsizes() | |
72 | self.assertEqual(cur.echo, conn.echo) | |
73 | ||
74 | @run_until_complete | |
75 | def test_scroll_relative(self): | |
76 | conn = self.connections[0] | |
77 | yield from self._prepare(conn) | |
78 | cur = yield from conn.cursor() | |
79 | yield from cur.execute('SELECT * FROM tbl;') | |
80 | yield from cur.scroll(1) | |
81 | ret = yield from cur.fetchone() | |
82 | self.assertEqual((2, 'b'), ret) | |
83 | ||
84 | @run_until_complete | |
85 | def test_scroll_absolute(self): | |
86 | conn = self.connections[0] | |
87 | yield from self._prepare(conn) | |
88 | cur = yield from conn.cursor() | |
89 | yield from cur.execute('SELECT * FROM tbl;') | |
90 | yield from cur.scroll(2, mode='absolute') | |
91 | ret = yield from cur.fetchone() | |
92 | self.assertEqual((3, 'c'), ret) | |
93 | ||
94 | @run_until_complete | |
95 | def test_scroll_errors(self): | |
96 | conn = self.connections[0] | |
97 | cur = yield from conn.cursor() | |
98 | ||
99 | with self.assertRaises(ProgrammingError): | |
100 | yield from cur.scroll(2, mode='absolute') | |
101 | ||
102 | cur = yield from conn.cursor() | |
103 | yield from cur.execute('SELECT * FROM tbl;') | |
104 | ||
105 | with self.assertRaises(ProgrammingError): | |
106 | yield from cur.scroll(2, mode='not_valid_mode') | |
107 | ||
108 | @run_until_complete | |
109 | def test_scroll_index_error(self): | |
110 | conn = self.connections[0] | |
111 | yield from self._prepare(conn) | |
112 | cur = yield from conn.cursor() | |
113 | yield from cur.execute('SELECT * FROM tbl;') | |
114 | with self.assertRaises(IndexError): | |
115 | yield from cur.scroll(1000) | |
116 | ||
117 | @run_until_complete | |
118 | def test_close(self): | |
119 | conn = self.connections[0] | |
120 | cur = yield from conn.cursor() | |
121 | yield from cur.close() | |
122 | self.assertTrue(cur.closed) | |
123 | with self.assertRaises(ProgrammingError): | |
124 | yield from cur.execute('SELECT 1') | |
125 | # try to close for second time | |
126 | yield from cur.close() | |
127 | ||
128 | @run_until_complete | |
129 | def test_arraysize(self): | |
130 | conn = self.connections[0] | |
131 | cur = yield from conn.cursor() | |
132 | self.assertEqual(1, cur.arraysize) | |
133 | cur.arraysize = 10 | |
134 | self.assertEqual(10, cur.arraysize) | |
135 | ||
136 | @run_until_complete | |
137 | def test_rows(self): | |
138 | conn = self.connections[0] | |
139 | yield from self._prepare(conn) | |
140 | ||
141 | cur = yield from conn.cursor() | |
142 | yield from cur.execute('SELECT * from tbl') | |
143 | self.assertEqual(3, cur.rowcount) | |
144 | self.assertEqual(0, cur.rownumber) | |
145 | yield from cur.fetchone() | |
146 | self.assertEqual(1, cur.rownumber) | |
147 | self.assertEqual(None, cur.lastrowid) | |
148 | yield from cur.execute('INSERT INTO tbl VALUES (%s, %s)', (4, 'd')) | |
149 | self.assertNotEqual(0, cur.lastrowid) | |
150 | yield from conn.commit() | |
151 | ||
152 | @run_until_complete | |
153 | def test_callproc(self): | |
154 | conn = yield from self.connect() | |
155 | yield from self._prepare_procedure(conn) | |
156 | cur = yield from conn.cursor() | |
157 | yield from cur.callproc('myinc', [1]) | |
158 | ret = yield from cur.fetchone() | |
159 | self.assertEqual((2,), ret) | |
160 | yield from cur.close() | |
161 | with self.assertRaises(ProgrammingError): | |
162 | yield from cur.callproc('myinc', [1]) | |
163 | conn.close() | |
164 | ||
165 | @run_until_complete | |
166 | def test_fetchone_no_result(self): | |
167 | # test a fetchone() with no rows | |
168 | conn = self.connections[0] | |
169 | c = yield from conn.cursor() | |
170 | yield from c.execute("create table test_nr (b varchar(32))") | |
171 | try: | |
172 | data = "pymysql" | |
173 | yield from c.execute("insert into test_nr (b) values (%s)", | |
174 | (data,)) | |
175 | r = yield from c.fetchone() | |
176 | self.assertEqual(None, r) | |
177 | finally: | |
178 | yield from c.execute("drop table test_nr") | |
179 | ||
180 | @run_until_complete | |
181 | def test_fetchmany_no_result(self): | |
182 | conn = self.connections[0] | |
183 | cur = yield from conn.cursor() | |
184 | yield from cur.execute('DROP TABLE IF EXISTS foobar;') | |
185 | r = yield from cur.fetchmany() | |
186 | self.assertEqual([], r) | |
187 | ||
188 | @run_until_complete | |
189 | def test_fetchall_no_result(self): | |
190 | # test a fetchone() with no rows | |
191 | conn = self.connections[0] | |
192 | cur = yield from conn.cursor() | |
193 | yield from cur.execute('DROP TABLE IF EXISTS foobar;') | |
194 | r = yield from cur.fetchall() | |
195 | self.assertEqual([], r) | |
196 | ||
197 | @run_until_complete | |
198 | def test_fetchall_with_scroll(self): | |
199 | conn = self.connections[0] | |
200 | yield from self._prepare(conn) | |
201 | cur = yield from conn.cursor() | |
202 | yield from cur.execute('SELECT * FROM tbl;') | |
203 | yield from cur.scroll(1) | |
204 | ret = yield from cur.fetchall() | |
205 | self.assertEqual(((2, 'b'), (3, 'c')), ret) | |
206 | ||
207 | @run_until_complete | |
208 | def test_aggregates(self): | |
209 | """ test aggregate functions """ | |
210 | conn = self.connections[0] | |
211 | c = yield from conn.cursor() | |
212 | try: | |
213 | yield from c.execute('create table test_aggregates (i integer)') | |
214 | for i in range(0, 10): | |
215 | yield from c.execute( | |
216 | 'insert into test_aggregates (i) values (%s)', (i,)) | |
217 | yield from c.execute('select sum(i) from test_aggregates') | |
218 | r, = yield from c.fetchone() | |
219 | self.assertEqual(sum(range(0, 10)), r) | |
220 | finally: | |
221 | yield from c.execute('drop table test_aggregates') | |
222 | ||
223 | @run_until_complete | |
224 | def test_single_tuple(self): | |
225 | """ test a single tuple """ | |
226 | conn = self.connections[0] | |
227 | c = yield from conn.cursor() | |
228 | try: | |
229 | yield from c.execute( | |
230 | "create table mystuff (id integer primary key)") | |
231 | yield from c.execute("insert into mystuff (id) values (1)") | |
232 | yield from c.execute("insert into mystuff (id) values (2)") | |
233 | yield from c.execute("select id from mystuff where id in %s", | |
234 | ((1,),)) | |
235 | r = yield from c.fetchall() | |
236 | self.assertEqual([(1,)], list(r)) | |
237 | finally: | |
238 | yield from c.execute("drop table mystuff") | |
239 | ||
    @run_until_complete
    def test_executemany(self):
        # executemany() of a SELECT runs the statement once per parameter
        # set; the reported row_count is 3 here, while only the last
        # result set remains fetchable afterwards
        conn = self.connections[0]
        yield from self._prepare(conn)
        cur = yield from conn.cursor()
        self.assertEqual(None, cur.description)
        args = [1, 2, 3]
        row_count = yield from cur.executemany(
            'SELECT * FROM tbl WHERE id = %s;', args)
        self.assertEqual(row_count, 3)
        r = yield from cur.fetchall()
        # TODO: if this right behaviour
        self.assertEqual(((3, 'c'),), r)

        # calling execute many without args
        row_count = yield from cur.executemany('SELECT 1;', ())
        self.assertIsNone(row_count)
257 | ||
258 | @run_until_complete | |
259 | def test_custom_cursor(self): | |
260 | class MyCursor(Cursor): | |
261 | pass | |
262 | conn = self.connections[0] | |
263 | cur = yield from conn.cursor(MyCursor) | |
264 | self.assertIsInstance(cur, MyCursor) | |
265 | yield from cur.execute("SELECT 42;") | |
266 | (r, ) = yield from cur.fetchone() | |
267 | self.assertEqual(r, 42) | |
268 | ||
269 | @run_until_complete | |
270 | def test_custom_cursor_not_cursor_subclass(self): | |
271 | class MyCursor2: | |
272 | pass | |
273 | conn = self.connections[0] | |
274 | with self.assertRaises(TypeError): | |
275 | yield from conn.cursor(MyCursor2) | |
276 | ||
    @run_until_complete
    def test_morgify(self):
        # NOTE(review): "morgify" looks like a typo of "mogrify"; the
        # method name is kept so the test id stays stable.
        # mogrify() interpolates and escapes params without executing.
        conn = self.connections[0]
        cur = yield from conn.cursor()
        pairs = [(1, 'a'), (2, 'b'), (3, 'c')]
        sql = "INSERT INTO tbl VALUES(%s, %s)"
        results = [cur.mogrify(sql, p) for p in pairs]
        expected = ["INSERT INTO tbl VALUES(1, 'a')",
                    "INSERT INTO tbl VALUES(2, 'b')",
                    "INSERT INTO tbl VALUES(3, 'c')"]
        self.assertEqual(results, expected)
288 | ||
    @run_until_complete
    def test_execute_cancel(self):
        conn = self.connections[0]
        cur = yield from conn.cursor()
        # Cancel a cursor in the middle of execution, before it could
        # read even the first packet (SLEEP assures the timings)
        task = self.loop.create_task(cur.execute(
            "SELECT 1 as id, SLEEP(0.1) as xxx"))
        # NOTE(review): the loop= keyword is deprecated in modern asyncio
        yield from asyncio.sleep(0.05, loop=self.loop)
        task.cancel()
        try:
            yield from task
        except asyncio.CancelledError:
            pass

        # after the cancelled query the connection is left unusable, so
        # opening a new cursor must fail
        with self.assertRaises(InterfaceError):
            yield from conn.cursor()
1 | ||
import asyncio

import pytest

from aiomysql import ProgrammingError, Cursor, InterfaceError, OperationalError
from aiomysql.cursors import RE_INSERT_VALUES
6 | ||
7 | ||
async def _prepare(conn):
    """Create and populate the `tbl`/`tbl2` fixtures used by these tests."""
    cur = await conn.cursor()
    await cur.execute("DROP TABLE IF EXISTS tbl;")

    await cur.execute("""CREATE TABLE tbl (
        id MEDIUMINT NOT NULL AUTO_INCREMENT,
        name VARCHAR(255) NOT NULL,
        PRIMARY KEY (id));""")

    for row in [(1, 'a'), (2, 'b'), (3, 'c')]:
        await cur.execute("INSERT INTO tbl VALUES(%s, %s)", row)
    await cur.execute("DROP TABLE IF EXISTS tbl2")
    await cur.execute("""CREATE TABLE tbl2
        (id int, name varchar(255))""")
    await conn.commit()
23 | ||
24 | ||
async def _prepare_procedure(conn):
    """(Re)create the stored procedure `myinc` used by test_callproc."""
    cur = await conn.cursor()
    await cur.execute("DROP PROCEDURE IF EXISTS myinc;")
    await cur.execute("""CREATE PROCEDURE myinc(p1 INT)
    BEGIN
        SELECT p1 + 1;
    END
    """)
    await conn.commit()
34 | ||
35 | ||
@pytest.mark.run_loop
async def test_description(connection_creator):
    """cursor.description exposes 7-tuples per column and resets on DDL."""
    conn = await connection_creator()
    await _prepare(conn)
    cur = await conn.cursor()
    # no query executed yet -> no description
    assert cur.description is None
    await cur.execute('SELECT * from tbl;')

    assert len(cur.description) == 2, \
        'cursor.description describes too many columns'

    assert len(cur.description[0]) == 7, \
        'cursor.description[x] tuples must have 7 elements'

    assert cur.description[0][0].lower() == 'id', \
        'cursor.description[x][0] must return column name'

    assert cur.description[1][0].lower() == 'name', \
        'cursor.description[x][0] must return column name'

    # Make sure self.description gets reset, cursor should be
    # set to None in case of none resulting queries like DDL
    await cur.execute('DROP TABLE IF EXISTS foobar;')
    assert cur.description is None
60 | ||
61 | ||
@pytest.mark.run_loop
async def test_cursor_properties(connection_creator):
    """Smoke-check cursor accessors: connection, size hooks, echo."""
    conn = await connection_creator()
    cursor = await conn.cursor()
    assert cursor.connection is conn
    # exercised only for call coverage; no return value is checked
    cursor.setinputsizes()
    cursor.setoutputsizes()
    assert cursor.echo == conn.echo
70 | ||
71 | ||
@pytest.mark.run_loop
async def test_scroll_relative(connection_creator):
    """scroll(1) in the default relative mode skips exactly one row."""
    conn = await connection_creator()
    await _prepare(conn)
    cursor = await conn.cursor()
    await cursor.execute('SELECT * FROM tbl;')
    await cursor.scroll(1)
    row = await cursor.fetchone()
    assert row == (2, 'b')
81 | ||
82 | ||
@pytest.mark.run_loop
async def test_scroll_absolute(connection_creator):
    """scroll(2, mode='absolute') positions the cursor on the third row."""
    conn = await connection_creator()
    await _prepare(conn)
    cursor = await conn.cursor()
    await cursor.execute('SELECT * FROM tbl;')
    await cursor.scroll(2, mode='absolute')
    row = await cursor.fetchone()
    assert row == (3, 'c')
92 | ||
93 | ||
@pytest.mark.run_loop
async def test_scroll_errors(connection_creator):
    """scroll() raises ProgrammingError without a result set or bad mode."""
    conn = await connection_creator()
    await _prepare(conn)

    # no query has been executed on this cursor yet
    cursor = await conn.cursor()
    with pytest.raises(ProgrammingError):
        await cursor.scroll(2, mode='absolute')

    # a result set exists, but the mode name is invalid
    cursor = await conn.cursor()
    await cursor.execute('SELECT * FROM tbl;')
    with pytest.raises(ProgrammingError):
        await cursor.scroll(2, mode='not_valid_mode')
108 | ||
109 | ||
@pytest.mark.run_loop
async def test_scroll_index_error(connection_creator):
    """Scrolling past the end of the result set raises IndexError."""
    conn = await connection_creator()
    await _prepare(conn)
    cursor = await conn.cursor()
    await cursor.execute('SELECT * FROM tbl;')
    with pytest.raises(IndexError):
        await cursor.scroll(1000)
118 | ||
119 | ||
@pytest.mark.run_loop
async def test_close(connection_creator):
    """close() marks the cursor closed, blocks execute(), is idempotent."""
    conn = await connection_creator()
    cursor = await conn.cursor()
    await cursor.close()
    assert cursor.closed is True
    # executing on a closed cursor is a ProgrammingError
    with pytest.raises(ProgrammingError):
        await cursor.execute('SELECT 1')
    # closing a second time must be a harmless no-op
    await cursor.close()
130 | ||
131 | ||
@pytest.mark.run_loop
async def test_arraysize(connection_creator):
    """arraysize defaults to 1 and is a plain writable attribute."""
    conn = await connection_creator()
    cursor = await conn.cursor()
    assert cursor.arraysize == 1
    cursor.arraysize = 10
    assert cursor.arraysize == 10
139 | ||
140 | ||
@pytest.mark.run_loop
async def test_rows(connection_creator):
    # rowcount / rownumber / lastrowid bookkeeping across fetch and insert
    conn = await connection_creator()
    await _prepare(conn)

    cur = await conn.cursor()
    await cur.execute('SELECT * from tbl')
    assert 3 == cur.rowcount
    assert 0 == cur.rownumber
    await cur.fetchone()
    # fetching one row advances the cursor position
    assert 1 == cur.rownumber
    # a SELECT leaves lastrowid unset
    assert cur.lastrowid is None
    await cur.execute('INSERT INTO tbl VALUES (%s, %s)', (4, 'd'))
    # an INSERT produces a non-zero lastrowid
    assert 0 != cur.lastrowid
    await conn.commit()
156 | ||
157 | ||
@pytest.mark.run_loop
async def test_callproc(connection_creator):
    """callproc() runs `myinc` and fails cleanly on a closed cursor."""
    conn = await connection_creator()
    await _prepare_procedure(conn)
    cursor = await conn.cursor()
    await cursor.callproc('myinc', [1])
    result = await cursor.fetchone()
    assert result == (2,)
    await cursor.close()
    # a closed cursor must refuse further procedure calls
    with pytest.raises(ProgrammingError):
        await cursor.callproc('myinc', [1])
    conn.close()
170 | ||
171 | ||
@pytest.mark.run_loop
async def test_fetchone_no_result(connection_creator):
    """fetchone() returns None after a statement with no result set."""
    conn = await connection_creator()
    cursor = await conn.cursor()
    await cursor.execute("create table test_nr (b varchar(32))")
    try:
        data = "pymysql"
        # INSERT produces no result set, so there is nothing to fetch
        await cursor.execute("insert into test_nr (b) values (%s)", (data,))
        row = await cursor.fetchone()
        assert row is None
    finally:
        await cursor.execute("drop table test_nr")
185 | ||
186 | ||
@pytest.mark.run_loop
async def test_fetchmany_no_result(connection_creator):
    """fetchmany() yields an empty list when the query returned no rows."""
    conn = await connection_creator()
    cursor = await conn.cursor()
    await cursor.execute('DROP TABLE IF EXISTS foobar;')
    rows = await cursor.fetchmany()
    assert rows == []
194 | ||
195 | ||
@pytest.mark.run_loop
async def test_fetchall_no_result(connection_creator):
    # test a fetchall() with no rows (the original comment said
    # fetchone(); this test exercises fetchall())
    conn = await connection_creator()
    cur = await conn.cursor()
    await cur.execute('DROP TABLE IF EXISTS foobar;')
    r = await cur.fetchall()
    assert [] == r
204 | ||
205 | ||
@pytest.mark.run_loop
async def test_fetchall_with_scroll(connection_creator):
    """fetchall() after scroll(1) returns only the remaining rows."""
    conn = await connection_creator()
    await _prepare(conn)
    cursor = await conn.cursor()
    await cursor.execute('SELECT * FROM tbl;')
    await cursor.scroll(1)
    remaining = await cursor.fetchall()
    assert remaining == ((2, 'b'), (3, 'c'))
215 | ||
216 | ||
@pytest.mark.run_loop
async def test_aggregates(connection_creator):
    """ test aggregate functions """
    conn = await connection_creator()
    cur = await conn.cursor()
    try:
        await cur.execute('create table test_aggregates (i integer)')
        for value in range(0, 10):
            await cur.execute(
                'insert into test_aggregates (i) values (%s)', (value,))
        # server-side SUM must agree with the client-side sum
        await cur.execute('select sum(i) from test_aggregates')
        total, = await cur.fetchone()
        assert total == sum(range(0, 10))
    finally:
        await cur.execute('drop table test_aggregates')
232 | ||
233 | ||
@pytest.mark.run_loop
async def test_single_tuple(connection_creator):
    """ test a single tuple """
    conn = await connection_creator()
    cur = await conn.cursor()
    try:
        await cur.execute(
            "create table mystuff (id integer primary key)")
        await cur.execute("insert into mystuff (id) values (1)")
        await cur.execute("insert into mystuff (id) values (2)")
        # a one-element tuple must expand into a valid IN (...) clause
        await cur.execute("select id from mystuff where id in %s", ((1,),))
        rows = await cur.fetchall()
        assert list(rows) == [(1,)]
    finally:
        await cur.execute("drop table mystuff")
249 | ||
250 | ||
@pytest.mark.run_loop
async def test_executemany(connection_creator):
    # executemany() of a SELECT runs the statement once per parameter
    # set; the reported row_count is 3 here, while only the last result
    # set remains fetchable afterwards
    conn = await connection_creator()
    await _prepare(conn)
    cur = await conn.cursor()
    assert cur.description is None
    args = [1, 2, 3]
    row_count = await cur.executemany(
        'SELECT * FROM tbl WHERE id = %s;', args)
    assert row_count == 3
    r = await cur.fetchall()
    # TODO: if this right behaviour
    assert ((3, 'c'),) == r

    # calling execute many without args
    row_count = await cur.executemany('SELECT 1;', ())
    assert row_count is None
268 | ||
269 | ||
@pytest.mark.run_loop
async def test_custom_cursor(connection_creator):
    """conn.cursor() accepts a Cursor subclass and instantiates it."""
    class MyCursor(Cursor):
        pass
    conn = await connection_creator()
    cur = await conn.cursor(MyCursor)
    assert isinstance(cur, MyCursor)
    await cur.execute("SELECT 42;")
    (value, ) = await cur.fetchone()
    assert value == 42
280 | ||
281 | ||
@pytest.mark.run_loop
async def test_custom_cursor_not_cursor_subclass(connection_creator):
    """A class that is not a Cursor subclass must be rejected."""
    class MyCursor2:
        pass
    conn = await connection_creator()
    with pytest.raises(TypeError):
        await conn.cursor(MyCursor2)
289 | ||
290 | ||
@pytest.mark.run_loop
async def test_morgify(connection_creator):
    # NOTE(review): "morgify" looks like a typo of "mogrify"; the name
    # is kept so the test id stays stable.
    # mogrify() interpolates and escapes params without executing.
    conn = await connection_creator()
    cur = await conn.cursor()
    pairs = [(1, 'a'), (2, 'b'), (3, 'c')]
    sql = "INSERT INTO tbl VALUES(%s, %s)"
    results = [cur.mogrify(sql, p) for p in pairs]
    expected = ["INSERT INTO tbl VALUES(1, 'a')",
                "INSERT INTO tbl VALUES(2, 'b')",
                "INSERT INTO tbl VALUES(3, 'c')"]
    assert results == expected
302 | ||
303 | ||
@pytest.mark.run_loop
async def test_execute_cancel(connection_creator):
    conn = await connection_creator()
    cur = await conn.cursor()
    # Cancel a cursor in the middle of execution, before it could
    # read even the first packet (SLEEP assures the timings)
    task = asyncio.ensure_future(cur.execute(
        "SELECT 1 as id, SLEEP(0.1) as xxx"))
    await asyncio.sleep(0.05)
    task.cancel()
    try:
        await task
    except asyncio.CancelledError:
        pass

    # after the cancelled query the connection is left unusable, so
    # opening a new cursor must fail
    with pytest.raises(InterfaceError):
        await conn.cursor()
321 | ||
322 | ||
@pytest.mark.run_loop
async def test_execute_percentage(connection_creator):
    """execute() handles literal %% escapes in the column list."""
    # %% in column set
    conn = await connection_creator()
    async with conn.cursor() as cursor:
        await cursor.execute("DROP TABLE IF EXISTS percent_test")
        await cursor.execute("""\
            CREATE TABLE percent_test (
                `A%` INTEGER,
                `B%` INTEGER)""")

        query = "INSERT INTO percent_test (`A%%`, `B%%`) VALUES (%s, %s)"

        await cursor.execute(query, (3, 4))
337 | ||
338 | ||
@pytest.mark.run_loop
async def test_executemany_percentage(connection_creator):
    # %% in column set: executemany() must still recognise the
    # INSERT ... VALUES pattern and batch all rows into one statement
    conn = await connection_creator()
    async with conn.cursor() as cur:
        await cur.execute("DROP TABLE IF EXISTS percent_test")
        await cur.execute("""\
            CREATE TABLE percent_test (
                `A%` INTEGER,
                `B%` INTEGER)""")

        q = "INSERT INTO percent_test (`A%%`, `B%%`) VALUES (%s, %s)"

        # the multi-row batching only applies when this regex matches
        assert RE_INSERT_VALUES.match(q) is not None
        await cur.executemany(q, [(3, 4), (5, 6)])
        # peek at the private _last_executed buffer to prove a single
        # multi-row INSERT was sent
        assert cur._last_executed.endswith(b"(3, 4),(5, 6)"), \
            "executemany with %% not in one query"
356 | ||
357 | ||
def _sleep_query(db_type, mysql_ms, maria_s, sleep_s):
    """Build a SELECT with a per-statement execution-time limit.

    MySQL's MAX_EXECUTION_TIME optimizer hint takes milliseconds;
    MariaDB's SET STATEMENT max_statement_time takes seconds as
    int/float (introduced in 10.1).
    """
    if db_type == "mysql":
        return """
        SELECT /*+ MAX_EXECUTION_TIME({}) */
        name, sleep({}) FROM tbl
        """.format(mysql_ms, sleep_s)
    return """
    SET STATEMENT max_statement_time={} FOR
    SELECT name, sleep({}) FROM tbl
    """.format(maria_s, sleep_s)


@pytest.mark.run_loop
async def test_max_execution_time(mysql_server, connection_creator):
    """Per-statement timeouts: fast queries finish, slow ones raise."""
    conn = await connection_creator()
    await _prepare(conn)
    db_type = mysql_server["db_type"]
    async with conn.cursor() as cur:
        # this will sleep 0.01 seconds per row; 2s budget is ample
        await cur.execute(_sleep_query(db_type, 2000, 2, 0.01))
        # unlike SSCursor, Cursor returns a tuple of tuples here
        assert (await cur.fetchall()) == (
            ("a", 0),
            ("b", 0),
            ("c", 0),
        )

        await cur.execute(_sleep_query(db_type, 2000, 2, 0.01))
        assert (await cur.fetchone()) == ("a", 0)

        # this discards the previous unfinished query
        await cur.execute("SELECT 1")
        assert (await cur.fetchone()) == (1,)

        with pytest.raises(OperationalError) as cm:
            # a 1ms/0.001s budget cannot cover sleep(1) per row; in a
            # buffered cursor this should reliably raise an
            # OperationalError
            await cur.execute(_sleep_query(db_type, 1, 0.001, 1))

        if db_type == "mysql":
            # this constant was only introduced in MySQL 5.7, not sure
            # what was returned before, may have been ER_QUERY_INTERRUPTED

            # this constant is pending a new PyMySQL release
            # assert cm.value.args[0] == pymysql.constants.ER.QUERY_TIMEOUT
            assert cm.value.args[0] == 3024
        else:
            # this constant is pending a new PyMySQL release
            # assert cm.value.args[0] == pymysql.constants.ER.STATEMENT_TIMEOUT
            assert cm.value.args[0] == 1969

        # connection should still be fine at this point
        await cur.execute("SELECT 1")
        assert (await cur.fetchone()) == (1,)
0 | 0 | import copy |
1 | import asyncio | |
2 | 1 | |
3 | 2 | import aiomysql.cursors |
4 | from tests import base | |
5 | from tests._testutils import run_until_complete | |
3 | ||
4 | import pytest | |
6 | 5 | |
7 | 6 | |
8 | class TestDeserializeCursor(base.AIOPyMySQLTestCase): | |
9 | bob = ("bob", 21, {"k1": "pretty", "k2": [18, 25]}) | |
10 | jim = ("jim", 56, {"k1": "rich", "k2": [20, 60]}) | |
11 | fred = ("fred", 100, {"k1": "longevity", "k2": [100, 160]}) | |
7 | BOB = ("bob", 21, {"k1": "pretty", "k2": [18, 25]}) | |
8 | JIM = ("jim", 56, {"k1": "rich", "k2": [20, 60]}) | |
9 | FRED = ("fred", 100, {"k1": "longevity", "k2": [100, 160]}) | |
10 | ||
11 | ||
12 | @pytest.fixture() | |
13 | async def prepare(connection): | |
14 | ||
12 | 15 | havejson = True |
13 | 16 | |
14 | cursor_type = aiomysql.cursors.DeserializationCursor | |
17 | c = await connection.cursor(aiomysql.cursors.DeserializationCursor) | |
15 | 18 | |
16 | def setUp(self): | |
17 | super(TestDeserializeCursor, self).setUp() | |
18 | self.conn = conn = self.connections[0] | |
19 | # create a table ane some data to query | |
20 | await c.execute("drop table if exists deserialize_cursor") | |
21 | await c.execute("select VERSION()") | |
22 | v = await c.fetchone() | |
23 | version, *db_type = v[0].split('-', 1) | |
24 | version = float(".".join(version.split('.', 2)[:2])) | |
25 | ismariadb = db_type and 'mariadb' in db_type[0].lower() | |
26 | if ismariadb or version < 5.7: | |
27 | await c.execute( | |
28 | """CREATE TABLE deserialize_cursor | |
29 | (name char(20), age int , claim text)""") | |
30 | havejson = False | |
31 | else: | |
32 | await c.execute( | |
33 | """CREATE TABLE deserialize_cursor | |
34 | (name char(20), age int , claim json)""") | |
35 | data = [("bob", 21, '{"k1": "pretty", "k2": [18, 25]}'), | |
36 | ("jim", 56, '{"k1": "rich", "k2": [20, 60]}'), | |
37 | ("fred", 100, '{"k1": "longevity", "k2": [100, 160]}')] | |
38 | await c.executemany("insert into deserialize_cursor values " | |
39 | "(%s,%s,%s)", data) | |
19 | 40 | |
20 | @asyncio.coroutine | |
21 | def prepare(): | |
22 | c = yield from conn.cursor(self.cursor_type) | |
41 | return havejson | |
23 | 42 | |
24 | # create a table ane some data to query | |
25 | yield from c.execute("drop table if exists deserialize_cursor") | |
26 | yield from c.execute("select VERSION()") | |
27 | v = yield from c.fetchone() | |
28 | version, *db_type = v[0].split('-', 1) | |
29 | version = float(".".join(version.split('.', 2)[:2])) | |
30 | ismariadb = db_type and 'mariadb' in db_type[0].lower() | |
31 | if ismariadb or version < 5.7: | |
32 | yield from c.execute( | |
33 | """CREATE TABLE deserialize_cursor | |
34 | (name char(20), age int , claim text)""") | |
35 | self.havejson = False | |
36 | else: | |
37 | yield from c.execute( | |
38 | """CREATE TABLE deserialize_cursor | |
39 | (name char(20), age int , claim json)""") | |
40 | data = [("bob", 21, '{"k1": "pretty", "k2": [18, 25]}'), | |
41 | ("jim", 56, '{"k1": "rich", "k2": [20, 60]}'), | |
42 | ("fred", 100, '{"k1": "longevity", "k2": [100, 160]}')] | |
43 | yield from c.executemany("insert into deserialize_cursor values " | |
44 | "(%s,%s,%s)", | |
45 | data) | |
46 | 43 | |
47 | self.loop.run_until_complete(prepare()) | |
44 | @pytest.mark.run_loop | |
45 | async def test_deserialize_cursor(prepare, connection): | |
46 | havejson = await prepare | |
47 | if not havejson: | |
48 | return | |
49 | bob, jim, fred = copy.deepcopy(BOB), copy.deepcopy( | |
50 | JIM), copy.deepcopy(FRED) | |
51 | # all assert test compare to the structure as would come | |
52 | # out from MySQLdb | |
53 | conn = connection | |
54 | c = await conn.cursor(aiomysql.cursors.DeserializationCursor) | |
48 | 55 | |
49 | def tearDown(self): | |
50 | @asyncio.coroutine | |
51 | def shutdown(): | |
52 | c = yield from self.conn.cursor() | |
53 | yield from c.execute("drop table deserialize_cursor;") | |
56 | # pull back the single row dict for bob and check | |
57 | await c.execute("SELECT * from deserialize_cursor " | |
58 | "where name='bob'") | |
59 | r = await c.fetchone() | |
60 | assert bob == r, "fetchone via DeserializeCursor failed" | |
61 | # same again, but via fetchall => tuple) | |
62 | await c.execute("SELECT * from deserialize_cursor " | |
63 | "where name='bob'") | |
64 | r = await c.fetchall() | |
65 | assert [bob] == r, \ | |
66 | "fetch a 1 row result via fetchall failed via DeserializeCursor" | |
67 | # get all 3 row via fetchall | |
68 | await c.execute("SELECT * from deserialize_cursor") | |
69 | r = await c.fetchall() | |
70 | assert [bob, jim, fred] == r, "fetchall failed via DictCursor" | |
54 | 71 | |
55 | self.loop.run_until_complete(shutdown()) | |
56 | super(TestDeserializeCursor, self).tearDown() | |
72 | # get all 2 row via fetchmany | |
73 | await c.execute("SELECT * from deserialize_cursor") | |
74 | r = await c.fetchmany(2) | |
75 | assert [bob, jim] == r, "fetchmany failed via DictCursor" | |
76 | await c.execute('commit') | |
57 | 77 | |
58 | @run_until_complete | |
59 | def test_deserialize_cursor(self): | |
60 | if not self.havejson: | |
61 | return | |
62 | bob, jim, fred = copy.deepcopy(self.bob), copy.deepcopy( | |
63 | self.jim), copy.deepcopy(self.fred) | |
64 | # all assert test compare to the structure as would come | |
65 | # out from MySQLdb | |
66 | conn = self.conn | |
67 | c = yield from conn.cursor(self.cursor_type) | |
68 | 78 | |
69 | # pull back the single row dict for bob and check | |
70 | yield from c.execute("SELECT * from deserialize_cursor " | |
71 | "where name='bob'") | |
72 | r = yield from c.fetchone() | |
73 | self.assertEqual(bob, r, "fetchone via DeserializeCursor failed") | |
74 | # same again, but via fetchall => tuple) | |
75 | yield from c.execute("SELECT * from deserialize_cursor " | |
76 | "where name='bob'") | |
77 | r = yield from c.fetchall() | |
78 | self.assertEqual([bob], r, | |
79 | "fetch a 1 row result via fetchall failed via " | |
80 | "DeserializeCursor") | |
81 | # get all 3 row via fetchall | |
82 | yield from c.execute("SELECT * from deserialize_cursor") | |
83 | r = yield from c.fetchall() | |
84 | self.assertEqual([bob, jim, fred], r, | |
85 | "fetchall failed via DictCursor") | |
79 | @pytest.mark.run_loop | |
80 | async def test_deserialize_cursor_low_version(prepare, connection): | |
81 | havejson = await prepare | |
82 | if havejson: | |
83 | return | |
84 | bob = ("bob", 21, '{"k1": "pretty", "k2": [18, 25]}') | |
85 | jim = ("jim", 56, '{"k1": "rich", "k2": [20, 60]}') | |
86 | fred = ("fred", 100, '{"k1": "longevity", "k2": [100, 160]}') | |
87 | # all assert test compare to the structure as would come | |
88 | # out from MySQLdb | |
89 | conn = connection | |
90 | c = await conn.cursor(aiomysql.cursors.DeserializationCursor) | |
86 | 91 | |
87 | # get all 2 row via fetchmany | |
88 | yield from c.execute("SELECT * from deserialize_cursor") | |
89 | r = yield from c.fetchmany(2) | |
90 | self.assertEqual([bob, jim], r, "fetchmany failed via DictCursor") | |
91 | yield from c.execute('commit') | |
92 | # pull back the single row dict for bob and check | |
93 | await c.execute("SELECT * from deserialize_cursor where name='bob'") | |
94 | r = await c.fetchone() | |
95 | assert bob == r, "fetchone via DeserializeCursor failed" | |
96 | # same again, but via fetchall => tuple) | |
97 | await c.execute("SELECT * from deserialize_cursor " | |
98 | "where name='bob'") | |
99 | r = await c.fetchall() | |
100 | assert [bob] == r, \ | |
101 | "fetch a 1 row result via fetchall failed via DeserializeCursor" | |
102 | # get all 3 row via fetchall | |
103 | await c.execute("SELECT * from deserialize_cursor") | |
104 | r = await c.fetchall() | |
105 | assert [bob, jim, fred] == r, "fetchall failed via DictCursor" | |
92 | 106 | |
93 | @run_until_complete | |
94 | def test_deserialize_cursor_low_version(self): | |
95 | if self.havejson: | |
96 | return | |
97 | bob = ("bob", 21, '{"k1": "pretty", "k2": [18, 25]}') | |
98 | jim = ("jim", 56, '{"k1": "rich", "k2": [20, 60]}') | |
99 | fred = ("fred", 100, '{"k1": "longevity", "k2": [100, 160]}') | |
100 | # all assert test compare to the structure as would come | |
101 | # out from MySQLdb | |
102 | conn = self.conn | |
103 | c = yield from conn.cursor(self.cursor_type) | |
107 | # get all 2 row via fetchmany | |
108 | await c.execute("SELECT * from deserialize_cursor") | |
109 | r = await c.fetchmany(2) | |
110 | assert [bob, jim] == r, "fetchmany failed via DictCursor" | |
111 | await c.execute('commit') | |
104 | 112 | |
105 | # pull back the single row dict for bob and check | |
106 | yield from c.execute("SELECT * from deserialize_cursor " | |
107 | "where name='bob'") | |
108 | r = yield from c.fetchone() | |
109 | self.assertEqual(bob, r, "fetchone via DeserializeCursor failed") | |
110 | # same again, but via fetchall => tuple) | |
111 | yield from c.execute("SELECT * from deserialize_cursor " | |
112 | "where name='bob'") | |
113 | r = yield from c.fetchall() | |
114 | self.assertEqual([bob], r, | |
115 | "fetch a 1 row result via fetchall failed via " | |
116 | "DeserializeCursor") | |
117 | # get all 3 row via fetchall | |
118 | yield from c.execute("SELECT * from deserialize_cursor") | |
119 | r = yield from c.fetchall() | |
120 | self.assertEqual([bob, jim, fred], r, | |
121 | "fetchall failed via DictCursor") | |
122 | 113 | |
123 | # get all 2 row via fetchmany | |
124 | yield from c.execute("SELECT * from deserialize_cursor") | |
125 | r = yield from c.fetchmany(2) | |
126 | self.assertEqual([bob, jim], r, "fetchmany failed via DictCursor") | |
127 | yield from c.execute('commit') | |
@pytest.mark.run_loop
async def test_deserializedictcursor(prepare, connection):
    """DeserializationCursor + DictCursor yields dicts with parsed json."""
    havejson = await prepare
    if not havejson:
        # server has no JSON column support; nothing to verify
        return
    bob = {'name': 'bob', 'age': 21,
           'claim': {"k1": "pretty", "k2": [18, 25]}}
    cursor = await connection.cursor(aiomysql.cursors.DeserializationCursor,
                                     aiomysql.cursors.DictCursor)
    await cursor.execute("SELECT * from deserialize_cursor "
                         "where name='bob'")
    rows = await cursor.fetchall()
    assert rows == [bob], \
        "fetch a 1 row result via fetchall failed via DeserializationCursor"
128 | 129 | |
    @run_until_complete
    def test_deserializedictcursor(self):
        # DeserializationCursor + DictCursor: the json column must come
        # back parsed inside a dict-shaped row
        if not self.havejson:
            return
        bob = {'name': 'bob', 'age': 21,
               'claim': {"k1": "pretty", "k2": [18, 25]}}
        conn = self.conn
        c = yield from conn.cursor(aiomysql.cursors.DeserializationCursor,
                                   aiomysql.cursors.DictCursor)
        yield from c.execute("SELECT * from deserialize_cursor "
                             "where name='bob'")
        r = yield from c.fetchall()
        self.assertEqual([bob], r,
                         "fetch a 1 row result via fetchall failed via "
                         "DeserializationCursor")
144 | 130 | |
    @run_until_complete
    def test_ssdeserializecursor(self):
        # SSCursor combined with DeserializationCursor must still
        # deserialize the json column while streaming
        if not self.havejson:
            return
        conn = self.conn
        c = yield from conn.cursor(aiomysql.cursors.SSCursor,
                                   aiomysql.cursors.DeserializationCursor)
        yield from c.execute("SELECT * from deserialize_cursor "
                             "where name='bob'")
        r = yield from c.fetchall()
        self.assertEqual([self.bob], r,
                         "fetch a 1 row result via fetchall failed via "
                         "DeserializationCursor")
@pytest.mark.run_loop
async def test_ssdeserializecursor(prepare, connection):
    """SSCursor + DeserializationCursor deserializes json while streaming."""
    havejson = await prepare
    if not havejson:
        # server has no JSON column support; nothing to verify
        return
    cursor = await connection.cursor(aiomysql.cursors.SSCursor,
                                     aiomysql.cursors.DeserializationCursor)
    await cursor.execute("SELECT * from deserialize_cursor "
                         "where name='bob'")
    rows = await cursor.fetchall()
    assert rows == [BOB], \
        "fetch a 1 row result via fetchall failed via DeserializationCursor"
158 | 144 | |
    @run_until_complete
    def test_ssdeserializedictcursor(self):
        # all three mixins together: streaming, deserializing, dict rows
        if not self.havejson:
            return
        bob = {'name': 'bob', 'age': 21,
               'claim': {"k1": "pretty", "k2": [18, 25]}}
        conn = self.conn
        c = yield from conn.cursor(aiomysql.cursors.SSCursor,
                                   aiomysql.cursors.DeserializationCursor,
                                   aiomysql.cursors.DictCursor)
        yield from c.execute("SELECT * from deserialize_cursor "
                             "where name='bob'")
        r = yield from c.fetchall()
        self.assertEqual([bob], r,
                         "fetch a 1 row result via fetchall failed via "
                         "DeserializationCursor")
145 | ||
@pytest.mark.run_loop
async def test_ssdeserializedictcursor(prepare, connection):
    """SSCursor + DeserializationCursor + DictCursor combine correctly."""
    havejson = await prepare
    if not havejson:
        # server has no JSON column support; nothing to verify
        return
    bob = {'name': 'bob', 'age': 21,
           'claim': {"k1": "pretty", "k2": [18, 25]}}
    cursor = await connection.cursor(aiomysql.cursors.SSCursor,
                                     aiomysql.cursors.DeserializationCursor,
                                     aiomysql.cursors.DictCursor)
    await cursor.execute("SELECT * from deserialize_cursor "
                         "where name='bob'")
    rows = await cursor.fetchall()
    assert rows == [bob], \
        "fetch a 1 row result via fetchall failed via DeserializationCursor"
0 | import asyncio | |
1 | 0 | import datetime |
2 | 1 | |
2 | import pytest | |
3 | ||
3 | 4 | import aiomysql.cursors |
4 | from tests import base | |
5 | from tests._testutils import run_until_complete | |
6 | 5 | |
7 | 6 | |
8 | class TestDictCursor(base.AIOPyMySQLTestCase): | |
9 | bob = {'name': 'bob', 'age': 21, | |
10 | 'DOB': datetime.datetime(1990, 2, 6, 23, 4, 56)} | |
11 | jim = {'name': 'jim', 'age': 56, | |
12 | 'DOB': datetime.datetime(1955, 5, 9, 13, 12, 45)} | |
13 | fred = {'name': 'fred', 'age': 100, | |
14 | 'DOB': datetime.datetime(1911, 9, 12, 1, 1, 1)} | |
7 | BOB = {'name': 'bob', 'age': 21, | |
8 | 'DOB': datetime.datetime(1990, 2, 6, 23, 4, 56)} | |
9 | JIM = {'name': 'jim', 'age': 56, | |
10 | 'DOB': datetime.datetime(1955, 5, 9, 13, 12, 45)} | |
11 | FRED = {'name': 'fred', 'age': 100, | |
12 | 'DOB': datetime.datetime(1911, 9, 12, 1, 1, 1)} | |
15 | 13 | |
16 | cursor_type = aiomysql.cursors.DictCursor | |
14 | CURSOR_TYPE = aiomysql.cursors.DictCursor | |
17 | 15 | |
18 | def setUp(self): | |
19 | super(TestDictCursor, self).setUp() | |
20 | self.conn = conn = self.connections[0] | |
21 | 16 | |
22 | @asyncio.coroutine | |
23 | def prepare(): | |
24 | c = yield from conn.cursor(self.cursor_type) | |
17 | async def prepare(conn): | |
18 | c = await conn.cursor(CURSOR_TYPE) | |
25 | 19 | |
26 | # create a table ane some data to query | |
27 | yield from c.execute("drop table if exists dictcursor") | |
28 | yield from c.execute( | |
29 | """CREATE TABLE dictcursor (name char(20), age int , | |
30 | DOB datetime)""") | |
31 | data = [("bob", 21, "1990-02-06 23:04:56"), | |
32 | ("jim", 56, "1955-05-09 13:12:45"), | |
33 | ("fred", 100, "1911-09-12 01:01:01")] | |
34 | yield from c.executemany("insert into dictcursor values " | |
35 | "(%s,%s,%s)", | |
36 | data) | |
20 | # create a table ane some data to query | |
21 | await c.execute("drop table if exists dictcursor") | |
22 | await c.execute( | |
23 | """CREATE TABLE dictcursor (name char(20), age int , | |
24 | DOB datetime)""") | |
25 | data = [("bob", 21, "1990-02-06 23:04:56"), | |
26 | ("jim", 56, "1955-05-09 13:12:45"), | |
27 | ("fred", 100, "1911-09-12 01:01:01")] | |
28 | await c.executemany("insert into dictcursor values " | |
29 | "(%s,%s,%s)", data) | |
37 | 30 | |
38 | self.loop.run_until_complete(prepare()) | |
39 | 31 | |
40 | def tearDown(self): | |
41 | @asyncio.coroutine | |
42 | def shutdown(): | |
43 | c = yield from self.conn.cursor() | |
44 | yield from c.execute("drop table dictcursor;") | |
32 | @pytest.mark.run_loop | |
33 | async def test_dictcursor(connection): | |
34 | conn = connection | |
35 | await prepare(connection) | |
45 | 36 | |
46 | self.loop.run_until_complete(shutdown()) | |
47 | super(TestDictCursor, self).tearDown() | |
37 | bob, jim, fred = BOB.copy(), JIM.copy(), FRED.copy() | |
38 | # all assert test compare to the structure as would come | |
39 | # out from MySQLdb | |
40 | c = await conn.cursor(CURSOR_TYPE) | |
48 | 41 | |
49 | @run_until_complete | |
50 | def test_dictcursor(self): | |
51 | bob, jim, fred = self.bob.copy(), self.jim.copy(), self.fred.copy() | |
52 | # all assert test compare to the structure as would come | |
53 | # out from MySQLdb | |
54 | conn = self.conn | |
55 | c = yield from conn.cursor(self.cursor_type) | |
42 | # try an update which should return no rows | |
43 | await c.execute("update dictcursor set age=20 where name='bob'") | |
44 | bob['age'] = 20 | |
45 | # pull back the single row dict for bob and check | |
46 | await c.execute("SELECT * from dictcursor where name='bob'") | |
47 | r = await c.fetchone() | |
48 | assert bob == r, "fetchone via DictCursor failed" | |
49 | # same again, but via fetchall => tuple) | |
50 | await c.execute("SELECT * from dictcursor where name='bob'") | |
51 | r = await c.fetchall() | |
52 | assert [bob] == r, \ | |
53 | "fetch a 1 row result via fetchall failed via DictCursor" | |
56 | 54 | |
57 | # try an update which should return no rows | |
58 | yield from c.execute("update dictcursor set age=20 where name='bob'") | |
59 | bob['age'] = 20 | |
60 | # pull back the single row dict for bob and check | |
61 | yield from c.execute("SELECT * from dictcursor where name='bob'") | |
62 | r = yield from c.fetchone() | |
63 | self.assertEqual(bob, r, "fetchone via DictCursor failed") | |
64 | # same again, but via fetchall => tuple) | |
65 | yield from c.execute("SELECT * from dictcursor where name='bob'") | |
66 | r = yield from c.fetchall() | |
67 | self.assertEqual([bob], r, | |
68 | "fetch a 1 row result via fetchall failed via " | |
69 | "DictCursor") | |
55 | # get all 3 row via fetchall | |
56 | await c.execute("SELECT * from dictcursor") | |
57 | r = await c.fetchall() | |
58 | assert [bob, jim, fred] == r, "fetchall failed via DictCursor" | |
70 | 59 | |
71 | # get all 3 row via fetchall | |
72 | yield from c.execute("SELECT * from dictcursor") | |
73 | r = yield from c.fetchall() | |
74 | self.assertEqual([bob, jim, fred], r, "fetchall failed via DictCursor") | |
60 | # get all 2 row via fetchmany | |
61 | await c.execute("SELECT * from dictcursor") | |
62 | r = await c.fetchmany(2) | |
63 | assert [bob, jim] == r, "fetchmany failed via DictCursor" | |
64 | await c.execute('commit') | |
75 | 65 | |
76 | # get all 2 row via fetchmany | |
77 | yield from c.execute("SELECT * from dictcursor") | |
78 | r = yield from c.fetchmany(2) | |
79 | self.assertEqual([bob, jim], r, "fetchmany failed via DictCursor") | |
80 | yield from c.execute('commit') | |
81 | 66 | |
82 | @run_until_complete | |
83 | def test_custom_dict(self): | |
84 | class MyDict(dict): | |
85 | pass | |
67 | @pytest.mark.run_loop | |
68 | async def test_custom_dict(connection): | |
69 | conn = connection | |
70 | await prepare(connection) | |
86 | 71 | |
87 | class MyDictCursor(self.cursor_type): | |
88 | dict_type = MyDict | |
72 | class MyDict(dict): | |
73 | pass | |
89 | 74 | |
90 | keys = ['name', 'age', 'DOB'] | |
91 | bob = MyDict([(k, self.bob[k]) for k in keys]) | |
92 | jim = MyDict([(k, self.jim[k]) for k in keys]) | |
93 | fred = MyDict([(k, self.fred[k]) for k in keys]) | |
75 | class MyDictCursor(CURSOR_TYPE): | |
76 | dict_type = MyDict | |
94 | 77 | |
95 | cur = yield from self.conn.cursor(MyDictCursor) | |
96 | yield from cur.execute("SELECT * FROM dictcursor WHERE name='bob'") | |
97 | r = yield from cur.fetchone() | |
98 | self.assertEqual(bob, r, "fetchone() returns MyDictCursor") | |
78 | keys = ['name', 'age', 'DOB'] | |
79 | bob = MyDict([(k, BOB[k]) for k in keys]) | |
80 | jim = MyDict([(k, JIM[k]) for k in keys]) | |
81 | fred = MyDict([(k, FRED[k]) for k in keys]) | |
99 | 82 | |
100 | yield from cur.execute("SELECT * FROM dictcursor") | |
101 | r = yield from cur.fetchall() | |
102 | self.assertEqual([bob, jim, fred], r, | |
103 | "fetchall failed via MyDictCursor") | |
83 | cur = await conn.cursor(MyDictCursor) | |
84 | await cur.execute("SELECT * FROM dictcursor WHERE name='bob'") | |
85 | r = await cur.fetchone() | |
86 | assert bob == r, "fetchone() returns MyDictCursor" | |
104 | 87 | |
105 | yield from cur.execute("SELECT * FROM dictcursor") | |
106 | r = yield from cur.fetchmany(2) | |
107 | self.assertEqual([bob, jim], r, | |
108 | "list failed via MyDictCursor") | |
88 | await cur.execute("SELECT * FROM dictcursor") | |
89 | r = await cur.fetchall() | |
90 | assert [bob, jim, fred] == r, "fetchall failed via MyDictCursor" | |
109 | 91 | |
110 | @run_until_complete | |
111 | def test_ssdictcursor(self): | |
112 | conn = self.conn | |
113 | c = yield from conn.cursor(aiomysql.cursors.SSDictCursor) | |
114 | yield from c.execute("SELECT * from dictcursor where name='bob'") | |
115 | r = yield from c.fetchall() | |
116 | self.assertEqual([self.bob], r, | |
117 | "fetch a 1 row result via fetchall failed via " | |
118 | "DictCursor") | |
92 | await cur.execute("SELECT * FROM dictcursor") | |
93 | r = await cur.fetchmany(2) | |
94 | assert [bob, jim] == r, "list failed via MyDictCursor" | |
95 | ||
96 | ||
97 | @pytest.mark.run_loop | |
98 | async def test_ssdictcursor(connection): | |
99 | conn = connection | |
100 | await prepare(connection) | |
101 | ||
102 | c = await conn.cursor(aiomysql.cursors.SSDictCursor) | |
103 | await c.execute("SELECT * from dictcursor where name='bob'") | |
104 | r = await c.fetchall() | |
105 | assert [BOB] == r,\ | |
106 | "fetch a 1 row result via fetchall failed via DictCursor" |
0 | 0 | import datetime |
1 | import unittest | |
2 | import aiomysql | |
3 | 1 | |
4 | 2 | import pytest |
5 | 3 | from pymysql.err import Warning |
6 | from tests import base | |
7 | from tests._testutils import run_until_complete | |
8 | ||
9 | ||
10 | class TestOldIssues(base.AIOPyMySQLTestCase): | |
11 | @run_until_complete | |
12 | def test_issue_3(self): | |
13 | """ undefined methods datetime_or_None, date_or_None """ | |
14 | conn = self.connections[0] | |
15 | c = yield from conn.cursor() | |
16 | yield from c.execute("drop table if exists issue3") | |
17 | yield from c.execute( | |
18 | "create table issue3 (d date, t time, dt datetime, ts timestamp)") | |
4 | ||
5 | import aiomysql | |
6 | ||
7 | ||
8 | @pytest.mark.run_loop | |
9 | async def test_issue_3(connection): | |
10 | """ undefined methods datetime_or_None, date_or_None """ | |
11 | conn = connection | |
12 | c = await conn.cursor() | |
13 | await c.execute("drop table if exists issue3") | |
14 | await c.execute( | |
15 | "create table issue3 (d date, t time, dt datetime, ts timestamp)") | |
16 | try: | |
17 | await c.execute( | |
18 | "insert into issue3 (d, t, dt, ts) values (%s,%s,%s,%s)", | |
19 | (None, None, None, None)) | |
20 | await c.execute("select d from issue3") | |
21 | r = await c.fetchone() | |
22 | assert r[0] is None | |
23 | await c.execute("select t from issue3") | |
24 | r = await c.fetchone() | |
25 | assert r[0] is None | |
26 | await c.execute("select dt from issue3") | |
27 | r = await c.fetchone() | |
28 | assert r[0] is None | |
29 | await c.execute("select ts from issue3") | |
30 | r = await c.fetchone() | |
31 | assert type(r[0]) in (type(None), datetime.datetime) | |
32 | finally: | |
33 | await c.execute("drop table issue3") | |
34 | ||
35 | ||
36 | @pytest.mark.run_loop | |
37 | async def test_issue_4(connection): | |
38 | """ can't retrieve TIMESTAMP fields """ | |
39 | conn = connection | |
40 | c = await conn.cursor() | |
41 | await c.execute("drop table if exists issue4") | |
42 | await c.execute("create table issue4 (ts timestamp)") | |
43 | try: | |
44 | await c.execute("insert into issue4 (ts) values (now())") | |
45 | await c.execute("select ts from issue4") | |
46 | r = await c.fetchone() | |
47 | assert isinstance(r[0], datetime.datetime) | |
48 | finally: | |
49 | await c.execute("drop table issue4") | |
50 | ||
51 | ||
52 | @pytest.mark.run_loop | |
53 | async def test_issue_5(connection): | |
54 | """ query on information_schema.tables fails """ | |
55 | conn = connection | |
56 | cur = await conn.cursor() | |
57 | await cur.execute("select * from information_schema.tables") | |
58 | ||
59 | ||
60 | @pytest.mark.run_loop | |
61 | async def test_issue_6(connection_creator): | |
62 | # test for exception: TypeError: ord() expected a character, | |
63 | # but string of length 0 found | |
64 | conn = await connection_creator(db='mysql') | |
65 | c = await conn.cursor() | |
66 | assert conn.db == 'mysql' | |
67 | await c.execute("select * from user") | |
68 | await conn.ensure_closed() | |
69 | ||
70 | ||
71 | @pytest.mark.run_loop | |
72 | async def test_issue_8(connection): | |
73 | """ Primary Key and Index error when selecting data """ | |
74 | conn = connection | |
75 | c = await conn.cursor() | |
76 | await c.execute("drop table if exists test") | |
77 | await c.execute("""CREATE TABLE `test` ( | |
78 | `station` int(10) NOT NULL DEFAULT '0', | |
79 | `dh` datetime NOT NULL DEFAULT '2020-04-25 22:39:12', | |
80 | `echeance` int(1) NOT NULL DEFAULT '0', `me` double DEFAULT NULL, | |
81 | `mo` double DEFAULT NULL, PRIMARY | |
82 | KEY (`station`,`dh`,`echeance`)) ENGINE=MyISAM DEFAULT | |
83 | CHARSET=latin1;""") | |
84 | try: | |
85 | await c.execute("SELECT * FROM test") | |
86 | assert 0 == c.rowcount | |
87 | await c.execute( | |
88 | "ALTER TABLE `test` ADD INDEX `idx_station` (`station`)") | |
89 | await c.execute("SELECT * FROM test") | |
90 | assert 0 == c.rowcount | |
91 | finally: | |
92 | await c.execute("drop table test") | |
93 | ||
94 | ||
95 | @pytest.mark.run_loop | |
96 | async def test_issue_13(connection): | |
97 | """ can't handle large result fields """ | |
98 | conn = connection | |
99 | cur = await conn.cursor() | |
100 | await cur.execute("drop table if exists issue13") | |
101 | try: | |
102 | await cur.execute("create table issue13 (t text)") | |
103 | # ticket says 18k | |
104 | size = 18 * 1024 | |
105 | await cur.execute("insert into issue13 (t) values (%s)", | |
106 | ("x" * size,)) | |
107 | await cur.execute("select t from issue13") | |
108 | # use assertTrue so that obscenely huge error messages don't print | |
109 | r = await cur.fetchone() | |
110 | assert "x" * size == r[0] | |
111 | finally: | |
112 | await cur.execute("drop table issue13") | |
113 | ||
114 | ||
115 | @pytest.mark.run_loop | |
116 | async def test_issue_15(connection): | |
117 | """ query should be expanded before perform character encoding """ | |
118 | conn = connection | |
119 | c = await conn.cursor() | |
120 | await c.execute("drop table if exists issue15") | |
121 | await c.execute("create table issue15 (t varchar(32))") | |
122 | try: | |
123 | await c.execute("insert into issue15 (t) values (%s)", | |
124 | (u'\xe4\xf6\xfc',)) | |
125 | await c.execute("select t from issue15") | |
126 | r = await c.fetchone() | |
127 | assert u'\xe4\xf6\xfc' == r[0] | |
128 | finally: | |
129 | await c.execute("drop table issue15") | |
130 | ||
131 | ||
132 | @pytest.mark.run_loop | |
133 | async def test_issue_16(connection): | |
134 | """ Patch for string and tuple escaping """ | |
135 | conn = connection | |
136 | c = await conn.cursor() | |
137 | await c.execute("drop table if exists issue16") | |
138 | await c.execute("create table issue16 (name varchar(32) " | |
139 | "primary key, email varchar(32))") | |
140 | try: | |
141 | await c.execute("insert into issue16 (name, email) values " | |
142 | "('pete', 'floydophone')") | |
143 | await c.execute("select email from issue16 where name=%s", | |
144 | ("pete",)) | |
145 | r = await c.fetchone() | |
146 | assert "floydophone" == r[0] | |
147 | finally: | |
148 | await c.execute("drop table issue16") | |
149 | ||
150 | ||
151 | @pytest.mark.skip( | |
152 | "test_issue_17() requires a custom, legacy MySQL configuration and " | |
153 | "will not be run.") | |
154 | @pytest.mark.run_loop | |
155 | async def test_issue_17(connection, connection_creator, mysql_params): | |
156 | """ could not connect mysql use passwod """ | |
157 | conn = connection | |
158 | c = await conn.cursor() | |
159 | db = mysql_params['db'] | |
160 | # grant access to a table to a user with a password | |
161 | try: | |
162 | await c.execute("drop table if exists issue17") | |
163 | await c.execute( | |
164 | "create table issue17 (x varchar(32) primary key)") | |
165 | await c.execute( | |
166 | "insert into issue17 (x) values ('hello, world!')") | |
167 | await c.execute("grant all privileges on %s.issue17 to " | |
168 | "'issue17user'@'%%' identified by '1234'" | |
169 | % db) | |
170 | await conn.commit() | |
171 | ||
172 | conn2 = await connection_creator(user="issue17user", | |
173 | passwd="1234") | |
174 | c2 = await conn2.cursor() | |
175 | await c2.execute("select x from issue17") | |
176 | r = await c2.fetchone() | |
177 | assert "hello == world!", r[0] | |
178 | finally: | |
179 | await c.execute("drop table issue17") | |
180 | ||
181 | ||
182 | @pytest.mark.run_loop | |
183 | async def test_issue_34(connection_creator): | |
184 | try: | |
185 | await connection_creator(host="localhost", port=1237, | |
186 | user="root", unix_socket=None) | |
187 | pytest.fail() | |
188 | except aiomysql.OperationalError as e: | |
189 | assert 2003 == e.args[0] | |
190 | except Exception: | |
191 | pytest.fail() | |
192 | ||
193 | ||
194 | @pytest.mark.run_loop | |
195 | async def test_issue_33(connection_creator): | |
196 | conn = await connection_creator(charset='utf8') | |
197 | c = await conn.cursor() | |
198 | try: | |
199 | await c.execute( | |
200 | b"drop table if exists hei\xc3\x9fe".decode("utf8")) | |
201 | await c.execute( | |
202 | b"create table hei\xc3\x9fe (name varchar(32))".decode("utf8")) | |
203 | await c.execute(b"insert into hei\xc3\x9fe (name) " | |
204 | b"values ('Pi\xc3\xb1ata')". | |
205 | decode("utf8")) | |
206 | await c.execute( | |
207 | b"select name from hei\xc3\x9fe".decode("utf8")) | |
208 | r = await c.fetchone() | |
209 | assert b"Pi\xc3\xb1ata".decode("utf8") == r[0] | |
210 | finally: | |
211 | await c.execute(b"drop table hei\xc3\x9fe".decode("utf8")) | |
212 | ||
213 | ||
214 | @pytest.mark.skip("This test requires manual intervention") | |
215 | @pytest.mark.run_loop | |
216 | async def test_issue_35(connection): | |
217 | conn = connection | |
218 | c = await conn.cursor() | |
219 | print("sudo killall -9 mysqld within the next 10 seconds") | |
220 | try: | |
221 | await c.execute("select sleep(10)") | |
222 | pytest.fail() | |
223 | except aiomysql.OperationalError as e: | |
224 | assert 2013 == e.args[0] | |
225 | ||
226 | ||
227 | @pytest.mark.run_loop | |
228 | async def test_issue_36(connection_creator): | |
229 | conn = await connection_creator() | |
230 | c = await conn.cursor() | |
231 | # kill connections[0] | |
232 | await c.execute("show processlist") | |
233 | kill_id = None | |
234 | rows = await c.fetchall() | |
235 | for row in rows: | |
236 | id = row[0] | |
237 | info = row[7] | |
238 | if info == "show processlist": | |
239 | kill_id = id | |
240 | break | |
241 | try: | |
242 | # now nuke the connection | |
243 | await conn.kill(kill_id) | |
244 | # make sure this connection has broken | |
245 | await c.execute("show tables") | |
246 | pytest.fail() | |
247 | except Exception: | |
248 | pass | |
249 | ||
250 | # check the process list from the other connection | |
251 | conn2 = await connection_creator() | |
252 | c = await conn2.cursor() | |
253 | await c.execute("show processlist") | |
254 | rows = await c.fetchall() | |
255 | ids = [row[0] for row in rows] | |
256 | ||
257 | try: | |
258 | assert kill_id not in ids | |
259 | except AssertionError: | |
260 | # FIXME: figure out why this is failing | |
261 | pytest.xfail("https://github.com/aio-libs/aiomysql/issues/714") | |
262 | ||
263 | ||
264 | @pytest.mark.run_loop | |
265 | async def test_issue_37(connection): | |
266 | conn = connection | |
267 | c = await conn.cursor() | |
268 | assert 1 == (await c.execute("SELECT @foo")) | |
269 | ||
270 | r = await c.fetchone() | |
271 | assert (None,) == r | |
272 | assert 0 == (await c.execute("SET @foo = 'bar'")) | |
273 | await c.execute("set @foo = 'bar'") | |
274 | ||
275 | ||
276 | @pytest.mark.run_loop | |
277 | async def test_issue_38(connection): | |
278 | conn = connection | |
279 | c = await conn.cursor() | |
280 | # reduced size for most default mysql installs | |
281 | datum = "a" * 1024 * 1023 | |
282 | ||
283 | try: | |
284 | await c.execute("drop table if exists issue38") | |
285 | await c.execute( | |
286 | "create table issue38 (id integer, data mediumblob)") | |
287 | await c.execute("insert into issue38 values (1, %s)", | |
288 | (datum,)) | |
289 | finally: | |
290 | await c.execute("drop table issue38") | |
291 | ||
292 | ||
293 | @pytest.mark.run_loop | |
294 | async def disabled_test_issue_54(connection): | |
295 | conn = connection | |
296 | c = await conn.cursor() | |
297 | await c.execute("drop table if exists issue54") | |
298 | big_sql = "select * from issue54 where " | |
299 | big_sql += " and ".join("%d=%d" % (i, i) for i in range(0, 100000)) | |
300 | ||
301 | try: | |
302 | await c.execute( | |
303 | "create table issue54 (id integer primary key)") | |
304 | await c.execute("insert into issue54 (id) values (7)") | |
305 | await c.execute(big_sql) | |
306 | ||
307 | r = await c.fetchone() | |
308 | assert 7 == r[0] | |
309 | finally: | |
310 | await c.execute("drop table issue54") | |
311 | ||
312 | ||
313 | @pytest.mark.run_loop | |
314 | async def test_issue_66(connection): | |
315 | """ 'Connection' object has no attribute 'insert_id' """ | |
316 | conn = connection | |
317 | c = await conn.cursor() | |
318 | assert 0 == conn.insert_id() | |
319 | try: | |
320 | await c.execute("drop table if exists issue66") | |
321 | await c.execute("create table issue66 (id integer primary " | |
322 | "key auto_increment, x integer)") | |
323 | await c.execute("insert into issue66 (x) values (1)") | |
324 | await c.execute("insert into issue66 (x) values (1)") | |
325 | assert 2 == conn.insert_id() | |
326 | finally: | |
327 | await c.execute("drop table issue66") | |
328 | ||
329 | ||
330 | @pytest.mark.run_loop | |
331 | async def test_issue_79(connection): | |
332 | """ Duplicate field overwrites the previous one in the result | |
333 | of DictCursor """ | |
334 | conn = connection | |
335 | c = await conn.cursor(aiomysql.cursors.DictCursor) | |
336 | ||
337 | await c.execute("drop table if exists a") | |
338 | await c.execute("drop table if exists b") | |
339 | await c.execute("""CREATE TABLE a (id int, value int)""") | |
340 | await c.execute("""CREATE TABLE b (id int, value int)""") | |
341 | ||
342 | a = (1, 11) | |
343 | b = (1, 22) | |
344 | try: | |
345 | await c.execute("insert into a values (%s, %s)", a) | |
346 | await c.execute("insert into b values (%s, %s)", b) | |
347 | ||
348 | await c.execute("SELECT * FROM a inner join b on a.id = b.id") | |
349 | r, *_ = await c.fetchall() | |
350 | assert r['id'] == 1 | |
351 | assert r['value'] == 11 | |
352 | assert r['b.value'] == 22 | |
353 | finally: | |
354 | await c.execute("drop table a") | |
355 | await c.execute("drop table b") | |
356 | ||
357 | ||
358 | @pytest.mark.run_loop | |
359 | async def test_issue_95(connection): | |
360 | """ Leftover trailing OK packet for "CALL my_sp" queries """ | |
361 | conn = connection | |
362 | cur = await conn.cursor() | |
363 | await cur.execute("DROP PROCEDURE IF EXISTS `foo`") | |
364 | await cur.execute("""CREATE PROCEDURE `foo` () | |
365 | BEGIN | |
366 | SELECT 1; | |
367 | END""") | |
368 | try: | |
369 | await cur.execute("""CALL foo()""") | |
370 | await cur.execute("""SELECT 1""") | |
371 | r = await cur.fetchone() | |
372 | assert r[0] == 1 | |
373 | finally: | |
374 | await cur.execute("DROP PROCEDURE IF EXISTS `foo`") | |
375 | ||
376 | ||
377 | @pytest.mark.run_loop | |
378 | async def test_issue_114(connection_creator): | |
379 | """ autocommit is not set after reconnecting with ping() """ | |
380 | conn = await connection_creator(charset="utf8") | |
381 | await conn.autocommit(False) | |
382 | c = await conn.cursor() | |
383 | await c.execute("""select @@autocommit;""") | |
384 | r = await c.fetchone() | |
385 | assert not r[0] | |
386 | await conn.ensure_closed() | |
387 | await conn.ping() | |
388 | await c.execute("""select @@autocommit;""") | |
389 | r = await c.fetchone() | |
390 | assert not r[0] | |
391 | await conn.ensure_closed() | |
392 | ||
393 | # Ensure autocommit() is still working | |
394 | conn = await connection_creator(charset="utf8") | |
395 | c = await conn.cursor() | |
396 | await c.execute("""select @@autocommit;""") | |
397 | r = await c.fetchone() | |
398 | assert not r[0] | |
399 | await conn.ensure_closed() | |
400 | await conn.ping() | |
401 | await conn.autocommit(True) | |
402 | await c.execute("""select @@autocommit;""") | |
403 | r = await c.fetchone() | |
404 | assert r[0] | |
405 | await conn.ensure_closed() | |
406 | ||
407 | ||
408 | @pytest.mark.run_loop | |
409 | async def test_issue_175(connection): | |
410 | """ The number of fields returned by server is read in wrong way """ | |
411 | conn = connection | |
412 | cur = await conn.cursor() | |
413 | for length in (200, 300): | |
414 | cols = ', '.join('c{0} integer'.format(i) for i in range(length)) | |
415 | sql = 'create table test_field_count ({0})'.format(cols) | |
19 | 416 | try: |
20 | yield from c.execute( | |
21 | "insert into issue3 (d, t, dt, ts) values (%s,%s,%s,%s)", | |
22 | (None, None, None, None)) | |
23 | yield from c.execute("select d from issue3") | |
24 | r = yield from c.fetchone() | |
25 | self.assertEqual(None, r[0]) | |
26 | yield from c.execute("select t from issue3") | |
27 | r = yield from c.fetchone() | |
28 | self.assertEqual(None, r[0]) | |
29 | yield from c.execute("select dt from issue3") | |
30 | r = yield from c.fetchone() | |
31 | self.assertEqual(None, r[0]) | |
32 | yield from c.execute("select ts from issue3") | |
33 | r = yield from c.fetchone() | |
34 | self.assertTrue(isinstance(r[0], datetime.datetime)) | |
417 | await cur.execute(sql) | |
418 | await cur.execute('select * from test_field_count') | |
419 | assert len(cur.description) == length | |
35 | 420 | finally: |
36 | yield from c.execute("drop table issue3") | |
37 | ||
38 | @run_until_complete | |
39 | def test_issue_4(self): | |
40 | """ can't retrieve TIMESTAMP fields """ | |
41 | conn = self.connections[0] | |
42 | c = yield from conn.cursor() | |
43 | yield from c.execute("drop table if exists issue4") | |
44 | yield from c.execute("create table issue4 (ts timestamp)") | |
45 | try: | |
46 | yield from c.execute("insert into issue4 (ts) values (now())") | |
47 | yield from c.execute("select ts from issue4") | |
48 | r = yield from c.fetchone() | |
49 | self.assertTrue(isinstance(r[0], datetime.datetime)) | |
50 | finally: | |
51 | yield from c.execute("drop table issue4") | |
52 | ||
53 | @run_until_complete | |
54 | def test_issue_5(self): | |
55 | """ query on information_schema.tables fails """ | |
56 | con = self.connections[0] | |
57 | cur = yield from con.cursor() | |
58 | yield from cur.execute("select * from information_schema.tables") | |
59 | ||
60 | @run_until_complete | |
61 | def test_issue_6(self): | |
62 | # test for exception: TypeError: ord() expected a character, | |
63 | # but string of length 0 found | |
64 | conn = yield from self.connect(db='mysql') | |
65 | c = yield from conn.cursor() | |
66 | self.assertEqual(conn.db, 'mysql') | |
67 | yield from c.execute("select * from user") | |
68 | yield from conn.ensure_closed() | |
69 | ||
70 | @run_until_complete | |
71 | def test_issue_8(self): | |
72 | """ Primary Key and Index error when selecting data """ | |
73 | conn = self.connections[0] | |
74 | c = yield from conn.cursor() | |
75 | yield from c.execute("drop table if exists test") | |
76 | yield from c.execute("""CREATE TABLE `test` (`station` int(10) NOT | |
77 | NULL DEFAULT '0', `dh` | |
78 | datetime NOT NULL DEFAULT '0000-00-00 00:00:00', | |
79 | `echeance` int(1) NOT NULL DEFAULT '0', `me` double DEFAULT NULL, | |
80 | `mo` double DEFAULT NULL, PRIMARY | |
81 | KEY (`station`,`dh`,`echeance`)) ENGINE=MyISAM DEFAULT | |
82 | CHARSET=latin1;""") | |
83 | try: | |
84 | yield from c.execute("SELECT * FROM test") | |
85 | self.assertEqual(0, c.rowcount) | |
86 | yield from c.execute( | |
87 | "ALTER TABLE `test` ADD INDEX `idx_station` (`station`)") | |
88 | yield from c.execute("SELECT * FROM test") | |
89 | self.assertEqual(0, c.rowcount) | |
90 | finally: | |
91 | yield from c.execute("drop table test") | |
92 | ||
93 | @run_until_complete | |
94 | def test_issue_13(self): | |
95 | """ can't handle large result fields """ | |
96 | conn = self.connections[0] | |
97 | cur = yield from conn.cursor() | |
98 | yield from cur.execute("drop table if exists issue13") | |
99 | try: | |
100 | yield from cur.execute("create table issue13 (t text)") | |
101 | # ticket says 18k | |
102 | size = 18 * 1024 | |
103 | yield from cur.execute("insert into issue13 (t) values (%s)", | |
104 | ("x" * size,)) | |
105 | yield from cur.execute("select t from issue13") | |
106 | # use assertTrue so that obscenely huge error messages don't print | |
107 | r = yield from cur.fetchone() | |
108 | self.assertTrue("x" * size == r[0]) | |
109 | finally: | |
110 | yield from cur.execute("drop table issue13") | |
111 | ||
112 | @run_until_complete | |
113 | def test_issue_15(self): | |
114 | """ query should be expanded before perform character encoding """ | |
115 | conn = self.connections[0] | |
116 | c = yield from conn.cursor() | |
117 | yield from c.execute("drop table if exists issue15") | |
118 | yield from c.execute("create table issue15 (t varchar(32))") | |
119 | try: | |
120 | yield from c.execute("insert into issue15 (t) values (%s)", | |
121 | (u'\xe4\xf6\xfc',)) | |
122 | yield from c.execute("select t from issue15") | |
123 | r = yield from c.fetchone() | |
124 | self.assertEqual(u'\xe4\xf6\xfc', r[0]) | |
125 | finally: | |
126 | yield from c.execute("drop table issue15") | |
127 | ||
128 | @run_until_complete | |
129 | def test_issue_16(self): | |
130 | """ Patch for string and tuple escaping """ | |
131 | conn = self.connections[0] | |
132 | c = yield from conn.cursor() | |
133 | yield from c.execute("drop table if exists issue16") | |
134 | yield from c.execute("create table issue16 (name varchar(32) " | |
135 | "primary key, email varchar(32))") | |
136 | try: | |
137 | yield from c.execute("insert into issue16 (name, email) values " | |
138 | "('pete', 'floydophone')") | |
139 | yield from c.execute("select email from issue16 where name=%s", | |
140 | ("pete",)) | |
141 | r = yield from c.fetchone() | |
142 | self.assertEqual("floydophone", r[0]) | |
143 | finally: | |
144 | yield from c.execute("drop table issue16") | |
145 | ||
146 | @unittest.skip( | |
147 | "test_issue_17() requires a custom, legacy MySQL configuration and " | |
148 | "will not be run.") | |
149 | @run_until_complete | |
150 | def test_issue_17(self): | |
151 | """ could not connect mysql use passwod """ | |
152 | conn = self.connections[0] | |
153 | host = self.host | |
154 | db = self.db | |
155 | c = yield from conn.cursor() | |
156 | # grant access to a table to a user with a password | |
157 | try: | |
158 | yield from c.execute("drop table if exists issue17") | |
159 | yield from c.execute( | |
160 | "create table issue17 (x varchar(32) primary key)") | |
161 | yield from c.execute( | |
162 | "insert into issue17 (x) values ('hello, world!')") | |
163 | yield from c.execute("grant all privileges on %s.issue17 to " | |
164 | "'issue17user'@'%%' identified by '1234'" | |
165 | % db) | |
166 | yield from conn.commit() | |
167 | ||
168 | conn2 = yield from aiomysql.connect(host=host, user="issue17user", | |
169 | passwd="1234", db=db, | |
170 | loop=self.loop) | |
171 | c2 = yield from conn2.cursor() | |
172 | yield from c2.execute("select x from issue17") | |
173 | r = yield from c2.fetchone() | |
174 | self.assertEqual("hello, world!", r[0]) | |
175 | finally: | |
176 | yield from c.execute("drop table issue17") | |
177 | ||
178 | ||
class TestNewIssues(base.AIOPyMySQLTestCase):
    """Regression tests ported from PyMySQL's "new issues" suite."""

    @run_until_complete
    def test_issue_34(self):
        # Connecting to a port nothing listens on must surface MySQL client
        # error 2003 ("Can't connect to MySQL server").
        try:
            yield from aiomysql.connect(host="localhost", port=1237,
                                        user="root", loop=self.loop)
            self.fail()
        except aiomysql.OperationalError as e:
            self.assertEqual(2003, e.args[0])
        except Exception:
            self.fail()

    @run_until_complete
    def test_issue_33(self):
        # Non-ASCII (UTF-8) table and value literals must round-trip when
        # the connection charset is utf8.  The byte literals decode to
        # "heiße" / "Piñata".
        conn = yield from self.connect(charset='utf8')
        c = yield from conn.cursor()
        try:
            yield from c.execute(
                b"drop table if exists hei\xc3\x9fe".decode("utf8"))
            yield from c.execute(
                b"create table hei\xc3\x9fe (name varchar(32))".decode("utf8"))
            yield from c.execute(b"insert into hei\xc3\x9fe (name) "
                                 b"values ('Pi\xc3\xb1ata')".
                                 decode("utf8"))
            yield from c.execute(
                b"select name from hei\xc3\x9fe".decode("utf8"))
            r = yield from c.fetchone()
            self.assertEqual(b"Pi\xc3\xb1ata".decode("utf8"), r[0])
        finally:
            yield from c.execute(b"drop table hei\xc3\x9fe".decode("utf8"))

    @unittest.skip("This test requires manual intervention")
    @run_until_complete
    def test_issue_35(self):
        # Killing the server mid-query must raise client error 2013
        # ("Lost connection to MySQL server during query").
        conn = self.connections[0]
        c = yield from conn.cursor()
        print("sudo killall -9 mysqld within the next 10 seconds")
        try:
            yield from c.execute("select sleep(10)")
            self.fail()
        except aiomysql.OperationalError as e:
            self.assertEqual(2013, e.args[0])

    @run_until_complete
    def test_issue_36(self):
        conn = self.connections[0]
        c = yield from conn.cursor()
        # kill connections[0]
        yield from c.execute("show processlist")
        kill_id = None
        rows = yield from c.fetchall()
        # Find our own thread id via the "show processlist" Info column.
        for row in rows:
            id = row[0]
            info = row[7]
            if info == "show processlist":
                kill_id = id
                break
        try:
            # now nuke the connection
            yield from conn.kill(kill_id)
            # make sure this connection has broken
            yield from c.execute("show tables")
            self.fail()
        except Exception:
            pass
        # check the process list from the other connection
        try:
            c = yield from self.connections[1].cursor()
            yield from c.execute("show processlist")
            rows = yield from c.fetchall()
            ids = [row[0] for row in rows]

            self.assertFalse(kill_id in ids)
        finally:
            # Drop the killed connection so the harness does not try to
            # clean it up again.
            del self.connections[0]

    @run_until_complete
    def test_issue_37(self):
        # Selecting an unset user variable yields one row containing NULL;
        # execute() returns the affected/returned row count.
        conn = self.connections[0]
        c = yield from conn.cursor()
        self.assertEqual(1, (yield from c.execute("SELECT @foo")))

        r = yield from c.fetchone()
        self.assertEqual((None,), r)
        self.assertEqual(0, (yield from c.execute("SET @foo = 'bar'")))
        yield from c.execute("set @foo = 'bar'")

    @run_until_complete
    def test_issue_38(self):
        # Inserting a value close to 1 MiB into a mediumblob must succeed.
        conn = self.connections[0]
        c = yield from conn.cursor()
        # reduced size for most default mysql installs
        datum = "a" * 1024 * 1023

        try:
            yield from c.execute("drop table if exists issue38")
            yield from c.execute(
                "create table issue38 (id integer, data mediumblob)")
            yield from c.execute("insert into issue38 values (1, %s)",
                                 (datum,))
        finally:
            yield from c.execute("drop table issue38")

    @run_until_complete
    def disabled_test_issue_54(self):
        # The "disabled_" prefix keeps this out of unittest discovery;
        # it exercises a very large (multi-packet) SQL statement.
        conn = self.connections[0]
        c = yield from conn.cursor()
        yield from c.execute("drop table if exists issue54")
        big_sql = "select * from issue54 where "
        big_sql += " and ".join("%d=%d" % (i, i) for i in range(0, 100000))

        try:
            yield from c.execute(
                "create table issue54 (id integer primary key)")
            yield from c.execute("insert into issue54 (id) values (7)")
            yield from c.execute(big_sql)

            r = yield from c.fetchone()
            self.assertEqual(7, r[0])
        finally:
            yield from c.execute("drop table issue54")
300 | ||
301 | ||
class TestGitHubIssues(base.AIOPyMySQLTestCase):
    """Regression tests for issues reported on the GitHub tracker."""

    @run_until_complete
    def test_issue_66(self):
        """ 'Connection' object has no attribute 'insert_id' """
        conn = self.connections[0]
        c = yield from conn.cursor()
        # No inserts yet on this connection: last generated id is 0.
        self.assertEqual(0, conn.insert_id())
        try:
            yield from c.execute("drop table if exists issue66")
            yield from c.execute("create table issue66 (id integer primary "
                                 "key auto_increment, x integer)")
            yield from c.execute("insert into issue66 (x) values (1)")
            yield from c.execute("insert into issue66 (x) values (1)")
            # Two auto-increment inserts -> last generated id is 2.
            self.assertEqual(2, conn.insert_id())
        finally:
            yield from c.execute("drop table issue66")

    @run_until_complete
    def test_issue_79(self):
        """ Duplicate field overwrites the previous one in the result
        of DictCursor """
        conn = self.connections[0]
        c = yield from conn.cursor(aiomysql.cursors.DictCursor)

        yield from c.execute("drop table if exists a")
        yield from c.execute("drop table if exists b")
        yield from c.execute("""CREATE TABLE a (id int, value int)""")
        yield from c.execute("""CREATE TABLE b (id int, value int)""")

        a = (1, 11)
        b = (1, 22)
        try:
            yield from c.execute("insert into a values (%s, %s)", a)
            yield from c.execute("insert into b values (%s, %s)", b)

            yield from c.execute("SELECT * FROM a inner join b on a.id = b.id")
            r, *_ = yield from c.fetchall()
            # The colliding column from table b must appear under a
            # table-qualified key instead of clobbering a.value.
            self.assertEqual(r['id'], 1)
            self.assertEqual(r['value'], 11)
            self.assertEqual(r['b.value'], 22)
        finally:
            yield from c.execute("drop table a")
            yield from c.execute("drop table b")

    @run_until_complete
    def test_issue_95(self):
        """ Leftover trailing OK packet for "CALL my_sp" queries """
        conn = self.connections[0]
        cur = yield from conn.cursor()
        yield from cur.execute("DROP PROCEDURE IF EXISTS `foo`")
        yield from cur.execute("""CREATE PROCEDURE `foo` ()
        BEGIN
            SELECT 1;
        END""")
        try:
            yield from cur.execute("""CALL foo()""")
            # The follow-up query must not trip over the procedure's
            # trailing OK packet.
            yield from cur.execute("""SELECT 1""")
            r = yield from cur.fetchone()
            self.assertEqual(r[0], 1)
        finally:
            yield from cur.execute("DROP PROCEDURE IF EXISTS `foo`")

    @run_until_complete
    def test_issue_114(self):
        """ autocommit is not set after reconnecting with ping() """
        conn = yield from self.connect(charset="utf8")
        yield from conn.autocommit(False)
        c = yield from conn.cursor()
        yield from c.execute("""select @@autocommit;""")
        r = yield from c.fetchone()
        self.assertFalse(r[0])
        yield from conn.ensure_closed()
        # ping() reconnects; autocommit=False must survive the reconnect.
        yield from conn.ping()
        yield from c.execute("""select @@autocommit;""")
        r = yield from c.fetchone()
        self.assertFalse(r[0])
        yield from conn.ensure_closed()

        # Ensure autocommit() is still working
        conn = yield from self.connect(charset="utf8")
        c = yield from conn.cursor()
        yield from c.execute("""select @@autocommit;""")
        r = yield from c.fetchone()
        self.assertFalse(r[0])
        yield from conn.ensure_closed()
        yield from conn.ping()
        yield from conn.autocommit(True)
        yield from c.execute("""select @@autocommit;""")
        r = yield from c.fetchone()
        self.assertTrue(r[0])
        yield from conn.ensure_closed()

    @run_until_complete
    def test_issue_175(self):
        """ The number of fields returned by server is read in wrong way """
        conn = self.connections[0]
        cur = yield from conn.cursor()
        # 200/300 columns — presumably chosen to straddle the protocol's
        # length-encoded integer boundary at 251; TODO confirm.
        for length in (200, 300):
            cols = ', '.join('c{0} integer'.format(i) for i in range(length))
            sql = 'create table test_field_count ({0})'.format(cols)
            try:
                yield from cur.execute(sql)
                yield from cur.execute('select * from test_field_count')
                assert len(cur.description) == length
            finally:
                yield from cur.execute('drop table if exists test_field_count')
421 | await cur.execute('drop table if exists test_field_count') | |
408 | 422 | |
409 | 423 | |
410 | 424 | # MySQL will get you to renegotiate if sent a cleartext password |
414 | 428 | loop=loop) as pool: |
415 | 429 | async with pool.get() as conn: |
416 | 430 | async with conn.cursor() as cur: |
417 | create_db = "CREATE DATABASE IF NOT EXISTS bugtest;" | |
431 | drop_db = "DROP DATABASE IF EXISTS bugtest;" | |
432 | await cur.execute(drop_db) | |
433 | ||
434 | create_db = "CREATE DATABASE bugtest;" | |
418 | 435 | await cur.execute(create_db) |
419 | 436 | |
420 | 437 | create_table = """CREATE TABLE IF NOT EXISTS `bugtest`.`testtable` ( |
426 | 443 | await cur.execute(create_table) |
427 | 444 | |
428 | 445 | try: |
446 | recwarn.clear() | |
447 | ||
429 | 448 | async with conn.cursor() as cur: |
430 | 449 | await cur.execute("INSERT INTO `bugtest`.`testtable` " |
431 | 450 | "(bindata) VALUES (%s);", |
433 | 452 | |
434 | 453 | warnings = [warn for warn in recwarn.list |
435 | 454 | if warn.category is Warning] |
436 | assert len(warnings) == 0, "Got unexpected MySQL warning" | |
455 | assert len(warnings) == 0, \ | |
456 | "Got unexpected MySQL warning {}".\ | |
457 | format(' '.join(str(x) for x in warnings)) | |
437 | 458 | |
438 | 459 | await cur.execute("SELECT * FROM `bugtest`.`testtable`;") |
439 | 460 | rows = await cur.fetchall() |
5 | 5 | from pymysql.err import OperationalError |
6 | 6 | |
7 | 7 | |
8 | @pytest.yield_fixture | |
8 | @pytest.fixture | |
9 | 9 | def table_local_file(connection, loop): |
10 | 10 | |
11 | 11 | async def prepare_table(conn): |
134 | 134 | fut1 = pool.acquire() |
135 | 135 | fut2 = pool.acquire() |
136 | 136 | |
137 | conn1, conn2 = await asyncio.gather(fut1, fut2, loop=loop) | |
137 | conn1, conn2 = await asyncio.gather(fut1, fut2) | |
138 | 138 | assert 2 == pool.size |
139 | 139 | assert 0 == pool.freesize |
140 | 140 | assert {conn1, conn2} == pool._used |
163 | 163 | fut2 = pool.acquire() |
164 | 164 | fut3 = pool.acquire() |
165 | 165 | |
166 | conn1, conn2, conn3 = await asyncio.gather(fut1, fut2, fut3, | |
167 | loop=loop) | |
166 | conn1, conn2, conn3 = await asyncio.gather(fut1, fut2, fut3) | |
168 | 167 | assert 3 == pool.size |
169 | 168 | assert 0 == pool.freesize |
170 | 169 | assert {conn1, conn2, conn3} == pool._used |
241 | 240 | assert 1 == pool.size |
242 | 241 | |
243 | 242 | conn = await asyncio.wait_for(pool.acquire(), |
244 | timeout=0.5, loop=loop) | |
243 | timeout=0.5) | |
245 | 244 | assert 0 == pool.freesize |
246 | 245 | assert 2 == pool.size |
247 | 246 | pool.release(conn) |
301 | 300 | conn = await pool.acquire() |
302 | 301 | maxsize = max(maxsize, pool.size) |
303 | 302 | minfreesize = min(minfreesize, pool.freesize) |
304 | await asyncio.sleep(0.01, loop=loop) | |
303 | await asyncio.sleep(0.01) | |
305 | 304 | pool.release(conn) |
306 | 305 | maxsize = max(maxsize, pool.size) |
307 | 306 | minfreesize = min(minfreesize, pool.freesize) |
308 | 307 | |
309 | await asyncio.gather(inner(), inner(), loop=loop) | |
308 | await asyncio.gather(inner(), inner()) | |
310 | 309 | |
311 | 310 | assert 1 == maxsize |
312 | 311 | assert 0 == minfreesize |
333 | 332 | ops = [] |
334 | 333 | |
335 | 334 | async def do_release(conn): |
336 | await asyncio.sleep(0, loop=loop) | |
335 | await asyncio.sleep(0) | |
337 | 336 | pool.release(conn) |
338 | 337 | ops.append('release') |
339 | 338 | |
342 | 341 | ops.append('wait_closed') |
343 | 342 | |
344 | 343 | pool.close() |
345 | await asyncio.gather(wait_closed(), do_release(c1), do_release(c2), | |
346 | loop=loop) | |
344 | await asyncio.gather(wait_closed(), do_release(c1), do_release(c2)) | |
347 | 345 | assert ['release', 'release', 'wait_closed'] == ops |
348 | 346 | assert 0 == pool.freesize |
347 | assert pool.closed | |
349 | 348 | |
350 | 349 | |
351 | 350 | @pytest.mark.run_loop |
414 | 413 | pool.close() |
415 | 414 | |
416 | 415 | with pytest.raises(asyncio.TimeoutError): |
417 | await asyncio.wait_for(pool.wait_closed(), 0.1, loop=loop) | |
416 | await asyncio.wait_for(pool.wait_closed(), 0.1) | |
418 | 417 | pool.release(conn) |
419 | 418 | |
420 | 419 | |
434 | 433 | conn = await connection_creator() |
435 | 434 | await _set_global_conn_timeout(conn, 2) |
436 | 435 | await conn.ensure_closed() |
436 | ||
437 | pool = conn = None | |
437 | 438 | try: |
438 | 439 | pool = await pool_creator(minsize=3, maxsize=3) |
439 | 440 | # sleep, more then connection timeout |
440 | await asyncio.sleep(3, loop=loop) | |
441 | await asyncio.sleep(3) | |
441 | 442 | conn = await pool.acquire() |
442 | 443 | cur = await conn.cursor() |
443 | 444 | # query should not throw exception OperationalError |
444 | 445 | await cur.execute('SELECT 1;') |
445 | 446 | pool.release(conn) |
447 | conn = None | |
446 | 448 | pool.close() |
447 | 449 | await pool.wait_closed() |
448 | 450 | finally: |
451 | # TODO: this could probably be done better | |
452 | # if this isn't closed it blocks forever | |
453 | try: | |
454 | if conn is not None: | |
455 | pool.release(conn) | |
456 | if pool is not None: | |
457 | pool.close() | |
458 | await pool.wait_closed() | |
459 | except Exception: | |
460 | pass | |
461 | ||
449 | 462 | # setup default timeouts |
450 | 463 | conn = await connection_creator() |
451 | 464 | await _set_global_conn_timeout(conn, 28800) |
475 | 488 | # timings) |
476 | 489 | task = loop.create_task(curs.execute( |
477 | 490 | "SELECT 1 as id, SLEEP(0.1) as xxx")) |
478 | await asyncio.sleep(0.05, loop=loop) | |
491 | await asyncio.sleep(0.05) | |
479 | 492 | task.cancel() |
480 | 493 | await task |
481 | 494 | except asyncio.CancelledError: |
501 | 514 | val = await cur.fetchone() |
502 | 515 | assert (1,) == val |
503 | 516 | |
504 | await asyncio.sleep(5, loop=loop) | |
517 | await asyncio.sleep(5) | |
505 | 518 | |
506 | 519 | assert 1 == pool.freesize |
507 | 520 | async with pool.get() as conn: |
525 | 538 | async with pool.get() as conn: |
526 | 539 | cur = await conn.cursor() |
527 | 540 | await cur.execute('SELECT 1;') |
541 | ||
542 | ||
@pytest.mark.run_loop
async def test_pool_maxsize_unlimited(pool_creator, loop):
    """A pool created with maxsize=0 (no upper bound) hands out a
    working connection."""
    pool = await pool_creator(minsize=0, maxsize=0)

    async with pool.acquire() as connection:
        cursor = await connection.cursor()
        await cursor.execute('SELECT 1;')
550 | ||
551 | ||
@pytest.mark.run_loop
async def test_pool_maxsize_unlimited_minsize_1(pool_creator, loop):
    """minsize=1 combined with an unbounded maxsize (0) still yields a
    usable connection."""
    pool = await pool_creator(minsize=1, maxsize=0)

    async with pool.acquire() as connection:
        cursor = await connection.cursor()
        await cursor.execute('SELECT 1;')
20 | 20 | # ]) |
21 | 21 | |
22 | 22 | |
23 | @pytest.mark.mysql_verison('8.0') | |
def ensure_mysql_version(mysql_server):
    """Skip the calling test unless the server under test is MySQL 8.0."""
    db_type = mysql_server["db_type"]
    version = mysql_server["server_version_tuple_short"]
    if db_type != "mysql" or version != (8, 0):
        pytest.skip("Not applicable for {0} version: {1}"
                    .format(db_type, version))
29 | ||
30 | ||
24 | 31 | @pytest.mark.run_loop |
25 | 32 | async def test_sha256_nopw(mysql_server, loop): |
33 | ensure_mysql_version(mysql_server) | |
34 | ||
26 | 35 | connection_data = copy.copy(mysql_server['conn_params']) |
27 | 36 | connection_data['user'] = 'nopass_sha256' |
28 | 37 | connection_data['password'] = None |
35 | 44 | assert conn._auth_plugin_used == 'sha256_password' |
36 | 45 | |
37 | 46 | |
38 | @pytest.mark.mysql_verison('8.0') | |
39 | 47 | @pytest.mark.run_loop |
40 | 48 | async def test_sha256_pw(mysql_server, loop): |
49 | ensure_mysql_version(mysql_server) | |
50 | ||
51 | # https://dev.mysql.com/doc/refman/8.0/en/sha256-pluggable-authentication.html | |
52 | # Unlike caching_sha2_password, the sha256_password plugin does not treat | |
53 | # shared-memory connections as secure, even though share-memory transport | |
54 | # is secure by default. | |
55 | if "unix_socket" in mysql_server['conn_params']: | |
56 | pytest.skip("sha256_password is not supported on unix sockets") | |
57 | ||
41 | 58 | connection_data = copy.copy(mysql_server['conn_params']) |
42 | 59 | connection_data['user'] = 'user_sha256' |
43 | 60 | connection_data['password'] = 'pass_sha256' |
50 | 67 | assert conn._auth_plugin_used == 'sha256_password' |
51 | 68 | |
52 | 69 | |
53 | @pytest.mark.mysql_verison('8.0') | |
54 | 70 | @pytest.mark.run_loop |
55 | 71 | async def test_cached_sha256_nopw(mysql_server, loop): |
72 | ensure_mysql_version(mysql_server) | |
73 | ||
56 | 74 | connection_data = copy.copy(mysql_server['conn_params']) |
57 | 75 | connection_data['user'] = 'nopass_caching_sha2' |
58 | 76 | connection_data['password'] = None |
65 | 83 | assert conn._auth_plugin_used == 'caching_sha2_password' |
66 | 84 | |
67 | 85 | |
68 | @pytest.mark.mysql_verison('8.0') | |
69 | 86 | @pytest.mark.run_loop |
70 | 87 | async def test_cached_sha256_pw(mysql_server, loop): |
88 | ensure_mysql_version(mysql_server) | |
89 | ||
71 | 90 | connection_data = copy.copy(mysql_server['conn_params']) |
72 | 91 | connection_data['user'] = 'user_caching_sha2' |
73 | 92 | connection_data['password'] = 'pass_caching_sha2' |
0 | 0 | import asyncio |
1 | ||
2 | import pytest | |
1 | 3 | from pymysql import NotSupportedError |
4 | ||
5 | from aiomysql import ProgrammingError, InterfaceError, OperationalError | |
2 | 6 | from aiomysql.cursors import SSCursor |
3 | from tests import base | |
4 | from tests._testutils import run_until_complete | |
5 | ||
6 | from aiomysql import ProgrammingError, InterfaceError | |
7 | ||
8 | ||
class TestSSCursor(base.AIOPyMySQLTestCase):
    """Tests for the server-side (unbuffered) cursor, legacy
    ``yield from`` style."""

    # Fixture rows for the throwaway ``tz_data`` table:
    # (region, zone, name) tuples.
    data = [
        ('America', '', 'America/Jamaica'),
        ('America', '', 'America/Los_Angeles'),
        ('America', '', 'America/Lima'),
        ('America', '', 'America/New_York'),
        ('America', '', 'America/Menominee'),
        ('America', '', 'America/Havana'),
        ('America', '', 'America/El_Salvador'),
        ('America', '', 'America/Costa_Rica'),
        ('America', '', 'America/Denver'),
        ('America', '', 'America/Detroit'), ]

    @asyncio.coroutine
    def _prepare(self, conn):
        """(Re)create ``tz_data`` on *conn* and populate it with
        ``self.data``, committing the result."""
        cursor = yield from conn.cursor()
        # Create table
        yield from cursor.execute('DROP TABLE IF EXISTS tz_data;')
        yield from cursor.execute('CREATE TABLE tz_data ('
                                  'region VARCHAR(64),'
                                  'zone VARCHAR(64),'
                                  'name VARCHAR(64))')

        yield from cursor.executemany(
            'INSERT INTO tz_data VALUES (%s, %s, %s)', self.data)
        yield from conn.commit()
        yield from cursor.close()

    @run_until_complete
    def test_ssursor(self):
        # NOTE(review): "ssursor" looks like a typo for "sscursor"; kept
        # as-is so selection by test name keeps working.
        # affected_rows = 18446744073709551615
        conn = self.connections[0]
        cursor = yield from conn.cursor(SSCursor)
        # Create table
        yield from cursor.execute('DROP TABLE IF EXISTS tz_data;')
        yield from cursor.execute(('CREATE TABLE tz_data ('
                                   'region VARCHAR(64),'
                                   'zone VARCHAR(64),'
                                   'name VARCHAR(64))'))

        # Test INSERT
        for i in self.data:
            yield from cursor.execute(
                'INSERT INTO tz_data VALUES (%s, %s, %s)', i)
            self.assertEqual(conn.affected_rows(), 1,
                             'affected_rows does not match')
        yield from conn.commit()

        # Test update, affected_rows()
        yield from cursor.execute('UPDATE tz_data SET zone = %s', ['Foo'])
        yield from conn.commit()

        self.assertEqual(cursor.rowcount, len(self.data),
                         'Update failed. affected_rows != %s' % (
                             str(len(self.data))))

        # Closed twice on purpose — presumably verifying close() is
        # idempotent; TODO confirm.
        yield from cursor.close()
        yield from cursor.close()

    @run_until_complete
    def test_sscursor_fetchall(self):
        # fetchall() on an unbuffered cursor must still return every row.
        conn = self.connections[0]
        cursor = yield from conn.cursor(SSCursor)

        yield from self._prepare(conn)
        yield from cursor.execute('SELECT * FROM tz_data')
        fetched_data = yield from cursor.fetchall()
        self.assertEqual(len(fetched_data), len(self.data),
                         'fetchall failed. Number of rows does not match')

    @run_until_complete
    def test_sscursor_fetchmany(self):
        conn = self.connections[0]
        cursor = yield from conn.cursor(SSCursor)
        yield from self._prepare(conn)
        yield from cursor.execute('SELECT * FROM tz_data')
        fetched_data = yield from cursor.fetchmany(2)
        self.assertEqual(len(fetched_data), 2,
                         'fetchmany failed. Number of rows does not match')

        yield from cursor.close()
        # test default fetchmany size
        cursor = yield from conn.cursor(SSCursor)
        yield from cursor.execute('SELECT * FROM tz_data;')
        fetched_data = yield from cursor.fetchmany()
        self.assertEqual(len(fetched_data), 1)

    @run_until_complete
    def test_sscursor_executemany(self):
        conn = self.connections[0]
        yield from self._prepare(conn)
        cursor = yield from conn.cursor(SSCursor)
        # Test executemany
        yield from cursor.executemany(
            'INSERT INTO tz_data VALUES (%s, %s, %s)', self.data)
        msg = 'executemany failed. cursor.rowcount != %s'
        self.assertEqual(cursor.rowcount, len(self.data),
                         msg % (str(len(self.data))))

    @run_until_complete
    def test_sscursor_scroll_relative(self):
        conn = self.connections[0]
        yield from self._prepare(conn)
        cursor = yield from conn.cursor(SSCursor)
        yield from cursor.execute('SELECT * FROM tz_data;')
        yield from cursor.scroll(1)
        ret = yield from cursor.fetchone()
        self.assertEqual(('America', '', 'America/Los_Angeles'), ret)

    @run_until_complete
    def test_sscursor_scroll_absolute(self):
        conn = self.connections[0]
        yield from self._prepare(conn)
        cursor = yield from conn.cursor(SSCursor)
        yield from cursor.execute('SELECT * FROM tz_data;')
        yield from cursor.scroll(2, mode='absolute')
        ret = yield from cursor.fetchone()
        self.assertEqual(('America', '', 'America/Lima'), ret)

    @run_until_complete
    def test_sscursor_scroll_errors(self):
        # NOTE(review): unlike the other scroll tests this one never calls
        # self._prepare(), so it relies on tz_data being left behind by an
        # earlier test — the pytest port adds a prepare step here; TODO
        # confirm and align.
        conn = self.connections[0]
        cursor = yield from conn.cursor(SSCursor)

        yield from cursor.execute('SELECT * FROM tz_data;')

        # Unbuffered cursors cannot scroll backwards...
        with self.assertRaises(NotSupportedError):
            yield from cursor.scroll(-2, mode='relative')

        yield from cursor.scroll(2, mode='absolute')

        # ...nor back to an already-consumed absolute position.
        with self.assertRaises(NotSupportedError):
            yield from cursor.scroll(1, mode='absolute')
        with self.assertRaises(ProgrammingError):
            yield from cursor.scroll(2, mode='not_valid_mode')

    @run_until_complete
    def test_sscursor_cancel(self):
        # Cancelling a task that is reading an unbuffered result must leave
        # the connection unusable for new cursors (InterfaceError below).
        conn = self.connections[0]
        cur = yield from conn.cursor(SSCursor)
        # Prepare ALOT of data

        yield from cur.execute('DROP TABLE IF EXISTS long_seq;')
        yield from cur.execute(
            """ CREATE TABLE long_seq (
            id int(11)
            )
            """)

        # NOTE(review): (x) is just x, not a 1-tuple — each executemany
        # argument is a bare int.
        ids = [(x) for x in range(100000)]
        yield from cur.executemany('INSERT INTO long_seq VALUES (%s)', ids)

        # Will return several results. All we need at this point
        big_str = "x" * 10000
        yield from cur.execute(
            """SELECT '{}' as id FROM long_seq;
            """.format(big_str))
        first = yield from cur.fetchone()
        self.assertEqual(first, (big_str,))

        @asyncio.coroutine
        def read_cursor():
            while True:
                res = yield from cur.fetchone()
                if res is None:
                    break
        task = self.loop.create_task(read_cursor())
        # One scheduler tick: the reader must have started but not finished.
        yield from asyncio.sleep(0, loop=self.loop)
        assert not task.done(), "Test failed to produce needed condition."
        task.cancel()
        try:
            yield from task
        except asyncio.CancelledError:
            pass

        with self.assertRaises(InterfaceError):
            yield from conn.cursor(SSCursor)
7 | ||
8 | ||
# Fixture rows for the throwaway ``tz_data`` table used by the tests in
# this module: (region, zone, name) tuples.
DATA = [
    ('America', '', 'America/Jamaica'),
    ('America', '', 'America/Los_Angeles'),
    ('America', '', 'America/Lima'),
    ('America', '', 'America/New_York'),
    ('America', '', 'America/Menominee'),
    ('America', '', 'America/Havana'),
    ('America', '', 'America/El_Salvador'),
    ('America', '', 'America/Costa_Rica'),
    ('America', '', 'America/Denver'),
    ('America', '', 'America/Detroit'), ]
20 | ||
21 | ||
async def _prepare(conn):
    """(Re)create the ``tz_data`` table on *conn*, fill it with DATA and
    commit."""
    cur = await conn.cursor()
    await cur.execute('DROP TABLE IF EXISTS tz_data;')
    await cur.execute('CREATE TABLE tz_data ('
                      'region VARCHAR(64),'
                      'zone VARCHAR(64),'
                      'name VARCHAR(64))')

    await cur.executemany(
        'INSERT INTO tz_data VALUES (%s, %s, %s)', DATA)
    await conn.commit()
    await cur.close()
35 | ||
36 | ||
@pytest.mark.run_loop
async def test_ssursor(connection):
    # NOTE(review): "ssursor" looks like a typo for "sscursor"; kept as-is
    # so selection by test name keeps working.
    # affected_rows = 18446744073709551615
    conn = connection
    cursor = await conn.cursor(SSCursor)
    # Create table
    await cursor.execute('DROP TABLE IF EXISTS tz_data;')
    await cursor.execute(('CREATE TABLE tz_data ('
                          'region VARCHAR(64),'
                          'zone VARCHAR(64),'
                          'name VARCHAR(64))'))

    # Test INSERT
    for i in DATA:
        await cursor.execute(
            'INSERT INTO tz_data VALUES (%s, %s, %s)', i)
        assert conn.affected_rows() == 1, 'affected_rows does not match'
    await conn.commit()

    # Test update, affected_rows()
    await cursor.execute('UPDATE tz_data SET zone = %s', ['Foo'])
    await conn.commit()

    assert cursor.rowcount == len(DATA), \
        'Update failed. affected_rows != %s' % (str(len(DATA)))

    # Closed twice on purpose — presumably verifying close() is
    # idempotent; TODO confirm.
    await cursor.close()
    await cursor.close()
65 | ||
66 | ||
@pytest.mark.run_loop
async def test_sscursor_fetchall(connection):
    """fetchall() on an unbuffered cursor returns the whole result set."""
    cursor = await connection.cursor(SSCursor)

    await _prepare(connection)
    await cursor.execute('SELECT * FROM tz_data')
    rows = await cursor.fetchall()
    assert len(rows) == len(DATA), \
        'fetchall failed. Number of rows does not match'
77 | ||
78 | ||
@pytest.mark.run_loop
async def test_sscursor_fetchmany(connection):
    """fetchmany() honours an explicit size and defaults to one row."""
    cursor = await connection.cursor(SSCursor)
    await _prepare(connection)
    await cursor.execute('SELECT * FROM tz_data')
    chunk = await cursor.fetchmany(2)
    assert len(chunk) == 2, \
        'fetchmany failed. Number of rows does not match'

    await cursor.close()
    # A fresh cursor exercises the default fetchmany size (one row).
    cursor = await connection.cursor(SSCursor)
    await cursor.execute('SELECT * FROM tz_data;')
    chunk = await cursor.fetchmany()
    assert len(chunk) == 1
95 | ||
96 | ||
@pytest.mark.run_loop
async def test_sscursor_executemany(connection):
    """executemany() reports the total number of inserted rows."""
    await _prepare(connection)
    cursor = await connection.cursor(SSCursor)
    await cursor.executemany(
        'INSERT INTO tz_data VALUES (%s, %s, %s)', DATA)
    msg = 'executemany failed. cursor.rowcount != %s'
    assert cursor.rowcount == len(DATA), msg % (str(len(DATA)))
107 | ||
108 | ||
@pytest.mark.run_loop
async def test_sscursor_scroll_relative(connection):
    """A relative scroll(1) skips exactly one row of the result."""
    await _prepare(connection)
    cursor = await connection.cursor(SSCursor)
    await cursor.execute('SELECT * FROM tz_data;')
    await cursor.scroll(1)
    row = await cursor.fetchone()
    assert row == ('America', '', 'America/Los_Angeles')
118 | ||
119 | ||
@pytest.mark.run_loop
async def test_sscursor_scroll_absolute(connection):
    """An absolute scroll(2) positions the cursor on the third row."""
    await _prepare(connection)
    cursor = await connection.cursor(SSCursor)
    await cursor.execute('SELECT * FROM tz_data;')
    await cursor.scroll(2, mode='absolute')
    row = await cursor.fetchone()
    assert row == ('America', '', 'America/Lima')
129 | ||
130 | ||
@pytest.mark.run_loop
async def test_sscursor_scroll_errors(connection):
    """Backward/already-consumed scrolls and unknown modes must raise."""
    await _prepare(connection)
    cursor = await connection.cursor(SSCursor)

    await cursor.execute('SELECT * FROM tz_data;')

    # Unbuffered cursors cannot scroll backwards...
    with pytest.raises(NotSupportedError):
        await cursor.scroll(-2, mode='relative')

    await cursor.scroll(2, mode='absolute')

    # ...nor back to an already-consumed absolute position.
    with pytest.raises(NotSupportedError):
        await cursor.scroll(1, mode='absolute')
    with pytest.raises(ProgrammingError):
        await cursor.scroll(2, mode='not_valid_mode')
148 | ||
149 | ||
@pytest.mark.run_loop
async def test_sscursor_cancel(connection):
    # Cancelling a task that is reading an unbuffered result must leave the
    # connection unusable for new cursors (InterfaceError below).
    conn = connection
    cur = await conn.cursor(SSCursor)
    # Prepare A LOT of data

    await cur.execute('DROP TABLE IF EXISTS long_seq;')
    await cur.execute(
        """ CREATE TABLE long_seq (
        id int(11)
        )
        """)

    # NOTE(review): (x) is just x, not a 1-tuple — each executemany
    # argument is a bare int.
    ids = [(x) for x in range(100000)]
    await cur.executemany('INSERT INTO long_seq VALUES (%s)', ids)

    # Will return several results. All we need at this point
    big_str = "x" * 10000
    await cur.execute(
        """SELECT '{}' as id FROM long_seq;
        """.format(big_str))
    first = await cur.fetchone()
    assert first == (big_str,)

    async def read_cursor():
        while True:
            res = await cur.fetchone()
            if res is None:
                break
    task = asyncio.ensure_future(read_cursor())
    # One scheduler tick: the reader must have started but not finished.
    await asyncio.sleep(0)
    assert not task.done(), "Test failed to produce needed condition."
    task.cancel()
    try:
        await task
    except asyncio.CancelledError:
        pass

    with pytest.raises(InterfaceError):
        await conn.cursor(SSCursor)
190 | ||
191 | ||
@pytest.mark.run_loop
async def test_sscursor_discarded_result(connection):
    """Starting a new query silently discards an unread previous result."""
    await _prepare(connection)
    async with connection.cursor(SSCursor) as cursor:
        await cursor.execute("select 1")
        await cursor.execute("select 2")
        row = await cursor.fetchone()
        assert row == (2,)
201 | ||
202 | ||
@pytest.mark.run_loop
async def test_max_execution_time(mysql_server, connection):
    """Exercise server-side statement timeouts on an unbuffered cursor.

    MySQL uses the ``MAX_EXECUTION_TIME`` optimizer hint (milliseconds);
    MariaDB (>= 10.1) uses ``SET STATEMENT max_statement_time`` (seconds
    as int/float).  A generous budget must let the query complete, a tiny
    one must raise OperationalError with the server-specific error code,
    and the connection must remain usable afterwards.
    """
    conn = connection

    def timeout_query(mysql_ms, mariadb_sec, sleep_arg):
        # Build the per-server flavor of "SELECT ... under a timeout".
        # MySQL MAX_EXECUTION_TIME takes ms; MariaDB max_statement_time
        # takes seconds (int/float), introduced in 10.1.
        if mysql_server["db_type"] == "mysql":
            return """
            SELECT /*+ MAX_EXECUTION_TIME({}) */
            name, sleep({}) FROM tbl
            """.format(mysql_ms, sleep_arg)
        return """
        SET STATEMENT max_statement_time={} FOR
        SELECT name, sleep({}) FROM tbl
        """.format(mariadb_sec, sleep_arg)

    async with conn.cursor() as cur:
        await cur.execute("DROP TABLE IF EXISTS tbl;")

        await cur.execute(
            """
            CREATE TABLE tbl (
                 id MEDIUMINT NOT NULL AUTO_INCREMENT,
                 name VARCHAR(255) NOT NULL,
                 PRIMARY KEY (id));
            """
        )

        for row in [(1, "a"), (2, "b"), (3, "c")]:
            await cur.execute("INSERT INTO tbl VALUES(%s, %s)", row)

        await conn.commit()

    async with conn.cursor(SSCursor) as cur:
        # Each row sleeps 0.01 s, so a 2 s budget is plenty.
        await cur.execute(timeout_query(2000, 2, 0.01))
        # unlike Cursor, SSCursor returns a list of tuples here
        assert (await cur.fetchall()) == [
            ("a", 0),
            ("b", 0),
            ("c", 0),
        ]

        await cur.execute(timeout_query(2000, 2, 0.01))
        assert (await cur.fetchone()) == ("a", 0)

        # this discards the previous unfinished query and raises an
        # incomplete unbuffered query warning
        with pytest.warns(UserWarning):
            await cur.execute("SELECT 1")
        assert (await cur.fetchone()) == (1,)

        # SSCursor will not read the EOF packet until we try to read
        # another row. Skipping this will raise an incomplete unbuffered
        # query warning in the next cur.execute().
        assert (await cur.fetchone()) is None

        with pytest.raises(OperationalError) as cm:
            # in an unbuffered cursor the OperationalError may not show up
            # until fetching the entire result
            await cur.execute(timeout_query(1, 0.001, 1))
            await cur.fetchall()

        if mysql_server["db_type"] == "mysql":
            # ER_QUERY_TIMEOUT (3024) was only introduced in MySQL 5.7;
            # earlier servers may have returned ER_QUERY_INTERRUPTED.
            # The symbolic constant is pending a new PyMySQL release:
            # assert cm.value.args[0] == pymysql.constants.ER.QUERY_TIMEOUT
            assert cm.value.args[0] == 3024
        else:
            # Symbolic constant pending a new PyMySQL release:
            # assert cm.value.args[0] == pymysql.constants.ER.STATEMENT_TIMEOUT
            assert cm.value.args[0] == 1969

        # connection should still be fine at this point
        await cur.execute("SELECT 1")
        assert (await cur.fetchone()) == (1,)
3 | 3 | |
4 | 4 | |
5 | 5 | @pytest.mark.run_loop |
6 | async def test_tls_connect(mysql_server, loop): | |
6 | async def test_tls_connect(mysql_server, loop, mysql_params): | |
7 | if "unix_socket" in mysql_params: | |
8 | pytest.skip("TLS is not supported on unix sockets") | |
9 | ||
7 | 10 | async with create_pool(**mysql_server['conn_params'], |
8 | 11 | loop=loop) as pool: |
9 | 12 | async with pool.get() as conn: |
31 | 34 | |
32 | 35 | # MySQL will get you to renegotiate if sent a cleartext password |
33 | 36 | @pytest.mark.run_loop |
34 | async def test_auth_plugin_renegotiation(mysql_server, loop): | |
37 | async def test_auth_plugin_renegotiation(mysql_server, loop, mysql_params): | |
38 | if "unix_socket" in mysql_params: | |
39 | pytest.skip("TLS is not supported on unix sockets") | |
40 | ||
35 | 41 | async with create_pool(**mysql_server['conn_params'], |
36 | 42 | auth_plugin='mysql_clear_password', |
37 | 43 | loop=loop) as pool: |