Update upstream source from tag 'upstream/0.2.3'
Update to upstream version '0.2.3'
with Debian dir dbc093aa423367d1f1c63dd1c0f2b9f52980a88f
Federico Ceratto
2 years ago
0 | on: [push, pull_request] | |
1 | name: build | |
2 | jobs: | |
3 | tests: | |
4 | runs-on: ubuntu-20.04 | |
5 | strategy: | |
6 | matrix: | |
7 | use-numpy: | |
8 | - 0 | |
9 | python-version: | |
10 | - "3.5" | |
11 | - "3.6" | |
12 | - "3.7" | |
13 | - "3.8" | |
14 | - "3.9" | |
15 | - "3.10" | |
16 | - "pypy-3.6" | |
17 | - "pypy-3.7" | |
18 | clickhouse-version: | |
19 | - 21.12.3.32 | |
20 | - 21.9.3.30 | |
21 | - 21.9.3.30 | |
22 | - 21.4.6.55 | |
23 | - 21.3.10.1 | |
24 | - 21.2.10.48 | |
25 | - 21.1.9.41 | |
26 | - 20.11.2.1 | |
27 | - 20.10.2.20 | |
28 | - 20.9.3.45 | |
29 | - 20.8.4.11 | |
30 | - 20.7.4.11 | |
31 | - 20.6.8.5 | |
32 | - 20.5.5.74 | |
33 | - 20.4.9.110 | |
34 | - 20.3.20.6 | |
35 | - 19.16.17.80 | |
36 | - 19.15.3.6 | |
37 | - 19.9.2.4 # allow_suspicious_low_cardinality_types | |
38 | - 19.8.3.8 # SimpleAggregateFunction | |
39 | - 19.3.3 | |
40 | - 18.12.17 | |
41 | include: | |
42 | - clickhouse-version: 20.3.20.6 | |
43 | use-numpy: 1 | |
44 | python-version: 3.8 | |
45 | ||
46 | name: ${{ matrix.python-version }} CH=${{ matrix.clickhouse-version }} NUMPY=${{ matrix.use-numpy }} | |
47 | steps: | |
48 | - uses: actions/checkout@v2 | |
49 | - name: Set up Python | |
50 | uses: actions/setup-python@v2 | |
51 | with: | |
52 | python-version: ${{ matrix.python-version }} | |
53 | architecture: x64 | |
54 | # - name: Login to Docker Hub | |
55 | # uses: docker/login-action@v1 | |
56 | # with: | |
57 | # username: ${{ secrets.DOCKER_HUB_USERNAME }} | |
58 | # password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }} | |
59 | - name: Install flake8 | |
60 | run: | | |
61 | pip install --upgrade pip setuptools wheel | |
62 | pip install flake8 flake8-print | |
63 | - name: Run flake8 | |
64 | run: flake8 | |
65 | - name: Start ClickHouse server and client containers | |
66 | run: | | |
67 | echo "VERSION=${{ matrix.clickhouse-version }}" > tests/.env | |
68 | docker-compose -f tests/docker-compose.yml up -d | |
69 | - name: Setup clickhouse-client proxy for docker | |
70 | run: | | |
71 | # Faking clickhouse-client real communication with container via docker exec. | |
72 | echo -e '#!/bin/bash\n\ndocker exec -e "`env | grep ^TZ=`" test-clickhouse-client clickhouse-client "$@"' | sudo tee /usr/local/bin/clickhouse-client > /dev/null | |
73 | sudo chmod +x /usr/local/bin/clickhouse-client | |
74 | # Overriding setup.cfg. Set host=clickhouse-server | |
75 | sed -i 's/^host=localhost$/host=clickhouse-server/' setup.cfg | |
76 | # Make host think that clickhouse-server is localhost | |
77 | echo '127.0.0.1 clickhouse-server' | sudo tee /etc/hosts > /dev/null | |
78 | - name: Build cython extensions with tracing | |
79 | run: CYTHON_TRACE=1 python setup.py build_ext --define CYTHON_TRACE | |
80 | if: ${{ !contains(matrix.python-version, 'pypy') }} | |
81 | - name: Install requirements | |
82 | run: | | |
83 | # Newer coveralls do not work with github actions. | |
84 | pip install 'coveralls<3.0.0' | |
85 | pip install cython | |
86 | python testsrequire.py | |
87 | python setup.py develop | |
88 | # Limit each test time execution. | |
89 | pip install pytest-timeout | |
90 | env: | |
91 | USE_NUMPY: ${{ matrix.use-numpy }} | |
92 | - name: Run tests | |
93 | run: coverage run -m py.test --timeout=10 -v | |
94 | timeout-minutes: 5 | |
95 | env: | |
96 | # Set initial TZ for docker exec -e "`env | grep ^TZ`" | |
97 | TZ: UTC | |
98 | - name: Upload coverage | |
99 | run: coveralls | |
100 | env: | |
101 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} | |
102 | COVERALLS_PARALLEL: true | |
103 | COVERALLS_FLAG_NAME: ${{ matrix.python-version }} CH=${{ matrix.clickhouse-version }} NUMPY=${{ matrix.use-numpy }} | |
104 | ||
105 | coveralls-finished: | |
106 | name: Indicate completion to coveralls.io | |
107 | needs: tests | |
108 | runs-on: ubuntu-latest | |
109 | steps: | |
110 | - name: Finished | |
111 | uses: coverallsapp/github-action@1.1.3 | |
112 | with: | |
113 | github-token: ${{ secrets.GITHUB_TOKEN }} | |
114 | parallel-finished: true | |
115 | ||
116 | valgrind: | |
117 | name: Valgrind check | |
118 | needs: tests | |
119 | runs-on: ubuntu-20.04 | |
120 | steps: | |
121 | - uses: actions/checkout@v2 | |
122 | - name: Set up Python | |
123 | uses: actions/setup-python@v2 | |
124 | with: | |
125 | python-version: 3.8 | |
126 | architecture: x64 | |
127 | - name: Install valgrind | |
128 | run: sudo apt-get install -y valgrind | |
129 | # - name: Login to Docker Hub | |
130 | # uses: docker/login-action@v1 | |
131 | # with: | |
132 | # username: ${{ secrets.DOCKER_HUB_USERNAME }} | |
133 | # password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }} | |
134 | - name: Start ClickHouse server and client containers | |
135 | run: | | |
136 | echo "VERSION=$VERSION" > tests/.env | |
137 | docker-compose -f tests/docker-compose.yml up -d | |
138 | env: | |
139 | VERSION: 20.3.7.46 | |
140 | - name: Setup clickhouse-client proxy for docker | |
141 | run: | | |
142 | # Faking clickhouse-client real communication with container via docker exec. | |
143 | echo -e '#!/bin/bash\n\ndocker exec -e "`env | grep ^TZ=`" test-clickhouse-client clickhouse-client "$@"' | sudo tee /usr/local/bin/clickhouse-client > /dev/null | |
144 | sudo chmod +x /usr/local/bin/clickhouse-client | |
145 | # Overriding setup.cfg. Set host=clickhouse-server | |
146 | sed -i 's/^host=localhost$/host=clickhouse-server/' setup.cfg | |
147 | # Make host think that clickhouse-server is localhost | |
148 | echo '127.0.0.1 clickhouse-server' | sudo tee /etc/hosts > /dev/null | |
149 | - name: Install requirements | |
150 | run: | | |
151 | python testsrequire.py | |
152 | python setup.py develop | |
153 | env: | |
154 | USE_NUMPY: 1 | |
155 | - name: Run tests under valgrind | |
156 | run: valgrind --error-exitcode=1 --suppressions=valgrind.supp py.test -v | |
157 | env: | |
158 | # Set initial TZ for docker exec -e "`env | grep ^TZ`" | |
159 | TZ: UTC | |
160 | USE_NUMPY: 1 | |
161 | PYTHONMALLOC: malloc | |
162 | ||
163 | wheels-linux: | |
164 | name: Wheels for Linux | |
165 | needs: valgrind | |
166 | runs-on: ubuntu-20.04 | |
167 | steps: | |
168 | - uses: actions/checkout@v2 | |
169 | - name: Set up Python | |
170 | uses: actions/setup-python@v2 | |
171 | with: | |
172 | python-version: 3.8 | |
173 | architecture: x64 | |
174 | # - name: Login to Docker Hub | |
175 | # uses: docker/login-action@v1 | |
176 | # with: | |
177 | # username: ${{ secrets.DOCKER_HUB_USERNAME }} | |
178 | # password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }} | |
179 | - name: Install cibuildwheel | |
180 | run: | | |
181 | pip install --upgrade pip setuptools | |
182 | pip install cython cibuildwheel==$VERSION | |
183 | env: | |
184 | VERSION: 2.2.2 | |
185 | - name: Build wheels | |
186 | run: cibuildwheel --output-dir wheelhouse | |
187 | env: | |
188 | CIBW_BUILD: '*p3*' | |
189 | CIBW_BEFORE_BUILD: pip install cython | |
190 | - uses: ncipollo/release-action@v1 | |
191 | name: Upload wheels | |
192 | if: ${{ github.ref_type == 'tag' }} | |
193 | with: | |
194 | artifacts: "wheelhouse/*" | |
195 | allowUpdates: true | |
196 | draft: true | |
197 | tag: Linux | |
198 | token: ${{ secrets.GITHUB_TOKEN }} | |
199 | ||
200 | wheels-macos: | |
201 | name: Wheels for OS X | |
202 | needs: valgrind | |
203 | runs-on: macos-10.15 | |
204 | steps: | |
205 | - uses: actions/checkout@v2 | |
206 | - name: Set up Python | |
207 | uses: actions/setup-python@v2 | |
208 | with: | |
209 | python-version: 3.8 | |
210 | architecture: x64 | |
211 | # - name: Login to Docker Hub | |
212 | # uses: docker/login-action@v1 | |
213 | # with: | |
214 | # username: ${{ secrets.DOCKER_HUB_USERNAME }} | |
215 | # password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }} | |
216 | - name: Install cibuildwheel | |
217 | run: | | |
218 | pip install --upgrade pip setuptools | |
219 | pip install cython cibuildwheel==$VERSION | |
220 | env: | |
221 | VERSION: 2.2.2 | |
222 | - name: Build wheels | |
223 | run: cibuildwheel --output-dir wheelhouse | |
224 | env: | |
225 | CIBW_BUILD: '*p3*' | |
226 | CIBW_BEFORE_BUILD: pip install cython | |
227 | - uses: ncipollo/release-action@v1 | |
228 | name: Upload wheels | |
229 | if: ${{ github.ref_type == 'tag' }} | |
230 | with: | |
231 | artifacts: "wheelhouse/*" | |
232 | allowUpdates: true | |
233 | draft: true | |
234 | tag: OS X | |
235 | token: ${{ secrets.GITHUB_TOKEN }} | |
236 | ||
237 | wheels-windows: | |
238 | name: Wheels for Windows | |
239 | needs: valgrind | |
240 | runs-on: windows-2019 | |
241 | steps: | |
242 | - uses: actions/checkout@v2 | |
243 | - name: Set up Python | |
244 | uses: actions/setup-python@v2 | |
245 | with: | |
246 | python-version: 3.8 | |
247 | architecture: x64 | |
248 | # - name: Login to Docker Hub | |
249 | # uses: docker/login-action@v1 | |
250 | # with: | |
251 | # username: ${{ secrets.DOCKER_HUB_USERNAME }} | |
252 | # password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }} | |
253 | - name: Install cibuildwheel | |
254 | run: | | |
255 | pip install cibuildwheel==$env:VERSION | |
256 | env: | |
257 | VERSION: 2.2.2 | |
258 | - name: Build wheels | |
259 | run: cibuildwheel --output-dir wheelhouse | |
260 | env: | |
261 | CIBW_BUILD: '*p3*' | |
262 | CIBW_BEFORE_BUILD: pip install cython | |
263 | - uses: ncipollo/release-action@v1 | |
264 | name: Upload wheels | |
265 | if: ${{ github.ref_type == 'tag' }} | |
266 | with: | |
267 | artifacts: "wheelhouse/*" | |
268 | allowUpdates: true | |
269 | draft: true | |
270 | tag: Windows | |
271 | token: ${{ secrets.GITHUB_TOKEN }} | |
272 | ||
273 | wheels-linux-non-x86: | |
274 | name: Wheels for Linux non-x86 | |
275 | needs: valgrind | |
276 | runs-on: ubuntu-20.04 | |
277 | strategy: | |
278 | matrix: | |
279 | arch: | |
280 | - aarch64 | |
281 | - ppc64le | |
282 | - s390x | |
283 | steps: | |
284 | - uses: actions/checkout@v2.1.0 | |
285 | # - name: Login to Docker Hub | |
286 | # uses: docker/login-action@v1 | |
287 | # with: | |
288 | # username: ${{ secrets.DOCKER_HUB_USERNAME }} | |
289 | # password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }} | |
290 | - name: Set up Python | |
291 | uses: actions/setup-python@v2 | |
292 | with: | |
293 | python-version: 3.8 | |
294 | architecture: x64 | |
295 | - name: Set up QEMU | |
296 | id: qemu | |
297 | uses: docker/setup-qemu-action@v1.2.0 | |
298 | with: | |
299 | image: tonistiigi/binfmt:latest | |
300 | platforms: all | |
301 | - name: Install cibuildwheel | |
302 | run: | | |
303 | pip install --upgrade pip setuptools | |
304 | pip install cibuildwheel==$VERSION | |
305 | env: | |
306 | VERSION: 2.2.2 | |
307 | - name: Build wheels | |
308 | run: cibuildwheel --output-dir wheelhouse | |
309 | env: | |
310 | CIBW_BUILD: '*p3*' | |
311 | CIBW_BEFORE_BUILD: pip install cython | |
312 | CIBW_ARCHS: ${{ matrix.arch }} | |
313 | - uses: ncipollo/release-action@v1 | |
314 | name: Upload wheels | |
315 | if: ${{ github.ref_type == 'tag' }} | |
316 | with: | |
317 | artifacts: "wheelhouse/*" | |
318 | allowUpdates: true | |
319 | draft: true | |
320 | tag: Linux non-x86 | |
321 | token: ${{ secrets.GITHUB_TOKEN }} |
0 | env: | |
1 | - VERSION=21.9.3.30 | |
2 | - VERSION=21.4.6.55 | |
3 | - VERSION=21.3.10.1 | |
4 | - VERSION=21.2.10.48 | |
5 | - VERSION=21.1.9.41 | |
6 | - VERSION=20.11.2.1 | |
7 | - VERSION=20.10.2.20 | |
8 | - VERSION=20.9.3.45 | |
9 | - VERSION=20.8.4.11 | |
10 | - VERSION=20.7.4.11 | |
11 | - VERSION=20.6.8.5 | |
12 | - VERSION=20.5.5.74 | |
13 | - VERSION=20.4.9.110 | |
14 | - VERSION=20.3.20.6 | |
15 | - VERSION=20.3.20.6 USE_NUMPY=1 | |
16 | - VERSION=19.16.17.80 | |
17 | - VERSION=19.15.3.6 | |
18 | - VERSION=19.9.2.4 # allow_suspicious_low_cardinality_types | |
19 | - VERSION=19.8.3.8 # SimpleAggregateFunction | |
20 | - VERSION=19.3.3 | |
21 | - VERSION=18.12.17 | |
22 | # - VERSION=18.10.3 | |
23 | # - VERSION=18.6.0 | |
24 | # - VERSION=18.5.1 | |
25 | # - VERSION=18.4.0 | |
26 | # - VERSION=18.1.0 | |
27 | # - VERSION=1.1.54394 | |
28 | # - VERSION=1.1.54390 | |
29 | # - VERSION=1.1.54388 | |
30 | # - VERSION=1.1.54385 | |
31 | # - VERSION=1.1.54383 | |
32 | # - VERSION=1.1.54381 | |
33 | # - VERSION=1.1.54380 | |
34 | # - VERSION=1.1.54378 client's image miss tzdata package: https://github.com/yandex/ClickHouse/commit/1bf49fe8446c7dea95beaef2b131e6c6708b0b62#diff-cc737435a5ba74620a889b7718f39a80 | |
35 | # - VERSION=1.1.54343 | |
36 | # - VERSION=1.1.54342 | |
37 | ## - VERSION=1.1.54337 Broken network | |
38 | # - VERSION=1.1.54327 | |
39 | # - VERSION=1.1.54310 | |
40 | # - VERSION=1.1.54304 | |
41 | # - VERSION=1.1.54292 | |
42 | # - VERSION=1.1.54289 | |
43 | # - VERSION=1.1.54284 | |
44 | # - VERSION=1.1.54282 | |
45 | ||
46 | language: python | |
47 | python: | |
48 | - "3.4" | |
49 | - "3.5" | |
50 | - "3.6" | |
51 | - "3.7" | |
52 | - "3.8" | |
53 | - "3.9" | |
54 | - "pypy3.5" | |
55 | cache: pip | |
56 | services: | |
57 | - docker | |
58 | install: | |
59 | - pip install --upgrade pip setuptools | |
60 | # Check flake8 first | |
61 | - pip install flake8 flake8-print | |
62 | - flake8 | |
63 | - if [ ! -z $DOCKER_PASSWORD ] ; then echo "$DOCKER_PASSWORD" | docker login -u "$DOCKER_USERNAME" --password-stdin ; fi | |
64 | - docker run -e "TZ=Europe/Moscow" -d -p 127.0.0.1:9000:9000 --name test-clickhouse-server --ulimit nofile=262144:262144 yandex/clickhouse-server:$VERSION | |
65 | - docker run -d --entrypoint "/bin/sh" --name test-clickhouse-client --link test-clickhouse-server:clickhouse-server yandex/clickhouse-client:$VERSION -c 'while :; do sleep 1; done' | |
66 | - docker ps -a | |
67 | # Faking clickhouse-client real communication with container via docker exec. | |
68 | - echo -e '#!/bin/bash\n\ndocker exec -e "`env | grep ^TZ=`" test-clickhouse-client clickhouse-client "$@"' | sudo tee /usr/local/bin/clickhouse-client > /dev/null | |
69 | - sudo chmod +x /usr/local/bin/clickhouse-client | |
70 | # Overriding setup.cfg. Set host=clickhouse-server | |
71 | - sed -i 's/^host=localhost$/host=clickhouse-server/' setup.cfg | |
72 | # Make host think that clickhouse-server is localhost | |
73 | - echo '127.0.0.1 clickhouse-server' | sudo tee /etc/hosts > /dev/null | |
74 | - pip install coveralls cython 'cryptography<3.3' | |
75 | - if [ -z ${USE_NUMPY+x} ]; then pip uninstall -y numpy pandas; fi | |
76 | script: | |
77 | # Enable cython tracing only for cpython | |
78 | - if [ "$TRAVIS_PYTHON_VERSION" != "pypy3.5" ]; then CYTHON_TRACE=1 python setup.py build_ext --define CYTHON_TRACE ; fi | |
79 | - coverage run setup.py test | |
80 | after_success: | |
81 | coveralls | |
82 | ||
83 | jobs: | |
84 | # Exclude numpy unsupported versions, | |
85 | exclude: | |
86 | - python: 3.4 | |
87 | env: VERSION=20.3.20.6 USE_NUMPY=1 | |
88 | - python: 3.9-dev | |
89 | env: VERSION=20.3.20.6 USE_NUMPY=1 | |
90 | - python: pypy3.5 | |
91 | env: VERSION=20.3.20.6 USE_NUMPY=1 | |
92 | ||
93 | include: | |
94 | - stage: valgrind | |
95 | name: Valgrind check | |
96 | os: linux | |
97 | language: python | |
98 | python: | |
99 | - "3.6" | |
100 | addons: | |
101 | apt: | |
102 | packages: | |
103 | - valgrind | |
104 | install: | |
105 | - if [ ! -z $DOCKER_PASSWORD ] ; then echo "$DOCKER_PASSWORD" | docker login -u "$DOCKER_USERNAME" --password-stdin ; fi | |
106 | - docker run -e "TZ=Europe/Moscow" -d -p 127.0.0.1:9000:9000 --name test-clickhouse-server --ulimit nofile=262144:262144 yandex/clickhouse-server:$VERSION | |
107 | - docker run -d --entrypoint "/bin/sh" --name test-clickhouse-client --link test-clickhouse-server:clickhouse-server yandex/clickhouse-client:$VERSION -c 'while :; do sleep 1; done' | |
108 | - docker ps -a | |
109 | # Faking clickhouse-client real communication with container via docker exec. | |
110 | - echo -e '#!/bin/bash\n\ndocker exec -e "`env | grep ^TZ=`" test-clickhouse-client clickhouse-client "$@"' | sudo tee /usr/local/bin/clickhouse-client > /dev/null | |
111 | - sudo chmod +x /usr/local/bin/clickhouse-client | |
112 | # Overriding setup.cfg. Set host=clickhouse-server | |
113 | - sed -i 's/^host=localhost$/host=clickhouse-server/' setup.cfg | |
114 | # Make host think that clickhouse-server is localhost | |
115 | - echo '127.0.0.1 clickhouse-server' | sudo tee /etc/hosts > /dev/null | |
116 | - pip install --upgrade pip setuptools | |
117 | - pip install cython | |
118 | ||
119 | script: | |
120 | - valgrind --error-exitcode=1 python setup.py test | |
121 | ||
122 | env: | |
123 | - VERSION=20.3.7.46 | |
124 | - USE_NUMPY=1 | |
125 | - PYTHONMALLOC=malloc | |
126 | ||
127 | - stage: wheels | |
128 | name: Wheels for Linux | |
129 | os: linux | |
130 | language: python | |
131 | python: | |
132 | - "3.6" | |
133 | ||
134 | install: | |
135 | - pip install --upgrade pip setuptools | |
136 | - pip install cython cibuildwheel==1.11.0 | |
137 | ||
138 | script: | |
139 | - cibuildwheel --output-dir wheelhouse | |
140 | ||
141 | env: | |
142 | - CIBW_BUILD='*p3*' | |
143 | - CIBW_BEFORE_BUILD='pip install cython' | |
144 | deploy: | |
145 | name: Linux | |
146 | provider: releases | |
147 | api_key: $GITHUB_TOKEN | |
148 | file_glob: true | |
149 | file: wheelhouse/* | |
150 | skip_cleanup: true | |
151 | draft: true | |
152 | prerelease: true | |
153 | overwrite: true | |
154 | on: | |
155 | tags: true | |
156 | ||
157 | - stage: wheels | |
158 | name: Wheels for Linux aarch64 | |
159 | arch: arm64 | |
160 | os: linux | |
161 | language: python | |
162 | python: | |
163 | - "3.6" | |
164 | install: | |
165 | - pip install --upgrade pip setuptools | |
166 | - pip install cython cibuildwheel==1.11.0 | |
167 | ||
168 | script: | |
169 | - cibuildwheel --output-dir wheelhouse | |
170 | ||
171 | env: | |
172 | - CIBW_BUILD='*p3*' | |
173 | - CIBW_BEFORE_BUILD='pip install cython' | |
174 | deploy: | |
175 | name: Linux aarch64 | |
176 | provider: releases | |
177 | api_key: $GITHUB_TOKEN | |
178 | file_glob: true | |
179 | file: wheelhouse/* | |
180 | skip_cleanup: true | |
181 | draft: true | |
182 | prerelease: true | |
183 | overwrite: true | |
184 | on: | |
185 | tags: true | |
186 | ||
187 | - stage: wheels | |
188 | name: Wheels for OS X | |
189 | os: osx | |
190 | language: generic | |
191 | ||
192 | install: | |
193 | - pip3 install --upgrade pip setuptools | |
194 | - pip3 install cython cibuildwheel==1.11.0 | |
195 | ||
196 | script: | |
197 | - cibuildwheel --output-dir wheelhouse | |
198 | ||
199 | env: | |
200 | - CIBW_BUILD='*p3*' | |
201 | - CIBW_BEFORE_BUILD='pip install cython' | |
202 | deploy: | |
203 | name: Mac OS X | |
204 | provider: releases | |
205 | api_key: $GITHUB_TOKEN | |
206 | file_glob: true | |
207 | file: wheelhouse/* | |
208 | skip_cleanup: true | |
209 | draft: true | |
210 | prerelease: true | |
211 | overwrite: true | |
212 | on: | |
213 | tags: true | |
214 | ||
215 | - stage: wheels | |
216 | name: Wheels for Windows | |
217 | os: windows | |
218 | language: shell | |
219 | ||
220 | install: | |
221 | - choco install python3 --version 3.9.0 --params "/InstallDir:C:\Python39" | |
222 | script: | |
223 | - C:/Python39/python -m pip install cibuildwheel==1.11.0 | |
224 | - C:/Python39/python -m cibuildwheel --platform windows --output-dir wheelhouse | |
225 | ||
226 | env: | |
227 | - CIBW_BUILD='cp*' | |
228 | - CIBW_BEFORE_BUILD='python -m pip install cython' | |
229 | deploy: | |
230 | name: Windows Python | |
231 | provider: releases | |
232 | api_key: $GITHUB_TOKEN | |
233 | file_glob: true | |
234 | file: wheelhouse/* | |
235 | skip_cleanup: true | |
236 | draft: true | |
237 | prerelease: true | |
238 | overwrite: true | |
239 | on: | |
240 | tags: true | |
241 | ||
242 | - stage: wheels | |
243 | name: Wheels for Windows PyPy | |
244 | os: windows | |
245 | language: shell | |
246 | ||
247 | install: | |
248 | - choco install python3 --version 3.9.0 --params "/InstallDir:C:\Python39" | |
249 | script: | |
250 | - C:/Python39/python -m pip install cibuildwheel==1.11.0 | |
251 | - C:/Python39/python -m cibuildwheel --platform windows --output-dir wheelhouse | |
252 | ||
253 | env: | |
254 | - CIBW_BUILD='pp*' | |
255 | - CIBW_BEFORE_BUILD='python -m pip install cython' | |
256 | deploy: | |
257 | name: Windows Python PyPy | |
258 | provider: releases | |
259 | api_key: $GITHUB_TOKEN | |
260 | file_glob: true | |
261 | file: wheelhouse/* | |
262 | skip_cleanup: true | |
263 | draft: true | |
264 | prerelease: true | |
265 | overwrite: true | |
266 | on: | |
267 | tags: true |
0 | 0 | # Changelog |
1 | 1 | |
2 | 2 | ## Unreleased |
3 | ||
4 | ## [0.2.2] - 2021-10-24 | |
3 | ### Added | |
4 | - `tzlocal`>=4.0 support. Pull request [#263](https://github.com/mymarilyn/clickhouse-driver/pull/263) by [azat](https://github.com/azat). | |
5 | - `quota_key` support. | |
6 | - Wheels for Python 3.10. | |
7 | - Bool type. Pull request [#279](https://github.com/mymarilyn/clickhouse-driver/pull/279) by [adrian17](https://github.com/adrian17). | |
8 | - Nested type with `flatten_nested=0`. Pull request [#285](https://github.com/mymarilyn/clickhouse-driver/pull/285) by [spff](https://github.com/spff). | |
9 | ||
10 | ### Fixed | |
11 | - Handle partially consumed query. Solves issue [#117](https://github.com/mymarilyn/clickhouse-driver/issues/117). | |
12 | - Fallback to generic columns when NumPy support is not implemented for column type. Solves issue [#254](https://github.com/mymarilyn/clickhouse-driver/issues/254). | |
13 | - Broken ZSTD decompression. Solves issue [#269](https://github.com/mymarilyn/clickhouse-driver/issues/269). | |
14 | - External tables passing with NumPy. Solves issue [#267](https://github.com/mymarilyn/clickhouse-driver/issues/267). | |
15 | - Consider tzinfo for datetime parameters substitution. Solves issue [#268](https://github.com/mymarilyn/clickhouse-driver/issues/268). | |
16 | - Do not use NumPy columns inside generic columns. Solves issue [#272](https://github.com/mymarilyn/clickhouse-driver/issues/272). | |
17 | - Decimal128 and Decimal256 types_check. Solves issue [#274](https://github.com/mymarilyn/clickhouse-driver/issues/274). | |
18 | - Insertion using `execute` in DB API. Solves issue [#179](https://github.com/mymarilyn/clickhouse-driver/issues/179). Pull request [#276](https://github.com/mymarilyn/clickhouse-driver/pull/276) by [nnseva](https://github.com/nnseva). | |
19 | - Variables cannot be declared with `cpdef` in Cython 3. Pull request [#281](https://github.com/mymarilyn/clickhouse-driver/pull/281) by [ym](https://github.com/ym). | |
20 | ||
21 | ### Changed | |
22 | - Switch from nose test runner to pytest. | |
23 | - Migrate from Travis CI to GitHub Actions. | |
24 | ||
25 | ## [0.2.2] - 2021-09-24 | |
5 | 26 | ### Added |
6 | 27 | - DateTime64 extended range. Pull request [#222](https://github.com/mymarilyn/clickhouse-driver/pull/222) by [0x4ec7](https://github.com/0x4ec7). |
7 | 28 | - Support for using `Client` as context manager closing connection on exit. Solves issue [#237](https://github.com/mymarilyn/clickhouse-driver/issues/237). Pull request [#206](https://github.com/mymarilyn/clickhouse-driver/pull/238) by [wlhjason](https://github.com/wlhjason). |
358 | 379 | - Date/DateTime types. |
359 | 380 | - String types. |
360 | 381 | |
361 | [Unreleased]: https://github.com/mymarilyn/clickhouse-driver/compare/0.2.2...HEAD | |
382 | [Unreleased]: https://github.com/mymarilyn/clickhouse-driver/compare/0.2.3...HEAD | |
383 | [0.2.3]: https://github.com/mymarilyn/clickhouse-driver/compare/0.2.2...0.2.3 | |
362 | 384 | [0.2.2]: https://github.com/mymarilyn/clickhouse-driver/compare/0.2.1...0.2.2 |
363 | 385 | [0.2.1]: https://github.com/mymarilyn/clickhouse-driver/compare/0.2.0...0.2.1 |
364 | 386 | [0.2.0]: https://github.com/mymarilyn/clickhouse-driver/compare/0.1.5...0.2.0 |
15 | 15 | .. image:: https://img.shields.io/pypi/dm/clickhouse-driver.svg |
16 | 16 | :target: https://pypi.org/project/clickhouse-driver |
17 | 17 | |
18 | .. image:: https://travis-ci.org/mymarilyn/clickhouse-driver.svg?branch=master | |
19 | :target: https://travis-ci.org/mymarilyn/clickhouse-driver | |
18 | .. image:: https://github.com/mymarilyn/clickhouse-driver/actions/workflows/actions.yml/badge.svg | |
19 | :target: https://github.com/mymarilyn/clickhouse-driver/actions/workflows/actions.yml | |
20 | 20 | |
21 | 21 | ClickHouse Python Driver with native (TCP) interface support. |
22 | 22 | |
42 | 42 | * Enum8/16 |
43 | 43 | * Array(T) |
44 | 44 | * Nullable(T) |
45 | * Bool | |
45 | 46 | * UUID |
46 | 47 | * Decimal |
47 | 48 | * IPv4/IPv6 |
2 | 2 | from .dbapi import connect |
3 | 3 | |
4 | 4 | |
5 | VERSION = (0, 2, 2) | |
5 | VERSION = (0, 2, 3) | |
6 | 6 | __version__ = '.'.join(str(x) for x in VERSION) |
7 | 7 | |
8 | 8 | __all__ = ['Client', 'connect'] |
0 | 0 | from .reader import read_varint, read_binary_uint8, read_binary_int32 |
1 | 1 | from .varint import write_varint |
2 | 2 | from .writer import write_binary_uint8, write_binary_int32 |
3 | from .columns import nestedcolumn | |
3 | 4 | |
4 | 5 | |
5 | 6 | class BlockInfo(object): |
150 | 151 | return [row[index] for row in self.data] |
151 | 152 | |
152 | 153 | def _mutate_dicts_to_rows(self, data): |
153 | column_names = [x[0] for x in self.columns_with_types] | |
154 | ||
155 | 154 | check_row_type = False |
156 | 155 | if self.types_check: |
157 | 156 | check_row_type = self._check_dict_row_type |
158 | 157 | |
158 | return self._pure_mutate_dicts_to_rows( | |
159 | data, | |
160 | self.columns_with_types, | |
161 | check_row_type, | |
162 | ) | |
163 | ||
164 | def _pure_mutate_dicts_to_rows( | |
165 | self, | |
166 | data, | |
167 | columns_with_types, | |
168 | check_row_type, | |
169 | ): | |
170 | columns_with_cwt = [] | |
171 | for name, type_ in columns_with_types: | |
172 | cwt = None | |
173 | if type_.startswith('Nested'): | |
174 | cwt = nestedcolumn.get_columns_with_types(type_) | |
175 | columns_with_cwt.append((name, cwt)) | |
176 | ||
159 | 177 | for i, row in enumerate(data): |
160 | 178 | if check_row_type: |
161 | 179 | check_row_type(row) |
162 | 180 | |
163 | data[i] = [row[name] for name in column_names] | |
181 | new_data = [] | |
182 | for name, cwt in columns_with_cwt: | |
183 | if cwt is None: | |
184 | new_data.append(row[name]) | |
185 | else: | |
186 | new_data.append(self._pure_mutate_dicts_to_rows( | |
187 | row[name], cwt, check_row_type | |
188 | )) | |
189 | data[i] = new_data | |
190 | # return for recursion | |
191 | return data | |
164 | 192 | |
165 | 193 | def _check_rows(self, data): |
166 | 194 | expected_row_len = len(self.columns_with_types) |
45 | 45 | * ``opentelemetry_tracestate`` -- OpenTelemetry tracestate header as |
46 | 46 | described by W3C Trace Context recommendation. |
47 | 47 | New in version *0.2.2*. |
48 | * ``quota_key`` -- A string to differentiate quotas when the user have | |
49 | keyed quotas configured on server. | |
50 | New in version *0.2.3*. | |
48 | 51 | """ |
49 | 52 | |
50 | 53 | available_client_settings = ( |
53 | 56 | 'strings_encoding', |
54 | 57 | 'use_numpy', |
55 | 58 | 'opentelemetry_traceparent', |
56 | 'opentelemetry_tracestate' | |
59 | 'opentelemetry_tracestate', | |
60 | 'quota_key' | |
57 | 61 | ) |
58 | 62 | |
59 | 63 | def __init__(self, *args, **kwargs): |
60 | self.settings = kwargs.pop('settings', {}).copy() | |
64 | self.settings = (kwargs.pop('settings', None) or {}).copy() | |
61 | 65 | |
62 | 66 | self.client_settings = { |
63 | 67 | 'insert_block_size': int(self.settings.pop( |
77 | 81 | ), |
78 | 82 | 'opentelemetry_tracestate': self.settings.pop( |
79 | 83 | 'opentelemetry_tracestate', '' |
84 | ), | |
85 | 'quota_key': self.settings.pop( | |
86 | 'quota_key', '' | |
80 | 87 | ) |
81 | 88 | } |
82 | 89 | |
212 | 219 | self.connection.database = query[4:].strip() |
213 | 220 | |
214 | 221 | @contextmanager |
215 | def disconnect_on_error(self, query): | |
222 | def disconnect_on_error(self, query, settings): | |
223 | self.make_query_settings(settings) | |
224 | ||
216 | 225 | try: |
226 | self.connection.force_connect() | |
227 | self.last_query = QueryInfo() | |
228 | ||
217 | 229 | yield |
230 | ||
218 | 231 | self.track_current_database(query) |
219 | 232 | |
220 | 233 | except (Exception, KeyboardInterrupt): |
266 | 279 | """ |
267 | 280 | |
268 | 281 | start_time = time() |
269 | self.make_query_settings(settings) | |
270 | self.connection.force_connect() | |
271 | self.last_query = QueryInfo() | |
272 | ||
273 | with self.disconnect_on_error(query): | |
282 | ||
283 | with self.disconnect_on_error(query, settings): | |
274 | 284 | # INSERT queries can use list/tuple/generator of list/tuples/dicts. |
275 | 285 | # For SELECT parameters can be passed in only in dict right now. |
276 | 286 | is_insert = isinstance(params, (list, tuple, types.GeneratorType)) |
321 | 331 | :return: :ref:`progress-query-result` proxy. |
322 | 332 | """ |
323 | 333 | |
324 | self.make_query_settings(settings) | |
325 | self.connection.force_connect() | |
326 | self.last_query = QueryInfo() | |
327 | ||
328 | with self.disconnect_on_error(query): | |
334 | with self.disconnect_on_error(query, settings): | |
329 | 335 | return self.process_ordinary_query_with_progress( |
330 | 336 | query, params=params, with_column_types=with_column_types, |
331 | 337 | external_tables=external_tables, query_id=query_id, |
360 | 366 | :return: :ref:`iter-query-result` proxy. |
361 | 367 | """ |
362 | 368 | |
363 | self.make_query_settings(settings) | |
364 | self.connection.force_connect() | |
365 | self.last_query = QueryInfo() | |
366 | ||
367 | with self.disconnect_on_error(query): | |
369 | with self.disconnect_on_error(query, settings): | |
368 | 370 | return self.iter_process_ordinary_query( |
369 | 371 | query, params=params, with_column_types=with_column_types, |
370 | 372 | external_tables=external_tables, |
431 | 433 | raise RuntimeError('Extras for NumPy must be installed') |
432 | 434 | |
433 | 435 | start_time = time() |
434 | self.make_query_settings(settings) | |
435 | self.connection.force_connect() | |
436 | self.last_query = QueryInfo() | |
437 | ||
438 | with self.disconnect_on_error(query): | |
436 | ||
437 | with self.disconnect_on_error(query, settings): | |
439 | 438 | self.connection.send_query(query, query_id=query_id) |
440 | 439 | self.connection.send_external_tables(external_tables) |
441 | 440 | |
456 | 455 | types_check=False, columnar=False): |
457 | 456 | |
458 | 457 | if params is not None: |
459 | query = self.substitute_params(query, params) | |
458 | query = self.substitute_params( | |
459 | query, params, self.connection.context | |
460 | ) | |
460 | 461 | |
461 | 462 | self.connection.send_query(query, query_id=query_id) |
462 | 463 | self.connection.send_external_tables(external_tables, |
470 | 471 | types_check=False, columnar=False): |
471 | 472 | |
472 | 473 | if params is not None: |
473 | query = self.substitute_params(query, params) | |
474 | query = self.substitute_params( | |
475 | query, params, self.connection.context | |
476 | ) | |
474 | 477 | |
475 | 478 | self.connection.send_query(query, query_id=query_id) |
476 | 479 | self.connection.send_external_tables(external_tables, |
484 | 487 | types_check=False): |
485 | 488 | |
486 | 489 | if params is not None: |
487 | query = self.substitute_params(query, params) | |
490 | query = self.substitute_params( | |
491 | query, params, self.connection.context | |
492 | ) | |
488 | 493 | |
489 | 494 | self.connection.send_query(query, query_id=query_id) |
490 | 495 | self.connection.send_external_tables(external_tables, |
588 | 593 | # Client must still read until END_OF_STREAM packet. |
589 | 594 | return self.receive_result(with_column_types=with_column_types) |
590 | 595 | |
591 | def substitute_params(self, query, params): | |
596 | def substitute_params(self, query, params, context): | |
592 | 597 | if not isinstance(params, dict): |
593 | 598 | raise ValueError('Parameters are expected in dict form') |
594 | 599 | |
595 | escaped = escape_params(params) | |
600 | escaped = escape_params(params, context) | |
596 | 601 | return query % escaped |
597 | 602 | |
598 | 603 | @classmethod |
33 | 33 | initial_query_id = '' |
34 | 34 | initial_address = '0.0.0.0:0' |
35 | 35 | |
36 | quota_key = '' | |
37 | ||
38 | 36 | def __init__(self, client_name, context): |
39 | 37 | self.query_kind = ClientInfo.QueryKind.NO_QUERY |
40 | 38 | |
49 | 47 | context.client_settings['opentelemetry_traceparent'], |
50 | 48 | context.client_settings['opentelemetry_tracestate'] |
51 | 49 | ) |
50 | ||
51 | self.quota_key = context.client_settings['quota_key'] | |
52 | 52 | |
53 | 53 | super(ClientInfo, self).__init__() |
54 | 54 |
0 | from .base import FormatColumn | |
1 | ||
2 | ||
3 | class BoolColumn(FormatColumn): | |
4 | ch_type = 'Bool' | |
5 | py_types = (bool, ) | |
6 | format = '?' |
0 | 0 | from datetime import datetime |
1 | 1 | |
2 | 2 | from pytz import timezone as get_timezone, utc |
3 | from tzlocal import get_localzone | |
4 | ||
3 | from ..util.compat import get_localzone_name_compat | |
5 | 4 | from .base import FormatColumn |
6 | 5 | |
7 | 6 | EPOCH = datetime(1970, 1, 1, tzinfo=utc) |
192 | 191 | offset_naive = False |
193 | 192 | else: |
194 | 193 | if not context.settings.get('use_client_time_zone', False): |
195 | try: | |
196 | local_timezone = get_localzone().key | |
197 | except AttributeError: | |
198 | local_timezone = get_localzone().zone | |
199 | except Exception: | |
200 | local_timezone = None | |
201 | ||
194 | local_timezone = get_localzone_name_compat() | |
202 | 195 | if local_timezone != context.server_info.timezone: |
203 | 196 | tz_name = context.server_info.timezone |
204 | 197 |
7 | 7 | class DecimalColumn(FormatColumn): |
8 | 8 | py_types = (Decimal, float, int) |
9 | 9 | max_precision = None |
10 | int_size = None | |
11 | 10 | |
12 | 11 | def __init__(self, precision, scale, types_check=False, **kwargs): |
13 | 12 | self.precision = precision |
15 | 14 | super(DecimalColumn, self).__init__(**kwargs) |
16 | 15 | |
17 | 16 | if types_check: |
18 | max_signed_int = (1 << (8 * self.int_size - 1)) - 1 | |
17 | def check_item(value): | |
18 | parts = str(value).split('.') | |
19 | int_part = parts[0] | |
20 | frac_part = parts[1] if len(parts) > 1 else '' | |
19 | 21 | |
20 | def check_item(value): | |
21 | if value < -max_signed_int or value > max_signed_int: | |
22 | if len(int_part) > precision: | |
23 | raise ColumnTypeMismatchException(value) | |
24 | ||
25 | if len(frac_part) > scale: | |
22 | 26 | raise ColumnTypeMismatchException(value) |
23 | 27 | |
24 | 28 | self.check_item = check_item |
79 | 83 | class Decimal32Column(DecimalColumn): |
80 | 84 | format = 'i' |
81 | 85 | max_precision = 9 |
82 | int_size = 4 | |
83 | 86 | |
84 | 87 | |
85 | 88 | class Decimal64Column(DecimalColumn): |
86 | 89 | format = 'q' |
87 | 90 | max_precision = 18 |
88 | int_size = 8 | |
89 | 91 | |
90 | 92 | |
91 | 93 | class Decimal128Column(DecimalColumn, Int128Column): |
2 | 2 | |
3 | 3 | from .. import writer |
4 | 4 | |
5 | cpdef object MAX_UINT64 = writer.MAX_UINT64 | |
6 | cpdef object MAX_INT64 = writer.MAX_INT64 | |
5 | cdef object MAX_UINT64 = writer.MAX_UINT64 | |
6 | cdef object MAX_INT64 = writer.MAX_INT64 | |
7 | 7 | |
8 | 8 | |
9 | 9 | def int128_from_quads(quad_items, unsigned long long n_items): |
0 | ||
1 | from .arraycolumn import create_array_column | |
2 | ||
3 | ||
4 | def create_nested_column(spec, column_by_spec_getter): | |
5 | return create_array_column( | |
6 | 'Array(Tuple({}))'.format(','.join(get_nested_columns(spec))), | |
7 | column_by_spec_getter=column_by_spec_getter | |
8 | ) | |
9 | ||
10 | ||
11 | def get_nested_columns(spec): | |
12 | brackets = 0 | |
13 | column_begin = 0 | |
14 | ||
15 | inner_spec = get_inner_spec(spec) | |
16 | nested_columns = [] | |
17 | for i, x in enumerate(inner_spec + ','): | |
18 | if x == ',': | |
19 | if brackets == 0: | |
20 | nested_columns.append(inner_spec[column_begin:i]) | |
21 | column_begin = i + 1 | |
22 | elif x == '(': | |
23 | brackets += 1 | |
24 | elif x == ')': | |
25 | brackets -= 1 | |
26 | elif x == ' ': | |
27 | if brackets == 0: | |
28 | column_begin = i + 1 | |
29 | return nested_columns | |
30 | ||
31 | ||
32 | def get_columns_with_types(spec): | |
33 | brackets = 0 | |
34 | prev_comma = 0 | |
35 | prev_space = 0 | |
36 | ||
37 | inner_spec = get_inner_spec(spec) | |
38 | columns_with_types = [] | |
39 | ||
40 | for i, x in enumerate(inner_spec + ','): | |
41 | if x == ',': | |
42 | if brackets == 0: | |
43 | columns_with_types.append(( | |
44 | inner_spec[prev_comma:prev_space].strip(), | |
45 | inner_spec[prev_space:i] | |
46 | )) | |
47 | prev_comma = i + 1 | |
48 | elif x == '(': | |
49 | brackets += 1 | |
50 | elif x == ')': | |
51 | brackets -= 1 | |
52 | elif x == ' ': | |
53 | if brackets == 0: | |
54 | prev_space = i + 1 | |
55 | return columns_with_types | |
56 | ||
57 | ||
58 | def get_inner_spec(spec): | |
59 | brackets = 0 | |
60 | offset = len('Nested') | |
61 | i = offset | |
62 | for i, ch in enumerate(spec[offset:], offset): | |
63 | if ch == '(': | |
64 | brackets += 1 | |
65 | ||
66 | elif ch == ')': | |
67 | brackets -= 1 | |
68 | ||
69 | if brackets == 0: | |
70 | break | |
71 | ||
72 | return spec[offset + 1:i] |
0 | 0 | import numpy as np |
1 | 1 | import pandas as pd |
2 | 2 | from pytz import timezone as get_timezone |
3 | from tzlocal import get_localzone | |
4 | 3 | |
5 | 4 | from .base import NumpyColumn |
5 | from ...util.compat import get_localzone_name_compat | |
6 | 6 | |
7 | 7 | |
8 | 8 | class NumpyDateTimeColumnBase(NumpyColumn): |
121 | 121 | |
122 | 122 | tz_name = timezone = None |
123 | 123 | offset_naive = True |
124 | local_timezone = None | |
125 | 124 | |
126 | 125 | # As Numpy do not use local timezone for converting timestamp to |
127 | 126 | # datetime we need always detect local timezone for manual converting. |
128 | try: | |
129 | local_timezone = get_localzone().key | |
130 | except AttributeError: | |
131 | local_timezone = get_localzone().zone | |
132 | except Exception: | |
133 | pass | |
127 | local_timezone = get_localzone_name_compat() | |
134 | 128 | |
135 | 129 | # Use column's timezone if it's specified. |
136 | 130 | if spec and spec[-1] == ')': |
0 | 0 | from ... import errors |
1 | from ..arraycolumn import create_array_column | |
2 | 1 | from .datecolumn import NumpyDateColumn |
3 | 2 | from .datetimecolumn import create_numpy_datetime_column |
4 | from ..decimalcolumn import create_decimal_column | |
5 | from ..enumcolumn import create_enum_column | |
6 | 3 | from .floatcolumn import NumpyFloat32Column, NumpyFloat64Column |
7 | 4 | from .intcolumn import ( |
8 | 5 | NumpyInt8Column, NumpyInt16Column, NumpyInt32Column, NumpyInt64Column, |
9 | 6 | NumpyUInt8Column, NumpyUInt16Column, NumpyUInt32Column, NumpyUInt64Column |
10 | 7 | ) |
11 | 8 | from .lowcardinalitycolumn import create_numpy_low_cardinality_column |
12 | from ..nothingcolumn import NothingColumn | |
13 | from ..nullcolumn import NullColumn | |
9 | from .stringcolumn import create_string_column | |
14 | 10 | from ..nullablecolumn import create_nullable_column |
15 | from ..simpleaggregatefunctioncolumn import ( | |
16 | create_simple_aggregate_function_column | |
17 | ) | |
18 | from .stringcolumn import create_string_column | |
19 | from ..tuplecolumn import create_tuple_column | |
20 | from ..uuidcolumn import UUIDColumn | |
21 | from ..intervalcolumn import ( | |
22 | IntervalYearColumn, IntervalMonthColumn, IntervalWeekColumn, | |
23 | IntervalDayColumn, IntervalHourColumn, IntervalMinuteColumn, | |
24 | IntervalSecondColumn | |
25 | ) | |
26 | from ..ipcolumn import IPv4Column, IPv6Column | |
27 | 11 | |
28 | 12 | column_by_type = {c.ch_type: c for c in [ |
29 | 13 | NumpyDateColumn, |
30 | 14 | NumpyFloat32Column, NumpyFloat64Column, |
31 | 15 | NumpyInt8Column, NumpyInt16Column, NumpyInt32Column, NumpyInt64Column, |
32 | NumpyUInt8Column, NumpyUInt16Column, NumpyUInt32Column, NumpyUInt64Column, | |
33 | NothingColumn, NullColumn, UUIDColumn, | |
34 | IntervalYearColumn, IntervalMonthColumn, IntervalWeekColumn, | |
35 | IntervalDayColumn, IntervalHourColumn, IntervalMinuteColumn, | |
36 | IntervalSecondColumn, IPv4Column, IPv6Column | |
16 | NumpyUInt8Column, NumpyUInt16Column, NumpyUInt32Column, NumpyUInt64Column | |
37 | 17 | ]} |
38 | 18 | |
39 | 19 | |
44 | 24 | if spec == 'String' or spec.startswith('FixedString'): |
45 | 25 | return create_string_column(spec, column_options) |
46 | 26 | |
47 | elif spec.startswith('Enum'): | |
48 | return create_enum_column(spec, column_options) | |
49 | ||
50 | 27 | elif spec.startswith('DateTime'): |
51 | 28 | return create_numpy_datetime_column(spec, column_options) |
52 | ||
53 | elif spec.startswith('Decimal'): | |
54 | return create_decimal_column(spec, column_options) | |
55 | ||
56 | elif spec.startswith('Array'): | |
57 | return create_array_column(spec, create_column_with_options) | |
58 | ||
59 | elif spec.startswith('Tuple'): | |
60 | return create_tuple_column(spec, create_column_with_options) | |
61 | 29 | |
62 | 30 | elif spec.startswith('Nullable'): |
63 | 31 | return create_nullable_column(spec, create_column_with_options) |
65 | 33 | elif spec.startswith('LowCardinality'): |
66 | 34 | return create_numpy_low_cardinality_column(spec, |
67 | 35 | create_column_with_options) |
68 | ||
69 | elif spec.startswith('SimpleAggregateFunction'): | |
70 | return create_simple_aggregate_function_column( | |
71 | spec, create_column_with_options) | |
72 | ||
73 | 36 | else: |
74 | try: | |
37 | if spec in column_by_type: | |
75 | 38 | cls = column_by_type[spec] |
76 | 39 | return cls(**column_options) |
77 | 40 | |
78 | except KeyError as e: | |
79 | raise errors.UnknownTypeError('Unknown type {}'.format(e.args[0])) | |
41 | raise errors.UnknownTypeError('Unknown type {}'.format(spec)) |
0 | import logging | |
1 | ||
0 | 2 | from .. import errors |
1 | 3 | from .arraycolumn import create_array_column |
4 | from .boolcolumn import BoolColumn | |
2 | 5 | from .datecolumn import DateColumn, Date32Column |
3 | 6 | from .datetimecolumn import create_datetime_column |
4 | 7 | from .decimalcolumn import create_decimal_column |
20 | 23 | ) |
21 | 24 | from .stringcolumn import create_string_column |
22 | 25 | from .tuplecolumn import create_tuple_column |
26 | from .nestedcolumn import create_nested_column | |
23 | 27 | from .uuidcolumn import UUIDColumn |
24 | 28 | from .intervalcolumn import ( |
25 | 29 | IntervalYearColumn, IntervalMonthColumn, IntervalWeekColumn, |
37 | 41 | NothingColumn, NullColumn, UUIDColumn, |
38 | 42 | IntervalYearColumn, IntervalMonthColumn, IntervalWeekColumn, |
39 | 43 | IntervalDayColumn, IntervalHourColumn, IntervalMinuteColumn, |
40 | IntervalSecondColumn, IPv4Column, IPv6Column | |
44 | IntervalSecondColumn, IPv4Column, IPv6Column, BoolColumn | |
41 | 45 | ]} |
42 | 46 | |
47 | logger = logging.getLogger(__name__) | |
43 | 48 | |
44 | def get_column_by_spec(spec, column_options): | |
49 | ||
50 | def get_column_by_spec(spec, column_options, use_numpy=None): | |
45 | 51 | context = column_options['context'] |
46 | use_numpy = context.client_settings['use_numpy'] if context else False | |
52 | ||
53 | if use_numpy is None: | |
54 | use_numpy = context.client_settings['use_numpy'] if context else False | |
47 | 55 | |
48 | 56 | if use_numpy: |
49 | 57 | from .numpy.service import get_numpy_column_by_spec |
50 | return get_numpy_column_by_spec(spec, column_options) | |
58 | ||
59 | try: | |
60 | return get_numpy_column_by_spec(spec, column_options) | |
61 | except errors.UnknownTypeError: | |
62 | use_numpy = False | |
63 | logger.warning('NumPy support is not implemented for %s. ' | |
64 | 'Using generic column', spec) | |
51 | 65 | |
52 | 66 | def create_column_with_options(x): |
53 | return get_column_by_spec(x, column_options) | |
67 | return get_column_by_spec(x, column_options, use_numpy=use_numpy) | |
54 | 68 | |
55 | 69 | if spec == 'String' or spec.startswith('FixedString'): |
56 | 70 | return create_string_column(spec, column_options) |
70 | 84 | elif spec.startswith('Tuple'): |
71 | 85 | return create_tuple_column(spec, create_column_with_options) |
72 | 86 | |
87 | elif spec.startswith('Nested'): | |
88 | return create_nested_column(spec, create_column_with_options) | |
89 | ||
73 | 90 | elif spec.startswith('Nullable'): |
74 | 91 | return create_nullable_column(spec, create_column_with_options) |
75 | 92 | |
88 | 105 | cls = column_by_type[spec] |
89 | 106 | return cls(**column_options) |
90 | 107 | |
91 | except KeyError as e: | |
92 | raise errors.UnknownTypeError('Unknown type {}'.format(e.args[0])) | |
108 | except KeyError: | |
109 | raise errors.UnknownTypeError('Unknown type {}'.format(spec)) | |
93 | 110 | |
94 | 111 | |
95 | 112 | def read_column(context, column_spec, n_items, buf): |
48 | 48 | |
49 | 49 | |
50 | 50 | def get_inner_spec(spec): |
51 | brackets = 1 | |
52 | offset = len('Tuple(') | |
51 | brackets = 0 | |
52 | offset = len('Tuple') | |
53 | 53 | i = offset |
54 | 54 | for i, ch in enumerate(spec[offset:], offset): |
55 | if brackets == 0: | |
56 | break | |
57 | ||
58 | 55 | if ch == '(': |
59 | 56 | brackets += 1 |
60 | 57 | |
61 | 58 | elif ch == ')': |
62 | 59 | brackets -= 1 |
63 | 60 | |
64 | return spec[offset:i] | |
61 | if brackets == 0: | |
62 | break | |
63 | ||
64 | return spec[offset + 1:i] |
0 | 0 | from io import BytesIO |
1 | ||
2 | from ..reader import read_binary_uint32 | |
3 | from ..writer import write_binary_uint8, write_binary_uint32 | |
4 | from .. import errors | |
1 | 5 | |
2 | 6 | try: |
3 | 7 | from clickhouse_cityhash.cityhash import CityHash128 |
5 | 9 | raise RuntimeError( |
6 | 10 | 'Package clickhouse-cityhash is required to use compression' |
7 | 11 | ) |
8 | ||
9 | from .. import errors | |
10 | 12 | |
11 | 13 | |
12 | 14 | class BaseCompressor(object): |
30 | 32 | def write(self, p_str): |
31 | 33 | self.data.write(p_str) |
32 | 34 | |
35 | def compress_data(self, data): | |
36 | raise NotImplementedError | |
37 | ||
33 | 38 | def get_compressed_data(self, extra_header_size): |
34 | raise NotImplementedError | |
39 | rv = BytesIO() | |
40 | ||
41 | data = self.get_value() | |
42 | compressed = self.compress_data(data) | |
43 | ||
44 | header_size = extra_header_size + 4 + 4 # sizes | |
45 | ||
46 | write_binary_uint32(header_size + len(compressed), rv) | |
47 | write_binary_uint32(len(data), rv) | |
48 | rv.write(compressed) | |
49 | ||
50 | return rv.getvalue() | |
35 | 51 | |
36 | 52 | |
37 | 53 | class BaseDecompressor(object): |
42 | 58 | self.stream = real_stream |
43 | 59 | super(BaseDecompressor, self).__init__() |
44 | 60 | |
61 | def decompress_data(self, data, uncompressed_size): | |
62 | raise NotImplementedError | |
63 | ||
45 | 64 | def check_hash(self, compressed_data, compressed_hash): |
46 | 65 | if CityHash128(compressed_data) != compressed_hash: |
47 | 66 | raise errors.ChecksumDoesntMatchError() |
48 | 67 | |
49 | 68 | def get_decompressed_data(self, method_byte, compressed_hash, |
50 | 69 | extra_header_size): |
51 | raise NotImplementedError | |
70 | size_with_header = read_binary_uint32(self.stream) | |
71 | compressed_size = size_with_header - extra_header_size - 4 | |
72 | ||
73 | compressed = BytesIO(self.stream.read(compressed_size)) | |
74 | ||
75 | block_check = BytesIO() | |
76 | write_binary_uint8(method_byte, block_check) | |
77 | write_binary_uint32(size_with_header, block_check) | |
78 | block_check.write(compressed.getvalue()) | |
79 | ||
80 | self.check_hash(block_check.getvalue(), compressed_hash) | |
81 | ||
82 | uncompressed_size = read_binary_uint32(compressed) | |
83 | ||
84 | compressed = compressed.read(compressed_size - 4) | |
85 | ||
86 | return self.decompress_data(compressed, uncompressed_size) |
0 | from __future__ import absolute_import | |
1 | from io import BytesIO | |
2 | ||
3 | 0 | from lz4 import block |
4 | 1 | |
5 | 2 | from .base import BaseCompressor, BaseDecompressor |
6 | 3 | from ..protocol import CompressionMethod, CompressionMethodByte |
7 | from ..reader import read_binary_uint32 | |
8 | from ..writer import write_binary_uint32, write_binary_uint8 | |
9 | 4 | |
10 | 5 | |
11 | 6 | class Compressor(BaseCompressor): |
13 | 8 | method_byte = CompressionMethodByte.LZ4 |
14 | 9 | mode = 'default' |
15 | 10 | |
16 | def get_compressed_data(self, extra_header_size): | |
17 | rv = BytesIO() | |
18 | ||
19 | data = self.get_value() | |
20 | compressed = block.compress(data, store_size=False, mode=self.mode) | |
21 | ||
22 | header_size = extra_header_size + 4 + 4 # sizes | |
23 | ||
24 | write_binary_uint32(header_size + len(compressed), rv) | |
25 | write_binary_uint32(len(data), rv) | |
26 | rv.write(compressed) | |
27 | ||
28 | return rv.getvalue() | |
11 | def compress_data(self, data): | |
12 | return block.compress(data, store_size=False, mode=self.mode) | |
29 | 13 | |
30 | 14 | |
31 | 15 | class Decompressor(BaseDecompressor): |
32 | 16 | method = CompressionMethod.LZ4 |
33 | 17 | method_byte = CompressionMethodByte.LZ4 |
34 | 18 | |
35 | def get_decompressed_data(self, method_byte, compressed_hash, | |
36 | extra_header_size): | |
37 | size_with_header = read_binary_uint32(self.stream) | |
38 | compressed_size = size_with_header - extra_header_size - 4 | |
39 | ||
40 | compressed = BytesIO(self.stream.read(compressed_size)) | |
41 | ||
42 | block_check = BytesIO() | |
43 | write_binary_uint8(method_byte, block_check) | |
44 | write_binary_uint32(size_with_header, block_check) | |
45 | block_check.write(compressed.getvalue()) | |
46 | ||
47 | self.check_hash(block_check.getvalue(), compressed_hash) | |
48 | ||
49 | uncompressed_size = read_binary_uint32(compressed) | |
50 | ||
51 | compressed = compressed.read(compressed_size - 4) | |
52 | ||
53 | return block.decompress(compressed, | |
54 | uncompressed_size=uncompressed_size) | |
19 | def decompress_data(self, data, uncompressed_size): | |
20 | return block.decompress(data, uncompressed_size=uncompressed_size) |
0 | from __future__ import absolute_import | |
1 | from io import BytesIO | |
2 | ||
3 | 0 | import zstd |
4 | 1 | |
5 | 2 | from .base import BaseCompressor, BaseDecompressor |
6 | 3 | from ..protocol import CompressionMethod, CompressionMethodByte |
7 | from ..reader import read_binary_uint32 | |
8 | from ..writer import write_binary_uint32, write_binary_uint8 | |
9 | 4 | |
10 | 5 | |
11 | 6 | class Compressor(BaseCompressor): |
12 | 7 | method = CompressionMethod.ZSTD |
13 | 8 | method_byte = CompressionMethodByte.ZSTD |
14 | 9 | |
15 | def get_compressed_data(self, extra_header_size): | |
16 | rv = BytesIO() | |
17 | ||
18 | data = self.get_value() | |
19 | compressed = zstd.compress(data) | |
20 | ||
21 | header_size = extra_header_size + 4 + 4 # sizes | |
22 | ||
23 | write_binary_uint32(header_size + len(compressed), rv) | |
24 | write_binary_uint32(len(data), rv) | |
25 | rv.write(compressed) | |
26 | ||
27 | return rv.getvalue() | |
10 | def compress_data(self, data): | |
11 | return zstd.compress(data) | |
28 | 12 | |
29 | 13 | |
30 | 14 | class Decompressor(BaseDecompressor): |
31 | 15 | method = CompressionMethod.ZSTD |
32 | 16 | method_byte = CompressionMethodByte.ZSTD |
33 | 17 | |
34 | def get_decompressed_data(self, method_byte, compressed_hash, | |
35 | extra_header_size): | |
36 | size_with_header = read_binary_uint32(self.stream) | |
37 | compressed_size = size_with_header - extra_header_size - 4 | |
38 | ||
39 | compressed = BytesIO(self.stream.read(compressed_size)) | |
40 | ||
41 | block_check = BytesIO() | |
42 | write_binary_uint8(method_byte, block_check) | |
43 | write_binary_uint32(size_with_header, block_check) | |
44 | block_check.write(compressed.getvalue()) | |
45 | ||
46 | self.check_hash(block_check.getvalue(), compressed_hash) | |
47 | ||
48 | compressed = compressed.read(compressed_size - 4) | |
49 | ||
50 | return zstd.decompress(compressed) | |
18 | def decompress_data(self, data, uncompressed_size): | |
19 | return zstd.decompress(data) |
22 | 22 | from .readhelpers import read_exception |
23 | 23 | from .settings.writer import write_settings |
24 | 24 | from .streams.native import BlockInputStream, BlockOutputStream |
25 | from .util.compat import threading | |
25 | 26 | from .varint import write_varint, read_varint |
26 | 27 | from .writer import write_binary_str |
27 | 28 | |
202 | 203 | self.block_out = None |
203 | 204 | self.block_in_raw = None # log blocks are always not compressed |
204 | 205 | |
206 | self._lock = threading.Lock() | |
207 | self.is_query_executing = False | |
208 | ||
205 | 209 | super(Connection, self).__init__() |
206 | 210 | |
207 | 211 | def get_description(self): |
208 | 212 | return '{}:{}'.format(self.host, self.port) |
209 | 213 | |
210 | 214 | def force_connect(self): |
215 | self.check_query_execution() | |
216 | ||
211 | 217 | if not self.connected: |
212 | 218 | self.connect() |
213 | 219 | |
354 | 360 | self.block_in_raw = None |
355 | 361 | self.block_out = None |
356 | 362 | |
363 | self.is_query_executing = False | |
364 | ||
357 | 365 | def disconnect(self): |
358 | 366 | """ |
359 | 367 | Closes connection between server and client. |
495 | 503 | log_block(block) |
496 | 504 | |
497 | 505 | elif packet_type == ServerPacketTypes.END_OF_STREAM: |
506 | self.is_query_executing = False | |
498 | 507 | pass |
499 | 508 | |
500 | 509 | elif packet_type == ServerPacketTypes.TABLE_COLUMNS: |
612 | 621 | 'Empty table "{}" structure'.format(table['name']) |
613 | 622 | ) |
614 | 623 | |
615 | block = RowOrientedBlock(table['structure'], table['data'], | |
616 | types_check=types_check) | |
624 | data = table['data'] | |
625 | block_cls = RowOrientedBlock | |
626 | ||
627 | if self.context.client_settings['use_numpy']: | |
628 | from .numpy.block import NumpyColumnOrientedBlock | |
629 | ||
630 | columns = [x[0] for x in table['structure']] | |
631 | data = [data[column].values for column in columns] | |
632 | ||
633 | block_cls = NumpyColumnOrientedBlock | |
634 | ||
635 | block = block_cls(table['structure'], data, | |
636 | types_check=types_check) | |
617 | 637 | self.send_data(block, table_name=table['name']) |
618 | 638 | |
619 | 639 | # Empty block, end of data transfer. |
635 | 655 | 'Unexpected packet from server {} (expected {}, got {})' |
636 | 656 | .format(self.get_description(), expected, packet_type) |
637 | 657 | ) |
658 | ||
659 | def check_query_execution(self): | |
660 | self._lock.acquire(blocking=False) | |
661 | ||
662 | if self.is_query_executing: | |
663 | raise errors.PartiallyConsumedQueryError() | |
664 | ||
665 | self.is_query_executing = True | |
666 | self._lock.release() |
309 | 309 | self._rowcount = response |
310 | 310 | response = None |
311 | 311 | |
312 | if not response: | |
312 | if not response or isinstance(response, int): | |
313 | 313 | self._columns = self._types = self._rows = [] |
314 | if isinstance(response, int): | |
315 | self._rowcount = response | |
314 | 316 | return |
315 | 317 | |
316 | 318 | if self._stream_results: |
443 | 443 | |
444 | 444 | class CannotParseDomainError(Error): |
445 | 445 | code = ErrorCodes.CANNOT_PARSE_DOMAIN_VALUE_FROM_STRING |
446 | ||
447 | ||
448 | class PartiallyConsumedQueryError(Error): | |
449 | code = -1 | |
450 | ||
451 | def __str__(self): | |
452 | return 'Simultaneous queries on single connection detected' |
0 | ||
1 | # Drop this when minimum supported version will be 3.7. | |
2 | try: | |
3 | import threading | |
4 | except ImportError: | |
5 | import dummy_threading as threading # noqa: F401 | |
6 | ||
7 | try: | |
8 | # since tzlocal 4.0+ | |
9 | # this will avoid warning for get_localzone().key | |
10 | from tzlocal import get_localzone_name | |
11 | ||
12 | def get_localzone_name_compat(): | |
13 | try: | |
14 | return get_localzone_name() | |
15 | except Exception: | |
16 | return None | |
17 | except ImportError: | |
18 | from tzlocal import get_localzone | |
19 | ||
20 | def get_localzone_name_compat(): | |
21 | try: | |
22 | return get_localzone().key | |
23 | except AttributeError: | |
24 | return get_localzone().zone | |
25 | except Exception: | |
26 | return None |
0 | 0 | from datetime import date, datetime |
1 | 1 | from enum import Enum |
2 | 2 | from uuid import UUID |
3 | ||
4 | from pytz import timezone | |
3 | 5 | |
4 | 6 | |
5 | 7 | escape_chars_map = { |
16 | 18 | } |
17 | 19 | |
18 | 20 | |
19 | def escape_param(item): | |
21 | def escape_datetime(item, context): | |
22 | server_tz = timezone(context.server_info.timezone) | |
23 | ||
24 | if item.tzinfo is not None: | |
25 | item = item.astimezone(server_tz) | |
26 | ||
27 | return "'%s'" % item.strftime('%Y-%m-%d %H:%M:%S') | |
28 | ||
29 | ||
30 | def escape_param(item, context): | |
20 | 31 | if item is None: |
21 | 32 | return 'NULL' |
22 | 33 | |
23 | 34 | elif isinstance(item, datetime): |
24 | return "'%s'" % item.strftime('%Y-%m-%d %H:%M:%S') | |
35 | return escape_datetime(item, context) | |
25 | 36 | |
26 | 37 | elif isinstance(item, date): |
27 | 38 | return "'%s'" % item.strftime('%Y-%m-%d') |
30 | 41 | return "'%s'" % ''.join(escape_chars_map.get(c, c) for c in item) |
31 | 42 | |
32 | 43 | elif isinstance(item, list): |
33 | return "[%s]" % ', '.join(str(escape_param(x)) for x in item) | |
44 | return "[%s]" % ', '.join(str(escape_param(x, context)) for x in item) | |
34 | 45 | |
35 | 46 | elif isinstance(item, tuple): |
36 | return "(%s)" % ', '.join(str(escape_param(x)) for x in item) | |
47 | return "(%s)" % ', '.join(str(escape_param(x, context)) for x in item) | |
37 | 48 | |
38 | 49 | elif isinstance(item, Enum): |
39 | return escape_param(item.value) | |
50 | return escape_param(item.value, context) | |
40 | 51 | |
41 | 52 | elif isinstance(item, UUID): |
42 | 53 | return "'%s'" % str(item) |
45 | 56 | return item |
46 | 57 | |
47 | 58 | |
48 | def escape_params(params): | |
59 | def escape_params(params, context): | |
49 | 60 | escaped = {} |
50 | 61 | |
51 | 62 | for key, value in params.items(): |
52 | escaped[key] = escape_param(value) | |
63 | escaped[key] = escape_param(value, context) | |
53 | 64 | |
54 | 65 | return escaped |
13 | 13 | |
14 | 14 | Install desired Python version with system package manager/pyenv/another manager. |
15 | 15 | |
16 | Install test requirements and build package: | |
17 | ||
18 | .. code-block:: bash | |
19 | ||
20 | python testsrequire.py && python setup.py develop | |
21 | ||
22 | You should install cython if you want to change ``*.pyx`` files: | |
23 | ||
24 | .. code-block:: bash | |
25 | ||
26 | pip install cython | |
27 | ||
16 | 28 | ClickHouse on host machine |
17 | 29 | ^^^^^^^^^^^^^^^^^^^^^^^^^^ |
18 | 30 | |
23 | 35 | |
24 | 36 | .. code-block:: bash |
25 | 37 | |
26 | python setup.py test | |
38 | py.test -v | |
27 | 39 | |
28 | 40 | ClickHouse in docker |
29 | 41 | ^^^^^^^^^^^^^^^^^^^^ |
32 | 44 | |
33 | 45 | .. code-block:: bash |
34 | 46 | |
35 | docker run --rm -e "TZ=Europe/Moscow" -p 127.0.0.1:9000:9000 --name test-clickhouse-server yandex/clickhouse-server:$VERSION | |
47 | docker run --rm -e "TZ=Europe/Moscow" -p 127.0.0.1:9000:9000 --name test-clickhouse-server yandex/clickhouse-server:$VERSION | |
36 | 48 | |
37 | 49 | Create container with the same version of ``clickhouse-client``: |
38 | 50 | |
39 | 51 | .. code-block:: bash |
40 | 52 | |
41 | docker run --rm --entrypoint "/bin/sh" --name test-clickhouse-client --link test-clickhouse-server:clickhouse-server yandex/clickhouse-client:$VERSION -c 'while :; do sleep 1; done' | |
53 | docker run --rm --entrypoint "/bin/sh" --name test-clickhouse-client --link test-clickhouse-server:clickhouse-server yandex/clickhouse-client:$VERSION -c 'while :; do sleep 1; done' | |
42 | 54 | |
43 | 55 | Create ``clickhouse-client`` script on your host machine: |
44 | 56 | |
63 | 75 | .. code-block:: bash |
64 | 76 | |
65 | 77 | export TZ=UTC |
66 | python setup.py test | |
78 | py.test -v | |
79 | ||
80 | GitHub Actions in forked repository | |
81 | ----------------------------------- | |
82 | ||
83 | Workflows in forked repositories can be used for running tests. | |
84 | ||
85 | Workflows don't run in forked repositories by default. | |
86 | You must enable GitHub Actions in the **Actions** tab of the forked repository. |
20 | 20 | |
21 | 21 | By default there are wheels for Linux, Mac OS X and Windows. |
22 | 22 | |
23 | Packages for Linux and Mac OS X are available for python: 3.4 -- 3.9. | |
23 | Packages for Linux and Mac OS X are available for python: 3.6 -- 3.10. | |
24 | 24 | |
25 | Packages for Windows are available for python: 3.5 -- 3.9. | |
25 | Packages for Windows are available for python: 3.6 -- 3.10. | |
26 | ||
27 | Starting from version *0.2.3* there are wheels for musl-based Linux distributions. | |
26 | 28 | |
27 | 29 | Dependencies |
28 | 30 | ------------ |
56 | 56 | [('2018-10-21', 3)] |
57 | 57 | |
58 | 58 | Percent symbols in inlined constants should be doubled if you mix constants |
59 | with ``%`` symbol and ``%(x)s`` parameters. | |
60 | ||
61 | .. code-block:: python | |
62 | ||
63 | >>> client.execute( | |
64 | ... "SELECT 'test' like '%%es%%', %(x)s", | |
65 | ... {'x': 1} | |
66 | ... ) | |
59 | with ``%`` symbol and ``%(myvar)s`` parameters. | |
60 | ||
61 | .. code-block:: python | |
62 | ||
63 | >>> client.execute( | |
64 | ... "SELECT 'test' like '%%es%%', %(myvar)s", | |
65 | ... {'myvar': 1} | |
66 | ... ) | |
67 | ||
68 | NOTE: formatting queries using Python's f-strings or concatenation can lead to SQL injections. | |
69 | Use ``%(myvar)s`` parameters instead. | |
67 | 70 | |
68 | 71 | Customisation ``SELECT`` output with ``FORMAT`` clause is not supported. |
69 | 72 |
166 | 166 | INSERT types: :data:`~types.NoneType`, ``T``. |
167 | 167 | |
168 | 168 | SELECT type: :data:`~types.NoneType`, ``T``. |
169 | ||
170 | ||
171 | Bool | |
172 | ---- | |
173 | ||
174 | INSERT types: :class:`bool`, | |
175 | ||
176 | SELECT type: :class:`bool`. | |
169 | 177 | |
170 | 178 | |
171 | 179 | UUID |
274 | 282 | SELECT type: :class:`tuple`. |
275 | 283 | |
276 | 284 | |
277 | Nested | |
285 | Nested(flatten_nested=1, default) | |
278 | 286 | ------ |
279 | 287 | |
280 | Nested type is represented by sequence of arrays. In example below actual | |
288 | Nested type is represented by sequence of arrays when flatten_nested=1. In example below actual | |
281 | 289 | columns for are ``col.name`` and ``col.version``. |
282 | 290 | |
283 | 291 | .. code-block:: sql |
326 | 334 | (['a', 'b', 'c'], [100, 200, 300]), |
327 | 335 | ]) |
328 | 336 | |
337 | Nested(flatten_nested=0) | |
338 | ------ | |
339 | ||
340 | Nested type is represented by array of named tuples when flatten_nested=0. | |
341 | ||
342 | .. code-block:: sql | |
343 | ||
344 | :) SET flatten_nested = 0; | |
345 | ||
346 | SET flatten_nested = 0 | |
347 | ||
348 | Ok. | |
349 | ||
350 | 0 rows in set. Elapsed: 0.006 sec. | |
351 | ||
352 | :) CREATE TABLE test_nested (col Nested(name String, version UInt16)) Engine = Memory; | |
353 | ||
354 | CREATE TABLE test_nested | |
355 | ( | |
356 | `col` Nested(name String, version UInt16) | |
357 | ) | |
358 | ENGINE = Memory | |
359 | ||
360 | Ok. | |
361 | ||
362 | 0 rows in set. Elapsed: 0.005 sec. | |
363 | ||
364 | :) DESCRIBE TABLE test_nested FORMAT TSV; | |
365 | ||
366 | DESCRIBE TABLE test_nested | |
367 | FORMAT TSV | |
368 | ||
369 | col Nested(name String, version UInt16) | |
370 | ||
371 | 1 rows in set. Elapsed: 0.004 sec. | |
372 | ||
373 | Inserting data into nested column in ``clickhouse-client``: | |
374 | ||
375 | .. code-block:: sql | |
376 | ||
377 | :) INSERT INTO test_nested VALUES ([('a', 100), ('b', 200), ('c', 300)]); | |
378 | ||
379 | INSERT INTO test_nested VALUES | |
380 | ||
381 | Ok. | |
382 | ||
383 | 1 rows in set. Elapsed: 0.003 sec. | |
384 | ||
385 | Inserting data into nested column with ``clickhouse-driver``: | |
386 | ||
387 | .. code-block:: python | |
388 | ||
389 | client.execute( | |
390 | 'INSERT INTO test_nested VALUES', | |
391 | [([('a', 100), ('b', 200), ('c', 300)]),] | |
392 | ) | |
393 | # or | |
394 | client.execute( | |
395 | 'INSERT INTO test_nested VALUES', | |
396 | [{'col': [{'name': 'a', 'version': 100}, {'name': 'b', 'version': 200}, {'name': 'c', 'version': 300}]}] | |
397 | ) | |
398 | ||
329 | 399 | Map(key, value) |
330 | 400 | ------------------ |
331 | 401 |
11 | 11 | else: |
12 | 12 | USE_CYTHON = True |
13 | 13 | |
14 | USE_NUMPY = bool(os.getenv('USE_NUMPY', False)) | |
15 | 14 | CYTHON_TRACE = bool(os.getenv('CYTHON_TRACE', False)) |
16 | ||
17 | 15 | |
18 | 16 | here = os.path.abspath(os.path.dirname(__file__)) |
19 | 17 | |
63 | 61 | |
64 | 62 | extensions = cythonize(extensions, compiler_directives=compiler_directives) |
65 | 63 | |
66 | tests_require = [ | |
67 | 'nose', | |
68 | 'parameterized', | |
69 | 'freezegun', | |
70 | 'lz4<=3.0.1; implementation_name=="pypy"', | |
71 | 'lz4; implementation_name!="pypy"', | |
72 | 'zstd', | |
73 | 'clickhouse-cityhash>=1.0.2.1' | |
74 | ] | |
75 | ||
76 | if USE_NUMPY: | |
77 | tests_require.extend(['numpy', 'pandas']) | |
78 | ||
79 | 64 | setup( |
80 | 65 | name='clickhouse-driver', |
81 | 66 | version=read_version(), |
109 | 94 | |
110 | 95 | 'Programming Language :: SQL', |
111 | 96 | 'Programming Language :: Python :: 3', |
112 | 'Programming Language :: Python :: 3.4', | |
113 | 97 | 'Programming Language :: Python :: 3.5', |
114 | 98 | 'Programming Language :: Python :: 3.6', |
115 | 99 | 'Programming Language :: Python :: 3.7', |
116 | 100 | 'Programming Language :: Python :: 3.8', |
117 | 101 | 'Programming Language :: Python :: 3.9', |
102 | 'Programming Language :: Python :: 3.10', | |
118 | 103 | 'Programming Language :: Python :: Implementation :: PyPy', |
119 | 104 | |
120 | 105 | 'Topic :: Database', |
134 | 119 | python_requires='>=3.4.*, <4', |
135 | 120 | install_requires=[ |
136 | 121 | 'pytz', |
137 | 'tzlocal' | |
122 | 'tzlocal', | |
123 | 'tzlocal<2.1; python_version=="3.5"' | |
138 | 124 | ], |
139 | 125 | ext_modules=extensions, |
140 | 126 | extras_require={ |
146 | 132 | 'zstd': ['zstd', 'clickhouse-cityhash>=1.0.2.1'], |
147 | 133 | 'numpy': ['numpy>=1.12.0', 'pandas>=0.24.0'] |
148 | 134 | }, |
149 | test_suite='nose.collector', | |
150 | tests_require=tests_require | |
135 | test_suite='pytest' | |
151 | 136 | ) |
0 | from tests.testcase import BaseTestCase | |
1 | from clickhouse_driver import errors | |
2 | ||
3 | ||
class BoolTestCase(BaseTestCase):
    """Round-trip tests for the Bool column type.

    Bool became a first-class type in ClickHouse 21.12; older servers
    are skipped via ``required_server_version``.
    """

    required_server_version = (21, 12)

    def test_simple(self):
        # Plain string for consistency with the other tests in this
        # class; the original wrapped it in redundant parentheses
        # (("a Bool") is still just a str, not a tuple).
        columns = 'a Bool'

        # Inserted values are coerced by truthiness: non-empty strings
        # (including "False") come back true; None and "" come back false.
        data = [(1,), (0,), (True,), (False,), (None,), ("False",), ("",)]
        with self.create_table(columns):
            self.client.execute('INSERT INTO test (a) VALUES', data)

            query = 'SELECT * FROM test'
            inserted = self.emit_cli(query)
            self.assertEqual(
                inserted, (
                    'true\n'
                    'false\n'
                    'true\n'
                    'false\n'
                    'false\n'
                    'true\n'
                    'false\n'
                )
            )

            inserted = self.client.execute(query)
            self.assertEqual(
                inserted, [
                    (True, ),
                    (False, ),
                    (True, ),
                    (False, ),
                    (False, ),
                    (True, ),
                    (False, ),
                ]
            )

    def test_errors(self):
        """types_check=True rejects non-bool values for a Bool column."""
        columns = "a Bool"
        with self.create_table(columns):
            with self.assertRaises(errors.TypeMismatchError):
                self.client.execute(
                    'INSERT INTO test (a) VALUES', [(1, )],
                    types_check=True
                )

    def test_nullable(self):
        """NULL round-trips through Nullable(Bool)."""
        columns = "a Nullable(Bool)"

        data = [(None, ), (True, ), (False, )]
        with self.create_table(columns):
            self.client.execute('INSERT INTO test (a) VALUES', data)

            query = 'SELECT * FROM test'
            inserted = self.emit_cli(query)
            self.assertEqual(
                inserted, (
                    '\\N\ntrue\nfalse\n'
                )
            )

            inserted = self.client.execute(query)
            self.assertEqual(
                inserted, [
                    (None, ), (True, ), (False, ),
                ]
            )
0 | from contextlib import contextmanager | |
1 | 0 | from datetime import date, datetime |
2 | import os | |
3 | from time import tzset | |
4 | 1 | from unittest.mock import patch |
5 | 2 | |
6 | 3 | from pytz import timezone, utc, UnknownTimeZoneError |
7 | 4 | import tzlocal |
8 | 5 | |
9 | 6 | from tests.testcase import BaseTestCase |
10 | from tests.util import require_server_version | |
11 | ||
12 | ||
13 | class BaseDateTimeTestCase(BaseTestCase): | |
14 | def setUp(self): | |
15 | super(BaseDateTimeTestCase, self).setUp() | |
16 | ||
17 | # Bust tzlocal cache. | |
18 | try: | |
19 | tzlocal.unix._cache_tz = None | |
20 | except AttributeError: | |
21 | pass | |
22 | ||
23 | try: | |
24 | tzlocal.win32._cache_tz = None | |
25 | except AttributeError: | |
26 | pass | |
27 | ||
28 | ||
29 | class DateTimeTestCase(BaseDateTimeTestCase): | |
7 | from tests.util import require_server_version, patch_env_tz | |
8 | ||
9 | ||
10 | class DateTimeTestCase(BaseTestCase): | |
30 | 11 | def test_simple(self): |
31 | 12 | with self.create_table('a Date, b DateTime'): |
32 | 13 | data = [(date(2012, 10, 25), datetime(2012, 10, 25, 14, 7, 19))] |
81 | 62 | self.assertEqual(inserted, data) |
82 | 63 | |
83 | 64 | def test_handle_errors_from_tzlocal(self): |
84 | with patch('tzlocal.get_localzone') as mocked_get_localzone: | |
85 | mocked_get_localzone.side_effect = UnknownTimeZoneError() | |
65 | with patch('tzlocal.get_localzone') as mocked: | |
66 | mocked.side_effect = UnknownTimeZoneError() | |
86 | 67 | self.client.execute('SELECT now()') |
68 | ||
69 | if hasattr(tzlocal, 'get_localzone_name'): | |
70 | with patch('tzlocal.get_localzone_name') as mocked: | |
71 | mocked.side_effect = None | |
72 | self.client.execute('SELECT now()') | |
87 | 73 | |
88 | 74 | @require_server_version(20, 1, 2) |
89 | 75 | def test_datetime64_frac_trunc(self): |
182 | 168 | ) |
183 | 169 | |
184 | 170 | |
185 | class DateTimeTimezonesTestCase(BaseDateTimeTestCase): | |
171 | class DateTimeTimezonesTestCase(BaseTestCase): | |
186 | 172 | dt_type = 'DateTime' |
187 | ||
188 | @contextmanager | |
189 | def patch_env_tz(self, tz_name): | |
190 | # Although in many cases, changing the TZ environment variable may | |
191 | # affect the output of functions like localtime() without calling | |
192 | # tzset(), this behavior should not be relied on. | |
193 | # https://docs.python.org/3/library/time.html#time.tzset | |
194 | with patch.dict(os.environ, {'TZ': tz_name}): | |
195 | tzset() | |
196 | yield | |
197 | ||
198 | tzset() | |
199 | 173 | |
200 | 174 | # Asia/Kamchatka = UTC+12 |
201 | 175 | # Asia/Novosibirsk = UTC+7 |
228 | 202 | offset = timezone(server_tz_name).utcoffset(self.dt).total_seconds() |
229 | 203 | timestamp = 1500010800 - int(offset) |
230 | 204 | |
231 | with self.patch_env_tz('Asia/Novosibirsk'): | |
205 | with patch_env_tz('Asia/Novosibirsk'): | |
232 | 206 | with self.create_table(self.table_columns()): |
233 | 207 | self.client.execute( |
234 | 208 | 'INSERT INTO test (a) VALUES', [(self.dt, )] |
259 | 233 | |
260 | 234 | settings = {'use_client_time_zone': True} |
261 | 235 | |
262 | with self.patch_env_tz('Asia/Novosibirsk'): | |
236 | with patch_env_tz('Asia/Novosibirsk'): | |
263 | 237 | with self.create_table(self.table_columns()): |
264 | 238 | self.client.execute( |
265 | 239 | 'INSERT INTO test (a) VALUES', [(self.dt, )], |
295 | 269 | server_tz_name = self.client.execute('SELECT timezone()')[0][0] |
296 | 270 | offset = timezone(server_tz_name).utcoffset(self.dt) |
297 | 271 | |
298 | with self.patch_env_tz('Asia/Novosibirsk'): | |
272 | with patch_env_tz('Asia/Novosibirsk'): | |
299 | 273 | with self.create_table(self.table_columns()): |
300 | 274 | self.client.execute( |
301 | 275 | 'INSERT INTO test (a) VALUES', [(self.dt_tz, )] |
328 | 302 | |
329 | 303 | settings = {'use_client_time_zone': True} |
330 | 304 | |
331 | with self.patch_env_tz('Asia/Novosibirsk'): | |
305 | with patch_env_tz('Asia/Novosibirsk'): | |
332 | 306 | with self.create_table(self.table_columns()): |
333 | 307 | self.client.execute( |
334 | 308 | 'INSERT INTO test (a) VALUES', [(self.dt_tz, )], |
365 | 339 | # into column with timezone Asia/Novosibirsk |
366 | 340 | # using server's timezone (Europe/Moscow) |
367 | 341 | |
368 | with self.patch_env_tz('Europe/Moscow'): | |
342 | with patch_env_tz('Europe/Moscow'): | |
369 | 343 | with self.create_table(self.table_columns(with_tz=True)): |
370 | 344 | self.client.execute( |
371 | 345 | 'INSERT INTO test (a) VALUES', [(self.dt, )] |
401 | 375 | |
402 | 376 | settings = {'use_client_time_zone': True} |
403 | 377 | |
404 | with self.patch_env_tz('Europe/Moscow'): | |
378 | with patch_env_tz('Europe/Moscow'): | |
405 | 379 | with self.create_table(self.table_columns(with_tz=True)): |
406 | 380 | self.client.execute( |
407 | 381 | 'INSERT INTO test (a) VALUES', [(self.dt, )], |
436 | 410 | # into column with timezone Asia/Novosibirsk |
437 | 411 | # using server's timezone (Europe/Moscow) |
438 | 412 | |
439 | with self.patch_env_tz('Europe/Moscow'): | |
413 | with patch_env_tz('Europe/Moscow'): | |
440 | 414 | with self.create_table(self.table_columns(with_tz=True)): |
441 | 415 | self.client.execute( |
442 | 416 | 'INSERT INTO test (a) VALUES', [(self.dt_tz, )] |
476 | 450 | |
477 | 451 | settings = {'use_client_time_zone': True} |
478 | 452 | |
479 | with self.patch_env_tz('Europe/Moscow'): | |
453 | with patch_env_tz('Europe/Moscow'): | |
480 | 454 | with self.create_table(self.table_columns(with_tz=True)): |
481 | 455 | self.client.execute( |
482 | 456 | 'INSERT INTO test (a) VALUES', [(self.dt_tz, )], |
139 | 139 | |
140 | 140 | def test_nullable(self): |
141 | 141 | with self.create_table('a Nullable(Decimal32(3))'): |
142 | data = [(300.42, ), (None, ), ] | |
142 | data = [(300.42, ), (None, )] | |
143 | 143 | self.client.execute( |
144 | 144 | 'INSERT INTO test (a) VALUES', data |
145 | 145 | ) |
157 | 157 | |
158 | 158 | def test_no_scale(self): |
159 | 159 | with self.create_table('a Decimal32(0)'): |
160 | data = [(2147483647, ), ] | |
160 | data = [(2147483647, )] | |
161 | 161 | self.client.execute( |
162 | 162 | 'INSERT INTO test (a) VALUES', data |
163 | 163 | ) |
170 | 170 | self.assertEqual(inserted, [(Decimal('2147483647'), )]) |
171 | 171 | |
172 | 172 | def test_type_mismatch(self): |
173 | data = [(2147483649,), ] | |
173 | data = [(2147483649, )] | |
174 | 174 | with self.create_table('a Decimal32(0)'): |
175 | 175 | with self.assertRaises(errors.TypeMismatchError) as e: |
176 | 176 | self.client.execute( |
185 | 185 | ) |
186 | 186 | |
187 | 187 | self.assertIn('Column a', str(e.exception)) |
188 | ||
189 | def test_type_mismatch_scale(self): | |
190 | data = [(1.234,)] | |
191 | with self.create_table('a Decimal32(2)'): | |
192 | with self.assertRaises(errors.TypeMismatchError) as e: | |
193 | self.client.execute( | |
194 | 'INSERT INTO test (a) VALUES', data, types_check=True | |
195 | ) | |
196 | ||
197 | self.assertIn('1.234 for column "a"', str(e.exception)) | |
198 | ||
199 | # Without types_check decimal will be cropped. | |
200 | self.client.execute('INSERT INTO test (a) VALUES', data) | |
201 | query = 'SELECT * FROM test' | |
202 | inserted = self.emit_cli(query) | |
203 | self.assertEqual(inserted, '1.23\n') | |
204 | inserted = self.client.execute(query) | |
205 | self.assertEqual(inserted, [(Decimal('1.23'), )]) | |
188 | 206 | |
189 | 207 | def test_preserve_precision(self): |
190 | 208 | data = [(1.66, ), (1.15, )] |
0 | from tests.testcase import BaseTestCase | |
1 | from tests.util import require_server_version | |
2 | from clickhouse_driver.columns import nestedcolumn | |
3 | ||
4 | ||
class NestedTestCase(BaseTestCase):
    """Nested columns serialized as arrays of named tuples.

    These tests require server-side support for ``flatten_nested=0``
    (ClickHouse >= 21.3.13), where a Nested column round-trips as one
    array-of-tuples value instead of parallel per-field arrays.
    """

    def entuple(self, lst):
        # Recursively convert nested lists into tuples.
        # NOTE(review): appears unused by the tests below — confirm
        # against other callers before removing.
        return tuple(
            self.entuple(x) if isinstance(x, list) else x for x in lst
        )

    @require_server_version(21, 3, 13)
    def test_simple(self):
        """Insert one row of tuples and read it back, incl. projections."""
        columns = 'n Nested(i Int32, s String)'

        # INSERT INTO test_nested VALUES ([(0, 'a'), (1, 'b')]);
        data = [([(0, 'a'), (1, 'b')],)]

        with self.create_table(columns, flatten_nested=0):
            self.client.execute(
                'INSERT INTO test (n) VALUES', data
            )

            query = 'SELECT * FROM test'
            inserted = self.emit_cli(query)
            self.assertEqual(inserted, "[(0,'a'),(1,'b')]\n")

            inserted = self.client.execute(query)
            self.assertEqual(inserted, data)

            # Selecting a single nested field yields a plain array column.
            projected_i = self.client.execute('SELECT n.i FROM test')
            self.assertEqual(
                projected_i,
                [([0, 1],)]
            )

            projected_s = self.client.execute('SELECT n.s FROM test')
            self.assertEqual(
                projected_s,
                [(['a', 'b'],)]
            )

    @require_server_version(21, 3, 13)
    def test_multiple_rows(self):
        """Several rows of nested data survive a round trip."""
        columns = 'n Nested(i Int32, s String)'

        data = [([(0, 'a'), (1, 'b')],), ([(3, 'd'), (4, 'e')],)]

        with self.create_table(columns, flatten_nested=0):
            self.client.execute(
                'INSERT INTO test (n) VALUES', data
            )

            query = 'SELECT * FROM test'
            inserted = self.emit_cli(query)
            self.assertEqual(
                inserted,
                "[(0,'a'),(1,'b')]\n[(3,'d'),(4,'e')]\n"
            )

            inserted = self.client.execute(query)
            self.assertEqual(inserted, data)

    @require_server_version(21, 3, 13)
    def test_dict(self):
        """Dict-shaped rows are accepted on insert; reads return tuples."""
        columns = 'n Nested(i Int32, s String)'

        data = [
            {'n': [{'i': 0, 's': 'a'}, {'i': 1, 's': 'b'}]},
            {'n': [{'i': 3, 's': 'd'}, {'i': 4, 's': 'e'}]},
        ]

        with self.create_table(columns, flatten_nested=0):
            self.client.execute(
                'INSERT INTO test (n) VALUES', data
            )

            query = 'SELECT * FROM test'
            inserted = self.emit_cli(query)
            self.assertEqual(
                inserted,
                "[(0,'a'),(1,'b')]\n[(3,'d'),(4,'e')]\n"
            )

            inserted = self.client.execute(query)
            self.assertEqual(
                inserted,
                [([(0, 'a'), (1, 'b')],), ([(3, 'd'), (4, 'e')],)]
            )

    def test_get_nested_columns(self):
        """Inner column type specs are extracted from a Nested spec."""
        self.assertEqual(
            nestedcolumn.get_nested_columns(
                'Nested(a Tuple(Array(Int8)),\n b Nullable(String))',
            ),
            ['Tuple(Array(Int8))', 'Nullable(String)']
        )

    def test_get_columns_with_types(self):
        """Inner (name, type) pairs are extracted from a Nested spec."""
        self.assertEqual(
            nestedcolumn.get_columns_with_types(
                'Nested(a Tuple(Array(Int8)),\n b Nullable(String))',
            ),
            [('a', 'Tuple(Array(Int8))'), ('b', 'Nullable(String)')]
        )

    def test_get_inner_spec(self):
        """The raw inner spec is recovered, ignoring trailing junk."""
        inner = 'a Tuple(Array(Int8), Array(Int64)), b Nullable(String)'
        self.assertEqual(
            nestedcolumn.get_inner_spec('Nested({}) dummy '.format(inner)),
            inner
        )
0 | import pytest | |
1 | ||
2 | ||
@pytest.fixture(autouse=True)
def assert_empty_output(capfd):
    """Fail any test that leaks output to stdout or stderr."""
    yield

    out, err = capfd.readouterr()

    assert out == ''
    assert err == ''
# Compose file used by the test suite / CI to spin up a ClickHouse
# server and client pair. The $VERSION variable selects the image tag.
version: '3'

services:
  clickhouse-server:
    image: "yandex/clickhouse-server:$VERSION"
    container_name: test-clickhouse-server
    environment:
      # Datetime tests assume the server runs in Europe/Moscow.
      - TZ=Europe/Moscow
    ports:
      # Native protocol port, bound to localhost only.
      - "127.0.0.1:9000:9000"

  clickhouse-client:
    image: "yandex/clickhouse-client:$VERSION"
    container_name: test-clickhouse-client
    entrypoint: /bin/sh
    # Idle loop keeps the container alive so tests can exec the client.
    command: [-c, 'while :; do sleep 1; done']
0 | from contextlib import contextmanager | |
1 | 0 | from datetime import datetime, date |
2 | import os | |
3 | from time import tzset | |
4 | 1 | from unittest.mock import patch |
5 | 2 | |
6 | 3 | try: |
17 | 14 | import tzlocal |
18 | 15 | |
19 | 16 | from tests.numpy.testcase import NumpyBaseTestCase |
20 | from tests.util import require_server_version | |
17 | from tests.util import require_server_version, patch_env_tz | |
21 | 18 | |
22 | 19 | |
23 | 20 | class BaseDateTimeTestCase(NumpyBaseTestCase): |
24 | def setUp(self): | |
25 | super(BaseDateTimeTestCase, self).setUp() | |
26 | ||
27 | # Bust tzlocal cache. | |
28 | try: | |
29 | tzlocal.unix._cache_tz = None | |
30 | except AttributeError: | |
31 | pass | |
32 | ||
33 | try: | |
34 | tzlocal.win32._cache_tz = None | |
35 | except AttributeError: | |
36 | pass | |
37 | ||
38 | 21 | def make_numpy_d64ns(self, items): |
39 | 22 | return np.array(items, dtype='datetime64[ns]') |
40 | 23 | |
120 | 103 | self.assertEqual(inserted[0].dtype, object) |
121 | 104 | |
122 | 105 | def test_handle_errors_from_tzlocal(self): |
123 | with patch('tzlocal.get_localzone') as mocked_get_localzone: | |
124 | mocked_get_localzone.side_effect = UnknownTimeZoneError() | |
106 | with patch('tzlocal.get_localzone') as mocked: | |
107 | mocked.side_effect = UnknownTimeZoneError() | |
125 | 108 | self.client.execute('SELECT now()') |
109 | ||
110 | if hasattr(tzlocal, 'get_localzone_name'): | |
111 | with patch('tzlocal.get_localzone_name') as mocked: | |
112 | mocked.side_effect = None | |
113 | self.client.execute('SELECT now()') | |
126 | 114 | |
127 | 115 | @require_server_version(20, 1, 2) |
128 | 116 | def test_datetime64_frac_trunc(self): |
202 | 190 | |
203 | 191 | return pd.to_datetime(np.array([dt] * 2, dtype=dtype)) \ |
204 | 192 | .tz_localize(tz_name).to_numpy(dtype) |
205 | ||
206 | @contextmanager | |
207 | def patch_env_tz(self, tz_name): | |
208 | # Although in many cases, changing the TZ environment variable may | |
209 | # affect the output of functions like localtime() without calling | |
210 | # tzset(), this behavior should not be relied on. | |
211 | # https://docs.python.org/3/library/time.html#time.tzset | |
212 | with patch.dict(os.environ, {'TZ': tz_name}): | |
213 | tzset() | |
214 | yield | |
215 | ||
216 | tzset() | |
217 | 193 | |
218 | 194 | # Asia/Kamchatka = UTC+12 |
219 | 195 | # Asia/Novosibirsk = UTC+7 |
256 | 232 | offset = timezone(server_tz_name).utcoffset(self.dt).total_seconds() |
257 | 233 | timestamp = 1500010800 - int(offset) |
258 | 234 | |
259 | with self.patch_env_tz('Asia/Novosibirsk'): | |
235 | with patch_env_tz('Asia/Novosibirsk'): | |
260 | 236 | with self.create_table(self.table_columns()): |
261 | 237 | self.client.execute( |
262 | 238 | 'INSERT INTO test (a) VALUES', [self.dt_arr], columnar=True |
289 | 265 | |
290 | 266 | settings = {'use_client_time_zone': True} |
291 | 267 | |
292 | with self.patch_env_tz('Asia/Novosibirsk'): | |
268 | with patch_env_tz('Asia/Novosibirsk'): | |
293 | 269 | with self.create_table(self.table_columns()): |
294 | 270 | self.client.execute( |
295 | 271 | 'INSERT INTO test (a) VALUES', [self.dt_arr], |
328 | 304 | server_tz_name = self.client.execute('SELECT timezone()')[0][0] |
329 | 305 | offset = timezone(server_tz_name).utcoffset(self.dt) |
330 | 306 | |
331 | with self.patch_env_tz('Asia/Novosibirsk'): | |
307 | with patch_env_tz('Asia/Novosibirsk'): | |
332 | 308 | with self.create_table(self.table_columns()): |
333 | 309 | self.client.execute( |
334 | 310 | 'INSERT INTO test (a) VALUES', [self.dt_tz], columnar=True |
364 | 340 | |
365 | 341 | settings = {'use_client_time_zone': True} |
366 | 342 | |
367 | with self.patch_env_tz('Asia/Novosibirsk'): | |
343 | with patch_env_tz('Asia/Novosibirsk'): | |
368 | 344 | with self.create_table(self.table_columns()): |
369 | 345 | self.client.execute( |
370 | 346 | 'INSERT INTO test (a) VALUES', [self.dt_tz], |
404 | 380 | # into column with timezone Asia/Novosibirsk |
405 | 381 | # using server's timezone (Europe/Moscow) |
406 | 382 | |
407 | with self.patch_env_tz('Europe/Moscow'): | |
383 | with patch_env_tz('Europe/Moscow'): | |
408 | 384 | with self.create_table(self.table_columns(with_tz=True)): |
409 | 385 | self.client.execute( |
410 | 386 | 'INSERT INTO test (a) VALUES', [self.dt_arr], columnar=True |
440 | 416 | |
441 | 417 | settings = {'use_client_time_zone': True} |
442 | 418 | |
443 | with self.patch_env_tz('Europe/Moscow'): | |
419 | with patch_env_tz('Europe/Moscow'): | |
444 | 420 | with self.create_table(self.table_columns(with_tz=True)): |
445 | 421 | self.client.execute( |
446 | 422 | 'INSERT INTO test (a) VALUES', [self.dt_arr], |
476 | 452 | # into column with timezone Asia/Novosibirsk |
477 | 453 | # using server's timezone (Europe/Moscow) |
478 | 454 | |
479 | with self.patch_env_tz('Europe/Moscow'): | |
455 | with patch_env_tz('Europe/Moscow'): | |
480 | 456 | with self.create_table(self.table_columns(with_tz=True)): |
481 | 457 | self.client.execute( |
482 | 458 | 'INSERT INTO test (a) VALUES', [self.dt_tz], columnar=True |
515 | 491 | |
516 | 492 | settings = {'use_client_time_zone': True} |
517 | 493 | |
518 | with self.patch_env_tz('Europe/Moscow'): | |
494 | with patch_env_tz('Europe/Moscow'): | |
519 | 495 | with self.create_table(self.table_columns(with_tz=True)): |
520 | 496 | self.client.execute( |
521 | 497 | 'INSERT INTO test (a) VALUES', [self.dt_tz], |
0 | from parameterized import parameterized | |
1 | ||
0 | 2 | from clickhouse_driver import errors |
1 | ||
2 | try: | |
3 | from clickhouse_driver.columns.numpy.service import \ | |
4 | get_numpy_column_by_spec | |
5 | except ImportError: | |
6 | get_numpy_column_by_spec = None | |
7 | ||
3 | from clickhouse_driver.columns.service import get_column_by_spec | |
8 | 4 | from clickhouse_driver.context import Context |
9 | 5 | |
10 | 6 | from tests.numpy.testcase import NumpyBaseTestCase |
13 | 9 | class OtherColumnsTestCase(NumpyBaseTestCase): |
14 | 10 | def get_column(self, spec): |
15 | 11 | ctx = Context() |
16 | ctx.client_settings = {'strings_as_bytes': False} | |
17 | return get_numpy_column_by_spec(spec, {'context': ctx}) | |
12 | ctx.client_settings = {'strings_as_bytes': False, 'use_numpy': True} | |
13 | return get_column_by_spec(spec, {'context': ctx}) | |
18 | 14 | |
19 | def test_enum(self): | |
20 | col = self.get_column("Enum8('hello' = 1, 'world' = 2)") | |
21 | self.assertIsNotNone(col) | |
22 | ||
23 | def test_decimal(self): | |
24 | col = self.get_column('Decimal(8, 4)') | |
25 | self.assertIsNotNone(col) | |
26 | ||
27 | def test_array(self): | |
28 | col = self.get_column('Array(String)') | |
29 | self.assertIsNotNone(col) | |
30 | ||
31 | def test_tuple(self): | |
32 | col = self.get_column('Tuple(String)') | |
33 | self.assertIsNotNone(col) | |
34 | ||
35 | def test_simple_aggregation_function(self): | |
36 | col = self.get_column('SimpleAggregateFunction(any, Int32)') | |
15 | @parameterized.expand([ | |
16 | ("Enum8('hello' = 1, 'world' = 2)", ), | |
17 | ('Decimal(8, 4)', ), | |
18 | ('Array(String)', ), | |
19 | ('Tuple(String)', ), | |
20 | ('SimpleAggregateFunction(any, Int32)', ), | |
21 | ('Map(String, String)', ), | |
22 | ('Array(LowCardinality(String))', ) | |
23 | ]) | |
24 | def test_generic_type(self, spec): | |
25 | col = self.get_column(spec) | |
37 | 26 | self.assertIsNotNone(col) |
38 | 27 | |
39 | 28 | def test_get_unknown_column(self): |
0 | try: | |
1 | import numpy as np | |
2 | import pandas as pd | |
3 | except ImportError: | |
4 | np = None | |
5 | pd = None | |
6 | ||
7 | from tests.numpy.testcase import NumpyBaseTestCase | |
8 | ||
9 | ||
class ExternalTablesTestCase(NumpyBaseTestCase):
    """External (temporary) tables fed from pandas DataFrames."""

    def test_select(self):
        """DataFrame columns round-trip through an external table."""
        tables = [{
            'name': 'test',
            'structure': [('x', 'Int32'), ('y', 'String')],
            'data': pd.DataFrame({
                'x': [100, 500],
                'y': ['abc', 'def']
            })
        }]
        rv = self.client.execute(
            'SELECT * FROM test', external_tables=tables, columnar=True
        )
        self.assertArraysListEqual(
            rv, [np.array([100, 500]), np.array(['abc', 'def'])]
        )

    def test_send_empty_table(self):
        """An empty DataFrame produces an empty result set."""
        tables = [{
            'name': 'test',
            'structure': [('x', 'Int32')],
            'data': pd.DataFrame({'x': []})
        }]
        rv = self.client.execute(
            'SELECT * FROM test', external_tables=tables, columnar=True
        )
        self.assertArraysListEqual(rv, [])

    def test_send_empty_table_structure(self):
        """An empty structure is rejected with a descriptive ValueError."""
        tables = [{
            'name': 'test',
            'structure': [],
            'data': pd.DataFrame()
        }]
        with self.assertRaises(ValueError) as e:
            self.client.execute(
                'SELECT * FROM test', external_tables=tables, columnar=True
            )

        self.assertIn('Empty table "test" structure', str(e.exception))
15 | 15 | |
16 | 16 | def assertArraysEqual(self, first, second): |
17 | 17 | return self.assertTrue((first == second).all()) |
18 | ||
19 | def assertArraysListEqual(self, first, second): | |
20 | self.assertEqual(len(first), len(second)) | |
21 | for x, y in zip(first, second): | |
22 | self.assertTrue((x == y).all()) |
5 | 5 | |
6 | 6 | class BufferedReaderTestCase(TestCase): |
7 | 7 | def test_overflow_signed_int_string_size(self): |
8 | data = b'\xFF\xFE\xFC\xFE\x29\x80\x40\x00\x00\x01' | |
8 | data = b'\xFF\xFE\xFC\xFE\xFE\xFE\xFE\xFE\x29\x80\x40\x00\x00\x01' | |
9 | 9 | |
10 | 10 | def recv_into(buf): |
11 | 11 | size = len(data) |
254 | 254 | c.connection.context.client_settings['opentelemetry_tracestate'], |
255 | 255 | 'state' |
256 | 256 | ) |
257 | ||
258 | def test_quota_key(self): | |
259 | c = Client.from_url('clickhouse://host?quota_key=myquota') | |
260 | self.assertEqual( | |
261 | c.connection.context.client_settings['quota_key'], 'myquota' | |
262 | ) | |
263 | ||
264 | c = Client.from_url('clickhouse://host') | |
265 | self.assertEqual( | |
266 | c.connection.context.client_settings['quota_key'], '' | |
267 | ) |
12 | 12 | supported_compressions = file_config.get('db', 'compression').split(',') |
13 | 13 | |
14 | 14 | def _create_client(self): |
15 | settings = None | |
16 | if self.compression: | |
17 | # Set server compression method explicitly | |
18 | # By default server sends blocks compressed by LZ4. | |
19 | method = self.compression | |
20 | if self.server_version > (19, ): | |
21 | method = method.upper() | |
22 | settings = {'network_compression_method': method} | |
23 | ||
15 | 24 | return Client( |
16 | 25 | self.host, self.port, self.database, self.user, self.password, |
17 | compression=self.compression | |
26 | compression=self.compression, settings=settings | |
18 | 27 | ) |
19 | 28 | |
20 | 29 | def setUp(self): |
220 | 220 | 'Hello or Exception', 'Unknown packet' |
221 | 221 | ) |
222 | 222 | self.assertEqual(str(e.exception), msg) |
223 | ||
224 | def test_partially_consumed_query(self): | |
225 | self.client.execute_iter('SELECT 1') | |
226 | ||
227 | error = errors.PartiallyConsumedQueryError | |
228 | with self.assertRaises(error) as e: | |
229 | self.client.execute_iter('SELECT 1') | |
230 | ||
231 | self.assertEqual( | |
232 | str(e.exception), | |
233 | 'Simultaneous queries on single connection detected' | |
234 | ) | |
235 | rv = self.client.execute('SELECT 1') | |
236 | self.assertEqual(rv, [(1, )]) | |
237 | ||
238 | def test_read_all_packets_on_execute_iter(self): | |
239 | list(self.client.execute_iter('SELECT 1')) | |
240 | list(self.client.execute_iter('SELECT 1')) | |
223 | 241 | |
224 | 242 | |
225 | 243 | class FakeBufferedReader(BufferedReader): |
150 | 150 | ) |
151 | 151 | self.assertEqual(cursor.rowcount, -1) |
152 | 152 | |
153 | def test_execute_insert(self): | |
154 | with self.created_cursor() as cursor, self.create_table('a UInt8'): | |
155 | cursor.execute('INSERT INTO test VALUES', [[4]]) | |
156 | self.assertEqual(cursor.rowcount, 1) | |
157 | ||
153 | 158 | def test_description(self): |
154 | 159 | with self.created_cursor() as cursor: |
155 | 160 | self.assertIsNone(cursor.description) |
2 | 2 | |
3 | 3 | from datetime import date, datetime |
4 | 4 | from decimal import Decimal |
5 | from unittest.mock import Mock | |
5 | 6 | from uuid import UUID |
6 | 7 | |
7 | 8 | from enum import IntEnum, Enum |
9 | from pytz import timezone | |
8 | 10 | |
9 | 11 | from tests.testcase import BaseTestCase |
12 | from tests.util import patch_env_tz | |
10 | 13 | |
11 | 14 | |
12 | 15 | class ParametersSubstitutionTestCase(BaseTestCase): |
14 | 17 | double_tpl = 'SELECT %(x)s, %(y)s' |
15 | 18 | |
16 | 19 | def assert_subst(self, tpl, params, sql): |
17 | self.assertEqual(self.client.substitute_params(tpl, params), sql) | |
20 | ctx = Mock() | |
21 | ctx.server_info.timezone = 'Europe/Moscow' | |
22 | self.assertEqual(self.client.substitute_params(tpl, params, ctx), sql) | |
18 | 23 | |
19 | 24 | def test_int(self): |
20 | 25 | params = {'x': 123} |
64 | 69 | rv = self.client.execute(tpl, params) |
65 | 70 | self.assertEqual(rv, [(dt, )]) |
66 | 71 | |
72 | def test_datetime_with_timezone(self): | |
73 | dt = datetime(2017, 7, 14, 5, 40, 0) | |
74 | params = {'x': timezone('Asia/Kamchatka').localize(dt)} | |
75 | ||
76 | self.assert_subst(self.single_tpl, params, | |
77 | "SELECT '2017-07-13 20:40:00'") | |
78 | ||
79 | tpl = ( | |
80 | 'SELECT toDateTime(toInt32(toDateTime(%(x)s))), ' | |
81 | 'toInt32(toDateTime(%(x)s))' | |
82 | ) | |
83 | ||
84 | with patch_env_tz('Asia/Novosibirsk'): | |
85 | # use server timezone | |
86 | rv = self.client.execute( | |
87 | tpl, params, settings={'use_client_time_zone': False} | |
88 | ) | |
89 | ||
90 | self.assertEqual( | |
91 | rv, [(datetime(2017, 7, 13, 20, 40, 0), 1499967600)] | |
92 | ) | |
93 | ||
94 | query = ( | |
95 | "SELECT " | |
96 | "toDateTime(toInt32(toDateTime('{0}', 'Asia/Kamchatka'))), " | |
97 | "toInt32(toDateTime('{0}', 'Asia/Kamchatka'))" | |
98 | ).format('2017-07-14 05:40:00') | |
99 | ||
100 | rv = self.emit_cli(query, use_client_time_zone=0) | |
101 | ||
102 | self.assertEqual(rv, '2017-07-13 20:40:00\t1499967600\n') | |
103 | ||
104 | # use client timezone | |
105 | rv = self.client.execute( | |
106 | tpl, params, settings={'use_client_time_zone': True} | |
107 | ) | |
108 | ||
109 | self.assertEqual( | |
110 | rv, [(datetime(2017, 7, 14, 0, 40, 0), 1499967600)] | |
111 | ) | |
112 | ||
113 | query = ( | |
114 | "SELECT " | |
115 | "toDateTime(toInt32(toDateTime('{0}', 'Asia/Kamchatka'))), " | |
116 | "toInt32(toDateTime('{0}', 'Asia/Kamchatka'))" | |
117 | ).format('2017-07-14 05:40:00') | |
118 | ||
119 | rv = self.emit_cli(query, use_client_time_zone=1) | |
120 | self.assertEqual(rv, '2017-07-14 00:40:00\t1499967600\n') | |
121 | ||
67 | 122 | def test_string(self): |
68 | 123 | params = {'x': 'test\t\n\x16', 'y': 'ัะตัั\t\n\x16'} |
69 | 124 | |
171 | 226 | params = object() |
172 | 227 | |
173 | 228 | with self.assertRaises(ValueError) as e: |
174 | self.client.substitute_params(self.single_tpl, params) | |
229 | self.client.substitute_params(self.single_tpl, params, Mock()) | |
175 | 230 | |
176 | 231 | self.assertEqual(e.exception.args[0], |
177 | 232 | 'Parameters are expected in dict form') |
0 | import os | |
1 | from contextlib import contextmanager | |
0 | 2 | from functools import wraps |
1 | 3 | import logging |
2 | 4 | from io import StringIO |
5 | from time import tzset | |
6 | from unittest.mock import patch | |
7 | ||
8 | import tzlocal | |
3 | 9 | |
4 | 10 | |
5 | 11 | def skip_by_server_version(testcase, version_required): |
51 | 57 | |
52 | 58 | |
53 | 59 | capture_logging = LoggingCapturer |
60 | ||
61 | ||
def bust_tzlocal_cache():
    """Clear tzlocal's cached local-timezone lookup.

    tzlocal memoizes the detected zone in module-level attributes, so
    tests that patch the ``TZ`` environment variable must drop the
    cache before the next lookup.  The attribute set varies between
    tzlocal versions and platforms, hence the AttributeError guards.
    """
    try:
        tzlocal.unix._cache_tz = None
        tzlocal.unix._cache_tz_name = None
    except AttributeError:
        pass

    try:
        tzlocal.win32._cache_tz = None
        # Bug fix: this branch previously reset tzlocal.unix._cache_tz_name
        # again (copy-paste); the win32 name cache must be cleared here.
        tzlocal.win32._cache_tz_name = None
    except AttributeError:
        pass
74 | ||
75 | ||
@contextmanager
def patch_env_tz(tz_name):
    """Temporarily set the ``TZ`` environment variable to *tz_name*.

    Busts tzlocal's cache first so the patched zone is actually picked
    up.  Although in many cases changing the TZ environment variable
    may affect the output of functions like localtime() without calling
    tzset(), this behavior should not be relied on.
    https://docs.python.org/3/library/time.html#time.tzset
    """
    bust_tzlocal_cache()

    try:
        with patch.dict(os.environ, {'TZ': tz_name}):
            tzset()
            yield
    finally:
        # Bug fix: re-sync C-level timezone state even when the managed
        # body raises.  Previously the trailing tzset() was skipped on
        # exception, so patch.dict restored os.environ but the process
        # kept running with the patched zone.
        tzset()
import os
import sys

# Helper script: installs the test-only dependencies via pip.
# USE_NUMPY=1 additionally pulls in the numpy/pandas extras.
USE_NUMPY = bool(int(os.getenv('USE_NUMPY', '0')))

tests_require = [
    'pytest',
    'parameterized',
    'freezegun',
    'zstd',
    'clickhouse-cityhash>=1.0.2.1'
]

# Newer lz4 releases dropped PyPy support, so pin there.
is_pypy = sys.implementation.name == 'pypy'
tests_require.append('lz4<=3.0.1' if is_pypy else 'lz4')

if USE_NUMPY:
    tests_require += ['numpy', 'pandas']

# pip's programmatic entry point moved between releases.
try:
    from pip import main as pipmain
except ImportError:
    from pip._internal import main as pipmain

pipmain(['install'] + tests_require)