Codebase list python-clickhouse-driver / 03a832f
Update upstream source from tag 'upstream/0.2.3' Update to upstream version '0.2.3' with Debian dir dbc093aa423367d1f1c63dd1c0f2b9f52980a88f Federico Ceratto 2 years ago
49 changed file(s) with 1301 addition(s) and 629 deletion(s). Raw diff Collapse all Expand all
0 on: [push, pull_request]
1 name: build
2 jobs:
3 tests:
4 runs-on: ubuntu-20.04
5 strategy:
6 matrix:
7 use-numpy:
8 - 0
9 python-version:
10 - "3.5"
11 - "3.6"
12 - "3.7"
13 - "3.8"
14 - "3.9"
15 - "3.10"
16 - "pypy-3.6"
17 - "pypy-3.7"
18 clickhouse-version:
19 - 21.12.3.32
20 - 21.9.3.30
22 - 21.4.6.55
23 - 21.3.10.1
24 - 21.2.10.48
25 - 21.1.9.41
26 - 20.11.2.1
27 - 20.10.2.20
28 - 20.9.3.45
29 - 20.8.4.11
30 - 20.7.4.11
31 - 20.6.8.5
32 - 20.5.5.74
33 - 20.4.9.110
34 - 20.3.20.6
35 - 19.16.17.80
36 - 19.15.3.6
37 - 19.9.2.4 # allow_suspicious_low_cardinality_types
38 - 19.8.3.8 # SimpleAggregateFunction
39 - 19.3.3
40 - 18.12.17
41 include:
42 - clickhouse-version: 20.3.20.6
43 use-numpy: 1
44 python-version: 3.8
45
46 name: ${{ matrix.python-version }} CH=${{ matrix.clickhouse-version }} NUMPY=${{ matrix.use-numpy }}
47 steps:
48 - uses: actions/checkout@v2
49 - name: Set up Python
50 uses: actions/setup-python@v2
51 with:
52 python-version: ${{ matrix.python-version }}
53 architecture: x64
54 # - name: Login to Docker Hub
55 # uses: docker/login-action@v1
56 # with:
57 # username: ${{ secrets.DOCKER_HUB_USERNAME }}
58 # password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
59 - name: Install flake8
60 run: |
61 pip install --upgrade pip setuptools wheel
62 pip install flake8 flake8-print
63 - name: Run flake8
64 run: flake8
65 - name: Start ClickHouse server and client containers
66 run: |
67 echo "VERSION=${{ matrix.clickhouse-version }}" > tests/.env
68 docker-compose -f tests/docker-compose.yml up -d
69 - name: Setup clickhouse-client proxy for docker
70 run: |
71 # Faking clickhouse-client real communication with container via docker exec.
72 echo -e '#!/bin/bash\n\ndocker exec -e "`env | grep ^TZ=`" test-clickhouse-client clickhouse-client "$@"' | sudo tee /usr/local/bin/clickhouse-client > /dev/null
73 sudo chmod +x /usr/local/bin/clickhouse-client
74 # Overriding setup.cfg. Set host=clickhouse-server
75 sed -i 's/^host=localhost$/host=clickhouse-server/' setup.cfg
76 # Make host think that clickhouse-server is localhost
77 echo '127.0.0.1 clickhouse-server' | sudo tee /etc/hosts > /dev/null
78 - name: Build cython extensions with tracing
79 run: CYTHON_TRACE=1 python setup.py build_ext --define CYTHON_TRACE
80 if: ${{ !contains(matrix.python-version, 'pypy') }}
81 - name: Install requirements
82 run: |
83 # Newer coveralls do not work with github actions.
84 pip install 'coveralls<3.0.0'
85 pip install cython
86 python testsrequire.py
87 python setup.py develop
88 # Limit each test time execution.
89 pip install pytest-timeout
90 env:
91 USE_NUMPY: ${{ matrix.use-numpy }}
92 - name: Run tests
93 run: coverage run -m py.test --timeout=10 -v
94 timeout-minutes: 5
95 env:
96 # Set initial TZ for docker exec -e "`env | grep ^TZ`"
97 TZ: UTC
98 - name: Upload coverage
99 run: coveralls
100 env:
101 GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
102 COVERALLS_PARALLEL: true
103 COVERALLS_FLAG_NAME: ${{ matrix.python-version }} CH=${{ matrix.clickhouse-version }} NUMPY=${{ matrix.use-numpy }}
104
105 coveralls-finished:
106 name: Indicate completion to coveralls.io
107 needs: tests
108 runs-on: ubuntu-latest
109 steps:
110 - name: Finished
111 uses: coverallsapp/github-action@1.1.3
112 with:
113 github-token: ${{ secrets.GITHUB_TOKEN }}
114 parallel-finished: true
115
116 valgrind:
117 name: Valgrind check
118 needs: tests
119 runs-on: ubuntu-20.04
120 steps:
121 - uses: actions/checkout@v2
122 - name: Set up Python
123 uses: actions/setup-python@v2
124 with:
125 python-version: 3.8
126 architecture: x64
127 - name: Install valgrind
128 run: sudo apt-get install -y valgrind
129 # - name: Login to Docker Hub
130 # uses: docker/login-action@v1
131 # with:
132 # username: ${{ secrets.DOCKER_HUB_USERNAME }}
133 # password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
134 - name: Start ClickHouse server and client containers
135 run: |
136 echo "VERSION=$VERSION" > tests/.env
137 docker-compose -f tests/docker-compose.yml up -d
138 env:
139 VERSION: 20.3.7.46
140 - name: Setup clickhouse-client proxy for docker
141 run: |
142 # Faking clickhouse-client real communication with container via docker exec.
143 echo -e '#!/bin/bash\n\ndocker exec -e "`env | grep ^TZ=`" test-clickhouse-client clickhouse-client "$@"' | sudo tee /usr/local/bin/clickhouse-client > /dev/null
144 sudo chmod +x /usr/local/bin/clickhouse-client
145 # Overriding setup.cfg. Set host=clickhouse-server
146 sed -i 's/^host=localhost$/host=clickhouse-server/' setup.cfg
147 # Make host think that clickhouse-server is localhost
148 echo '127.0.0.1 clickhouse-server' | sudo tee /etc/hosts > /dev/null
149 - name: Install requirements
150 run: |
151 python testsrequire.py
152 python setup.py develop
153 env:
154 USE_NUMPY: 1
155 - name: Run tests under valgrind
156 run: valgrind --error-exitcode=1 --suppressions=valgrind.supp py.test -v
157 env:
158 # Set initial TZ for docker exec -e "`env | grep ^TZ`"
159 TZ: UTC
160 USE_NUMPY: 1
161 PYTHONMALLOC: malloc
162
163 wheels-linux:
164 name: Wheels for Linux
165 needs: valgrind
166 runs-on: ubuntu-20.04
167 steps:
168 - uses: actions/checkout@v2
169 - name: Set up Python
170 uses: actions/setup-python@v2
171 with:
172 python-version: 3.8
173 architecture: x64
174 # - name: Login to Docker Hub
175 # uses: docker/login-action@v1
176 # with:
177 # username: ${{ secrets.DOCKER_HUB_USERNAME }}
178 # password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
179 - name: Install cibuildwheel
180 run: |
181 pip install --upgrade pip setuptools
182 pip install cython cibuildwheel==$VERSION
183 env:
184 VERSION: 2.2.2
185 - name: Build wheels
186 run: cibuildwheel --output-dir wheelhouse
187 env:
188 CIBW_BUILD: '*p3*'
189 CIBW_BEFORE_BUILD: pip install cython
190 - uses: ncipollo/release-action@v1
191 name: Upload wheels
192 if: ${{ github.ref_type == 'tag' }}
193 with:
194 artifacts: "wheelhouse/*"
195 allowUpdates: true
196 draft: true
197 tag: Linux
198 token: ${{ secrets.GITHUB_TOKEN }}
199
200 wheels-macos:
201 name: Wheels for OS X
202 needs: valgrind
203 runs-on: macos-10.15
204 steps:
205 - uses: actions/checkout@v2
206 - name: Set up Python
207 uses: actions/setup-python@v2
208 with:
209 python-version: 3.8
210 architecture: x64
211 # - name: Login to Docker Hub
212 # uses: docker/login-action@v1
213 # with:
214 # username: ${{ secrets.DOCKER_HUB_USERNAME }}
215 # password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
216 - name: Install cibuildwheel
217 run: |
218 pip install --upgrade pip setuptools
219 pip install cython cibuildwheel==$VERSION
220 env:
221 VERSION: 2.2.2
222 - name: Build wheels
223 run: cibuildwheel --output-dir wheelhouse
224 env:
225 CIBW_BUILD: '*p3*'
226 CIBW_BEFORE_BUILD: pip install cython
227 - uses: ncipollo/release-action@v1
228 name: Upload wheels
229 if: ${{ github.ref_type == 'tag' }}
230 with:
231 artifacts: "wheelhouse/*"
232 allowUpdates: true
233 draft: true
234 tag: OS X
235 token: ${{ secrets.GITHUB_TOKEN }}
236
237 wheels-windows:
238 name: Wheels for Windows
239 needs: valgrind
240 runs-on: windows-2019
241 steps:
242 - uses: actions/checkout@v2
243 - name: Set up Python
244 uses: actions/setup-python@v2
245 with:
246 python-version: 3.8
247 architecture: x64
248 # - name: Login to Docker Hub
249 # uses: docker/login-action@v1
250 # with:
251 # username: ${{ secrets.DOCKER_HUB_USERNAME }}
252 # password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
253 - name: Install cibuildwheel
254 run: |
255 pip install cibuildwheel==$env:VERSION
256 env:
257 VERSION: 2.2.2
258 - name: Build wheels
259 run: cibuildwheel --output-dir wheelhouse
260 env:
261 CIBW_BUILD: '*p3*'
262 CIBW_BEFORE_BUILD: pip install cython
263 - uses: ncipollo/release-action@v1
264 name: Upload wheels
265 if: ${{ github.ref_type == 'tag' }}
266 with:
267 artifacts: "wheelhouse/*"
268 allowUpdates: true
269 draft: true
270 tag: Windows
271 token: ${{ secrets.GITHUB_TOKEN }}
272
273 wheels-linux-non-x86:
274 name: Wheels for Linux non-x86
275 needs: valgrind
276 runs-on: ubuntu-20.04
277 strategy:
278 matrix:
279 arch:
280 - aarch64
281 - ppc64le
282 - s390x
283 steps:
284 - uses: actions/checkout@v2.1.0
285 # - name: Login to Docker Hub
286 # uses: docker/login-action@v1
287 # with:
288 # username: ${{ secrets.DOCKER_HUB_USERNAME }}
289 # password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
290 - name: Set up Python
291 uses: actions/setup-python@v2
292 with:
293 python-version: 3.8
294 architecture: x64
295 - name: Set up QEMU
296 id: qemu
297 uses: docker/setup-qemu-action@v1.2.0
298 with:
299 image: tonistiigi/binfmt:latest
300 platforms: all
301 - name: Install cibuildwheel
302 run: |
303 pip install --upgrade pip setuptools
304 pip install cibuildwheel==$VERSION
305 env:
306 VERSION: 2.2.2
307 - name: Build wheels
308 run: cibuildwheel --output-dir wheelhouse
309 env:
310 CIBW_BUILD: '*p3*'
311 CIBW_BEFORE_BUILD: pip install cython
312 CIBW_ARCHS: ${{ matrix.arch }}
313 - uses: ncipollo/release-action@v1
314 name: Upload wheels
315 if: ${{ github.ref_type == 'tag' }}
316 with:
317 artifacts: "wheelhouse/*"
318 allowUpdates: true
319 draft: true
320 tag: Linux non-x86
321 token: ${{ secrets.GITHUB_TOKEN }}
+0
-268
.travis.yml less more
0 env:
1 - VERSION=21.9.3.30
2 - VERSION=21.4.6.55
3 - VERSION=21.3.10.1
4 - VERSION=21.2.10.48
5 - VERSION=21.1.9.41
6 - VERSION=20.11.2.1
7 - VERSION=20.10.2.20
8 - VERSION=20.9.3.45
9 - VERSION=20.8.4.11
10 - VERSION=20.7.4.11
11 - VERSION=20.6.8.5
12 - VERSION=20.5.5.74
13 - VERSION=20.4.9.110
14 - VERSION=20.3.20.6
15 - VERSION=20.3.20.6 USE_NUMPY=1
16 - VERSION=19.16.17.80
17 - VERSION=19.15.3.6
18 - VERSION=19.9.2.4 # allow_suspicious_low_cardinality_types
19 - VERSION=19.8.3.8 # SimpleAggregateFunction
20 - VERSION=19.3.3
21 - VERSION=18.12.17
22 # - VERSION=18.10.3
23 # - VERSION=18.6.0
24 # - VERSION=18.5.1
25 # - VERSION=18.4.0
26 # - VERSION=18.1.0
27 # - VERSION=1.1.54394
28 # - VERSION=1.1.54390
29 # - VERSION=1.1.54388
30 # - VERSION=1.1.54385
31 # - VERSION=1.1.54383
32 # - VERSION=1.1.54381
33 # - VERSION=1.1.54380
34 # - VERSION=1.1.54378 client's image miss tzdata package: https://github.com/yandex/ClickHouse/commit/1bf49fe8446c7dea95beaef2b131e6c6708b0b62#diff-cc737435a5ba74620a889b7718f39a80
35 # - VERSION=1.1.54343
36 # - VERSION=1.1.54342
37 ## - VERSION=1.1.54337 Broken network
38 # - VERSION=1.1.54327
39 # - VERSION=1.1.54310
40 # - VERSION=1.1.54304
41 # - VERSION=1.1.54292
42 # - VERSION=1.1.54289
43 # - VERSION=1.1.54284
44 # - VERSION=1.1.54282
45
46 language: python
47 python:
48 - "3.4"
49 - "3.5"
50 - "3.6"
51 - "3.7"
52 - "3.8"
53 - "3.9"
54 - "pypy3.5"
55 cache: pip
56 services:
57 - docker
58 install:
59 - pip install --upgrade pip setuptools
60 # Check flake8 first
61 - pip install flake8 flake8-print
62 - flake8
63 - if [ ! -z $DOCKER_PASSWORD ] ; then echo "$DOCKER_PASSWORD" | docker login -u "$DOCKER_USERNAME" --password-stdin ; fi
64 - docker run -e "TZ=Europe/Moscow" -d -p 127.0.0.1:9000:9000 --name test-clickhouse-server --ulimit nofile=262144:262144 yandex/clickhouse-server:$VERSION
65 - docker run -d --entrypoint "/bin/sh" --name test-clickhouse-client --link test-clickhouse-server:clickhouse-server yandex/clickhouse-client:$VERSION -c 'while :; do sleep 1; done'
66 - docker ps -a
67 # Faking clickhouse-client real communication with container via docker exec.
68 - echo -e '#!/bin/bash\n\ndocker exec -e "`env | grep ^TZ=`" test-clickhouse-client clickhouse-client "$@"' | sudo tee /usr/local/bin/clickhouse-client > /dev/null
69 - sudo chmod +x /usr/local/bin/clickhouse-client
70 # Overriding setup.cfg. Set host=clickhouse-server
71 - sed -i 's/^host=localhost$/host=clickhouse-server/' setup.cfg
72 # Make host think that clickhouse-server is localhost
73 - echo '127.0.0.1 clickhouse-server' | sudo tee /etc/hosts > /dev/null
74 - pip install coveralls cython 'cryptography<3.3'
75 - if [ -z ${USE_NUMPY+x} ]; then pip uninstall -y numpy pandas; fi
76 script:
77 # Enable cython tracing only for cpython
78 - if [ "$TRAVIS_PYTHON_VERSION" != "pypy3.5" ]; then CYTHON_TRACE=1 python setup.py build_ext --define CYTHON_TRACE ; fi
79 - coverage run setup.py test
80 after_success:
81 coveralls
82
83 jobs:
84 # Exclude numpy unsupported versions,
85 exclude:
86 - python: 3.4
87 env: VERSION=20.3.20.6 USE_NUMPY=1
88 - python: 3.9-dev
89 env: VERSION=20.3.20.6 USE_NUMPY=1
90 - python: pypy3.5
91 env: VERSION=20.3.20.6 USE_NUMPY=1
92
93 include:
94 - stage: valgrind
95 name: Valgrind check
96 os: linux
97 language: python
98 python:
99 - "3.6"
100 addons:
101 apt:
102 packages:
103 - valgrind
104 install:
105 - if [ ! -z $DOCKER_PASSWORD ] ; then echo "$DOCKER_PASSWORD" | docker login -u "$DOCKER_USERNAME" --password-stdin ; fi
106 - docker run -e "TZ=Europe/Moscow" -d -p 127.0.0.1:9000:9000 --name test-clickhouse-server --ulimit nofile=262144:262144 yandex/clickhouse-server:$VERSION
107 - docker run -d --entrypoint "/bin/sh" --name test-clickhouse-client --link test-clickhouse-server:clickhouse-server yandex/clickhouse-client:$VERSION -c 'while :; do sleep 1; done'
108 - docker ps -a
109 # Faking clickhouse-client real communication with container via docker exec.
110 - echo -e '#!/bin/bash\n\ndocker exec -e "`env | grep ^TZ=`" test-clickhouse-client clickhouse-client "$@"' | sudo tee /usr/local/bin/clickhouse-client > /dev/null
111 - sudo chmod +x /usr/local/bin/clickhouse-client
112 # Overriding setup.cfg. Set host=clickhouse-server
113 - sed -i 's/^host=localhost$/host=clickhouse-server/' setup.cfg
114 # Make host think that clickhouse-server is localhost
115 - echo '127.0.0.1 clickhouse-server' | sudo tee /etc/hosts > /dev/null
116 - pip install --upgrade pip setuptools
117 - pip install cython
118
119 script:
120 - valgrind --error-exitcode=1 python setup.py test
121
122 env:
123 - VERSION=20.3.7.46
124 - USE_NUMPY=1
125 - PYTHONMALLOC=malloc
126
127 - stage: wheels
128 name: Wheels for Linux
129 os: linux
130 language: python
131 python:
132 - "3.6"
133
134 install:
135 - pip install --upgrade pip setuptools
136 - pip install cython cibuildwheel==1.11.0
137
138 script:
139 - cibuildwheel --output-dir wheelhouse
140
141 env:
142 - CIBW_BUILD='*p3*'
143 - CIBW_BEFORE_BUILD='pip install cython'
144 deploy:
145 name: Linux
146 provider: releases
147 api_key: $GITHUB_TOKEN
148 file_glob: true
149 file: wheelhouse/*
150 skip_cleanup: true
151 draft: true
152 prerelease: true
153 overwrite: true
154 on:
155 tags: true
156
157 - stage: wheels
158 name: Wheels for Linux aarch64
159 arch: arm64
160 os: linux
161 language: python
162 python:
163 - "3.6"
164 install:
165 - pip install --upgrade pip setuptools
166 - pip install cython cibuildwheel==1.11.0
167
168 script:
169 - cibuildwheel --output-dir wheelhouse
170
171 env:
172 - CIBW_BUILD='*p3*'
173 - CIBW_BEFORE_BUILD='pip install cython'
174 deploy:
175 name: Linux aarch64
176 provider: releases
177 api_key: $GITHUB_TOKEN
178 file_glob: true
179 file: wheelhouse/*
180 skip_cleanup: true
181 draft: true
182 prerelease: true
183 overwrite: true
184 on:
185 tags: true
186
187 - stage: wheels
188 name: Wheels for OS X
189 os: osx
190 language: generic
191
192 install:
193 - pip3 install --upgrade pip setuptools
194 - pip3 install cython cibuildwheel==1.11.0
195
196 script:
197 - cibuildwheel --output-dir wheelhouse
198
199 env:
200 - CIBW_BUILD='*p3*'
201 - CIBW_BEFORE_BUILD='pip install cython'
202 deploy:
203 name: Mac OS X
204 provider: releases
205 api_key: $GITHUB_TOKEN
206 file_glob: true
207 file: wheelhouse/*
208 skip_cleanup: true
209 draft: true
210 prerelease: true
211 overwrite: true
212 on:
213 tags: true
214
215 - stage: wheels
216 name: Wheels for Windows
217 os: windows
218 language: shell
219
220 install:
221 - choco install python3 --version 3.9.0 --params "/InstallDir:C:\Python39"
222 script:
223 - C:/Python39/python -m pip install cibuildwheel==1.11.0
224 - C:/Python39/python -m cibuildwheel --platform windows --output-dir wheelhouse
225
226 env:
227 - CIBW_BUILD='cp*'
228 - CIBW_BEFORE_BUILD='python -m pip install cython'
229 deploy:
230 name: Windows Python
231 provider: releases
232 api_key: $GITHUB_TOKEN
233 file_glob: true
234 file: wheelhouse/*
235 skip_cleanup: true
236 draft: true
237 prerelease: true
238 overwrite: true
239 on:
240 tags: true
241
242 - stage: wheels
243 name: Wheels for Windows PyPy
244 os: windows
245 language: shell
246
247 install:
248 - choco install python3 --version 3.9.0 --params "/InstallDir:C:\Python39"
249 script:
250 - C:/Python39/python -m pip install cibuildwheel==1.11.0
251 - C:/Python39/python -m cibuildwheel --platform windows --output-dir wheelhouse
252
253 env:
254 - CIBW_BUILD='pp*'
255 - CIBW_BEFORE_BUILD='python -m pip install cython'
256 deploy:
257 name: Windows Python PyPy
258 provider: releases
259 api_key: $GITHUB_TOKEN
260 file_glob: true
261 file: wheelhouse/*
262 skip_cleanup: true
263 draft: true
264 prerelease: true
265 overwrite: true
266 on:
267 tags: true
00 # Changelog
11
22 ## Unreleased
3
4 ## [0.2.3] - 2021-10-24
3 ### Added
4 - `tzlocal`>=4.0 support. Pull request [#263](https://github.com/mymarilyn/clickhouse-driver/pull/263) by [azat](https://github.com/azat).
5 - `quota_key` support.
6 - Wheels for Python 3.10.
7 - Bool type. Pull request [#279](https://github.com/mymarilyn/clickhouse-driver/pull/279) by [adrian17](https://github.com/adrian17).
8 - Nested type with `flatten_nested=0`. Pull request [#285](https://github.com/mymarilyn/clickhouse-driver/pull/285) by [spff](https://github.com/spff).
9
10 ### Fixed
11 - Handle partially consumed query. Solves issue [#117](https://github.com/mymarilyn/clickhouse-driver/issues/117).
12 - Fallback to generic columns when NumPy support is not implemented for column type. Solves issue [#254](https://github.com/mymarilyn/clickhouse-driver/issues/254).
13 - Broken ZSTD decompression. Solves issue [#269](https://github.com/mymarilyn/clickhouse-driver/issues/269).
14 - External tables passing with NumPy. Solves issue [#267](https://github.com/mymarilyn/clickhouse-driver/issues/267).
15 - Consider tzinfo for datetime parameters substitution. Solves issue [#268](https://github.com/mymarilyn/clickhouse-driver/issues/268).
16 - Do not use NumPy columns inside generic columns. Solves issue [#272](https://github.com/mymarilyn/clickhouse-driver/issues/272).
17 - Decimal128 and Decimal256 types_check. Solves issue [#274](https://github.com/mymarilyn/clickhouse-driver/issues/274).
18 - Insertion using `execute` in DB API. Solves issue [#179](https://github.com/mymarilyn/clickhouse-driver/issues/179). Pull request [#276](https://github.com/mymarilyn/clickhouse-driver/pull/276) by [nnseva](https://github.com/nnseva).
19 - Variables cannot be declared with `cpdef` in Cython 3. Pull request [#281](https://github.com/mymarilyn/clickhouse-driver/pull/281) by [ym](https://github.com/ym).
20
21 ### Changed
22 - Switch from nose test runner to pytest.
23 - Migrate from Travis CI to GitHub Actions.
24
25 ## [0.2.2] - 2021-09-24
526 ### Added
627 - DateTime64 extended range. Pull request [#222](https://github.com/mymarilyn/clickhouse-driver/pull/222) by [0x4ec7](https://github.com/0x4ec7).
728 - Support for using `Client` as context manager closing connection on exit. Solves issue [#237](https://github.com/mymarilyn/clickhouse-driver/issues/237). Pull request [#206](https://github.com/mymarilyn/clickhouse-driver/pull/238) by [wlhjason](https://github.com/wlhjason).
358379 - Date/DateTime types.
359380 - String types.
360381
361 [Unreleased]: https://github.com/mymarilyn/clickhouse-driver/compare/0.2.2...HEAD
382 [Unreleased]: https://github.com/mymarilyn/clickhouse-driver/compare/0.2.3...HEAD
383 [0.2.3]: https://github.com/mymarilyn/clickhouse-driver/compare/0.2.2...0.2.3
362384 [0.2.2]: https://github.com/mymarilyn/clickhouse-driver/compare/0.2.1...0.2.2
363385 [0.2.1]: https://github.com/mymarilyn/clickhouse-driver/compare/0.2.0...0.2.1
364386 [0.2.0]: https://github.com/mymarilyn/clickhouse-driver/compare/0.1.5...0.2.0
1515 .. image:: https://img.shields.io/pypi/dm/clickhouse-driver.svg
1616 :target: https://pypi.org/project/clickhouse-driver
1717
18 .. image:: https://travis-ci.org/mymarilyn/clickhouse-driver.svg?branch=master
19 :target: https://travis-ci.org/mymarilyn/clickhouse-driver
18 .. image:: https://github.com/mymarilyn/clickhouse-driver/actions/workflows/actions.yml/badge.svg
19 :target: https://github.com/mymarilyn/clickhouse-driver/actions/workflows/actions.yml
2020
2121 ClickHouse Python Driver with native (TCP) interface support.
2222
4242 * Enum8/16
4343 * Array(T)
4444 * Nullable(T)
45 * Bool
4546 * UUID
4647 * Decimal
4748 * IPv4/IPv6
22 from .dbapi import connect
33
44
5 VERSION = (0, 2, 2)
5 VERSION = (0, 2, 3)
66 __version__ = '.'.join(str(x) for x in VERSION)
77
88 __all__ = ['Client', 'connect']
00 from .reader import read_varint, read_binary_uint8, read_binary_int32
11 from .varint import write_varint
22 from .writer import write_binary_uint8, write_binary_int32
3 from .columns import nestedcolumn
34
45
56 class BlockInfo(object):
150151 return [row[index] for row in self.data]
151152
152153 def _mutate_dicts_to_rows(self, data):
153 column_names = [x[0] for x in self.columns_with_types]
154
155154 check_row_type = False
156155 if self.types_check:
157156 check_row_type = self._check_dict_row_type
158157
158 return self._pure_mutate_dicts_to_rows(
159 data,
160 self.columns_with_types,
161 check_row_type,
162 )
163
164 def _pure_mutate_dicts_to_rows(
165 self,
166 data,
167 columns_with_types,
168 check_row_type,
169 ):
170 columns_with_cwt = []
171 for name, type_ in columns_with_types:
172 cwt = None
173 if type_.startswith('Nested'):
174 cwt = nestedcolumn.get_columns_with_types(type_)
175 columns_with_cwt.append((name, cwt))
176
159177 for i, row in enumerate(data):
160178 if check_row_type:
161179 check_row_type(row)
162180
163 data[i] = [row[name] for name in column_names]
181 new_data = []
182 for name, cwt in columns_with_cwt:
183 if cwt is None:
184 new_data.append(row[name])
185 else:
186 new_data.append(self._pure_mutate_dicts_to_rows(
187 row[name], cwt, check_row_type
188 ))
189 data[i] = new_data
190 # return for recursion
191 return data
164192
165193 def _check_rows(self, data):
166194 expected_row_len = len(self.columns_with_types)
4545 * ``opentelemetry_tracestate`` -- OpenTelemetry tracestate header as
4646 described by W3C Trace Context recommendation.
4747 New in version *0.2.2*.
48 * ``quota_key`` -- A string to differentiate quotas when the user have
49 keyed quotas configured on server.
50 New in version *0.2.3*.
4851 """
4952
5053 available_client_settings = (
5356 'strings_encoding',
5457 'use_numpy',
5558 'opentelemetry_traceparent',
56 'opentelemetry_tracestate'
59 'opentelemetry_tracestate',
60 'quota_key'
5761 )
5862
5963 def __init__(self, *args, **kwargs):
60 self.settings = kwargs.pop('settings', {}).copy()
64 self.settings = (kwargs.pop('settings', None) or {}).copy()
6165
6266 self.client_settings = {
6367 'insert_block_size': int(self.settings.pop(
7781 ),
7882 'opentelemetry_tracestate': self.settings.pop(
7983 'opentelemetry_tracestate', ''
84 ),
85 'quota_key': self.settings.pop(
86 'quota_key', ''
8087 )
8188 }
8289
212219 self.connection.database = query[4:].strip()
213220
214221 @contextmanager
215 def disconnect_on_error(self, query):
222 def disconnect_on_error(self, query, settings):
223 self.make_query_settings(settings)
224
216225 try:
226 self.connection.force_connect()
227 self.last_query = QueryInfo()
228
217229 yield
230
218231 self.track_current_database(query)
219232
220233 except (Exception, KeyboardInterrupt):
266279 """
267280
268281 start_time = time()
269 self.make_query_settings(settings)
270 self.connection.force_connect()
271 self.last_query = QueryInfo()
272
273 with self.disconnect_on_error(query):
282
283 with self.disconnect_on_error(query, settings):
274284 # INSERT queries can use list/tuple/generator of list/tuples/dicts.
275285 # For SELECT parameters can be passed in only in dict right now.
276286 is_insert = isinstance(params, (list, tuple, types.GeneratorType))
321331 :return: :ref:`progress-query-result` proxy.
322332 """
323333
324 self.make_query_settings(settings)
325 self.connection.force_connect()
326 self.last_query = QueryInfo()
327
328 with self.disconnect_on_error(query):
334 with self.disconnect_on_error(query, settings):
329335 return self.process_ordinary_query_with_progress(
330336 query, params=params, with_column_types=with_column_types,
331337 external_tables=external_tables, query_id=query_id,
360366 :return: :ref:`iter-query-result` proxy.
361367 """
362368
363 self.make_query_settings(settings)
364 self.connection.force_connect()
365 self.last_query = QueryInfo()
366
367 with self.disconnect_on_error(query):
369 with self.disconnect_on_error(query, settings):
368370 return self.iter_process_ordinary_query(
369371 query, params=params, with_column_types=with_column_types,
370372 external_tables=external_tables,
431433 raise RuntimeError('Extras for NumPy must be installed')
432434
433435 start_time = time()
434 self.make_query_settings(settings)
435 self.connection.force_connect()
436 self.last_query = QueryInfo()
437
438 with self.disconnect_on_error(query):
436
437 with self.disconnect_on_error(query, settings):
439438 self.connection.send_query(query, query_id=query_id)
440439 self.connection.send_external_tables(external_tables)
441440
456455 types_check=False, columnar=False):
457456
458457 if params is not None:
459 query = self.substitute_params(query, params)
458 query = self.substitute_params(
459 query, params, self.connection.context
460 )
460461
461462 self.connection.send_query(query, query_id=query_id)
462463 self.connection.send_external_tables(external_tables,
470471 types_check=False, columnar=False):
471472
472473 if params is not None:
473 query = self.substitute_params(query, params)
474 query = self.substitute_params(
475 query, params, self.connection.context
476 )
474477
475478 self.connection.send_query(query, query_id=query_id)
476479 self.connection.send_external_tables(external_tables,
484487 types_check=False):
485488
486489 if params is not None:
487 query = self.substitute_params(query, params)
490 query = self.substitute_params(
491 query, params, self.connection.context
492 )
488493
489494 self.connection.send_query(query, query_id=query_id)
490495 self.connection.send_external_tables(external_tables,
588593 # Client must still read until END_OF_STREAM packet.
589594 return self.receive_result(with_column_types=with_column_types)
590595
591 def substitute_params(self, query, params):
596 def substitute_params(self, query, params, context):
592597 if not isinstance(params, dict):
593598 raise ValueError('Parameters are expected in dict form')
594599
595 escaped = escape_params(params)
600 escaped = escape_params(params, context)
596601 return query % escaped
597602
598603 @classmethod
3333 initial_query_id = ''
3434 initial_address = '0.0.0.0:0'
3535
36 quota_key = ''
37
3836 def __init__(self, client_name, context):
3937 self.query_kind = ClientInfo.QueryKind.NO_QUERY
4038
4947 context.client_settings['opentelemetry_traceparent'],
5048 context.client_settings['opentelemetry_tracestate']
5149 )
50
51 self.quota_key = context.client_settings['quota_key']
5252
5353 super(ClientInfo, self).__init__()
5454
0 from .base import FormatColumn
1
2
class BoolColumn(FormatColumn):
    # Native ClickHouse Bool type. Each value is (de)serialized as a
    # single byte using struct format '?'; only Python bools are
    # accepted on insert (py_types).
    ch_type = 'Bool'
    py_types = (bool, )
    format = '?'
00 from datetime import datetime
11
22 from pytz import timezone as get_timezone, utc
3 from tzlocal import get_localzone
4
3 from ..util.compat import get_localzone_name_compat
54 from .base import FormatColumn
65
76 EPOCH = datetime(1970, 1, 1, tzinfo=utc)
192191 offset_naive = False
193192 else:
194193 if not context.settings.get('use_client_time_zone', False):
195 try:
196 local_timezone = get_localzone().key
197 except AttributeError:
198 local_timezone = get_localzone().zone
199 except Exception:
200 local_timezone = None
201
194 local_timezone = get_localzone_name_compat()
202195 if local_timezone != context.server_info.timezone:
203196 tz_name = context.server_info.timezone
204197
77 class DecimalColumn(FormatColumn):
88 py_types = (Decimal, float, int)
99 max_precision = None
10 int_size = None
1110
1211 def __init__(self, precision, scale, types_check=False, **kwargs):
1312 self.precision = precision
1514 super(DecimalColumn, self).__init__(**kwargs)
1615
1716 if types_check:
18 max_signed_int = (1 << (8 * self.int_size - 1)) - 1
17 def check_item(value):
18 parts = str(value).split('.')
19 int_part = parts[0]
20 frac_part = parts[1] if len(parts) > 1 else ''
1921
20 def check_item(value):
21 if value < -max_signed_int or value > max_signed_int:
22 if len(int_part) > precision:
23 raise ColumnTypeMismatchException(value)
24
25 if len(frac_part) > scale:
2226 raise ColumnTypeMismatchException(value)
2327
2428 self.check_item = check_item
7983 class Decimal32Column(DecimalColumn):
8084 format = 'i'
8185 max_precision = 9
82 int_size = 4
8386
8487
8588 class Decimal64Column(DecimalColumn):
8689 format = 'q'
8790 max_precision = 18
88 int_size = 8
8991
9092
9193 class Decimal128Column(DecimalColumn, Int128Column):
22
33 from .. import writer
44
5 cpdef object MAX_UINT64 = writer.MAX_UINT64
6 cpdef object MAX_INT64 = writer.MAX_INT64
5 cdef object MAX_UINT64 = writer.MAX_UINT64
6 cdef object MAX_INT64 = writer.MAX_INT64
77
88
99 def int128_from_quads(quad_items, unsigned long long n_items):
0
1 from .arraycolumn import create_array_column
2
3
def create_nested_column(spec, column_by_spec_getter):
    # A Nested column has the same wire format as an Array of Tuples of
    # its sub-columns, so build the equivalent Array(Tuple(...)) spec
    # from the Nested sub-column types and delegate to the array column.
    return create_array_column(
        'Array(Tuple({}))'.format(','.join(get_nested_columns(spec))),
        column_by_spec_getter=column_by_spec_getter
    )
9
10
def get_nested_columns(spec):
    """Return the sub-column *type* names of a ``Nested(...)`` spec.

    ``'Nested(a Int8, b String)'`` -> ``['Int8', 'String']``.
    Only top-level (depth 0) commas split columns; a top-level space
    marks the end of a column name, so the captured token is the type.
    """
    inner_spec = get_inner_spec(spec)
    types = []
    depth = 0
    token_start = 0
    # The appended ',' acts as a sentinel so the final type is flushed.
    for pos, char in enumerate(inner_spec + ','):
        if char == '(':
            depth += 1
        elif char == ')':
            depth -= 1
        elif char == ',' and depth == 0:
            types.append(inner_spec[token_start:pos])
            token_start = pos + 1
        elif char == ' ' and depth == 0:
            # Skip past the column name; restart the slice at the type.
            token_start = pos + 1
    return types
30
31
def get_columns_with_types(spec):
    """Return ``(name, type)`` pairs from a ``Nested(...)`` spec.

    ``'Nested(a Int8, b String)'`` -> ``[('a', 'Int8'), ('b', 'String')]``.
    Only top-level (depth 0) commas split columns; the last top-level
    space inside a column separates its name from its type.
    """
    inner_spec = get_inner_spec(spec)
    pairs = []
    depth = 0
    pair_start = 0
    type_start = 0
    # The appended ',' acts as a sentinel so the final pair is flushed.
    for pos, char in enumerate(inner_spec + ','):
        if char == '(':
            depth += 1
        elif char == ')':
            depth -= 1
        elif char == ',' and depth == 0:
            pairs.append((
                inner_spec[pair_start:type_start].strip(),
                inner_spec[type_start:pos]
            ))
            pair_start = pos + 1
        elif char == ' ' and depth == 0:
            type_start = pos + 1
    return pairs
56
57
def get_inner_spec(spec):
    """Return the text inside the outermost parentheses of a spec.

    ``'Nested(a Int8, b String)'`` -> ``'a Int8, b String'``.
    Nested parentheses are balanced via a depth counter so inner
    ``Tuple(...)``/``Array(...)`` groups are kept intact.
    """
    offset = len('Nested')
    depth = 0
    pos = offset
    for pos in range(offset, len(spec)):
        char = spec[pos]
        if char == '(':
            depth += 1
        elif char == ')':
            depth -= 1
        # Depth returns to zero at the closing paren of the outer group
        # (or immediately, if the spec has no parentheses at all).
        if depth == 0:
            break
    return spec[offset + 1:pos]
00 import numpy as np
11 import pandas as pd
22 from pytz import timezone as get_timezone
3 from tzlocal import get_localzone
43
54 from .base import NumpyColumn
5 from ...util.compat import get_localzone_name_compat
66
77
88 class NumpyDateTimeColumnBase(NumpyColumn):
121121
122122 tz_name = timezone = None
123123 offset_naive = True
124 local_timezone = None
125124
126125 # As Numpy do not use local timezone for converting timestamp to
127126 # datetime we need always detect local timezone for manual converting.
128 try:
129 local_timezone = get_localzone().key
130 except AttributeError:
131 local_timezone = get_localzone().zone
132 except Exception:
133 pass
127 local_timezone = get_localzone_name_compat()
134128
135129 # Use column's timezone if it's specified.
136130 if spec and spec[-1] == ')':
00 from ... import errors
1 from ..arraycolumn import create_array_column
21 from .datecolumn import NumpyDateColumn
32 from .datetimecolumn import create_numpy_datetime_column
4 from ..decimalcolumn import create_decimal_column
5 from ..enumcolumn import create_enum_column
63 from .floatcolumn import NumpyFloat32Column, NumpyFloat64Column
74 from .intcolumn import (
85 NumpyInt8Column, NumpyInt16Column, NumpyInt32Column, NumpyInt64Column,
96 NumpyUInt8Column, NumpyUInt16Column, NumpyUInt32Column, NumpyUInt64Column
107 )
118 from .lowcardinalitycolumn import create_numpy_low_cardinality_column
12 from ..nothingcolumn import NothingColumn
13 from ..nullcolumn import NullColumn
9 from .stringcolumn import create_string_column
1410 from ..nullablecolumn import create_nullable_column
15 from ..simpleaggregatefunctioncolumn import (
16 create_simple_aggregate_function_column
17 )
18 from .stringcolumn import create_string_column
19 from ..tuplecolumn import create_tuple_column
20 from ..uuidcolumn import UUIDColumn
21 from ..intervalcolumn import (
22 IntervalYearColumn, IntervalMonthColumn, IntervalWeekColumn,
23 IntervalDayColumn, IntervalHourColumn, IntervalMinuteColumn,
24 IntervalSecondColumn
25 )
26 from ..ipcolumn import IPv4Column, IPv6Column
2711
2812 column_by_type = {c.ch_type: c for c in [
2913 NumpyDateColumn,
3014 NumpyFloat32Column, NumpyFloat64Column,
3115 NumpyInt8Column, NumpyInt16Column, NumpyInt32Column, NumpyInt64Column,
32 NumpyUInt8Column, NumpyUInt16Column, NumpyUInt32Column, NumpyUInt64Column,
33 NothingColumn, NullColumn, UUIDColumn,
34 IntervalYearColumn, IntervalMonthColumn, IntervalWeekColumn,
35 IntervalDayColumn, IntervalHourColumn, IntervalMinuteColumn,
36 IntervalSecondColumn, IPv4Column, IPv6Column
16 NumpyUInt8Column, NumpyUInt16Column, NumpyUInt32Column, NumpyUInt64Column
3717 ]}
3818
3919
4424 if spec == 'String' or spec.startswith('FixedString'):
4525 return create_string_column(spec, column_options)
4626
47 elif spec.startswith('Enum'):
48 return create_enum_column(spec, column_options)
49
5027 elif spec.startswith('DateTime'):
5128 return create_numpy_datetime_column(spec, column_options)
52
53 elif spec.startswith('Decimal'):
54 return create_decimal_column(spec, column_options)
55
56 elif spec.startswith('Array'):
57 return create_array_column(spec, create_column_with_options)
58
59 elif spec.startswith('Tuple'):
60 return create_tuple_column(spec, create_column_with_options)
6129
6230 elif spec.startswith('Nullable'):
6331 return create_nullable_column(spec, create_column_with_options)
6533 elif spec.startswith('LowCardinality'):
6634 return create_numpy_low_cardinality_column(spec,
6735 create_column_with_options)
68
69 elif spec.startswith('SimpleAggregateFunction'):
70 return create_simple_aggregate_function_column(
71 spec, create_column_with_options)
72
7336 else:
74 try:
37 if spec in column_by_type:
7538 cls = column_by_type[spec]
7639 return cls(**column_options)
7740
78 except KeyError as e:
79 raise errors.UnknownTypeError('Unknown type {}'.format(e.args[0]))
41 raise errors.UnknownTypeError('Unknown type {}'.format(spec))
0 import logging
1
02 from .. import errors
13 from .arraycolumn import create_array_column
4 from .boolcolumn import BoolColumn
25 from .datecolumn import DateColumn, Date32Column
36 from .datetimecolumn import create_datetime_column
47 from .decimalcolumn import create_decimal_column
2023 )
2124 from .stringcolumn import create_string_column
2225 from .tuplecolumn import create_tuple_column
26 from .nestedcolumn import create_nested_column
2327 from .uuidcolumn import UUIDColumn
2428 from .intervalcolumn import (
2529 IntervalYearColumn, IntervalMonthColumn, IntervalWeekColumn,
3741 NothingColumn, NullColumn, UUIDColumn,
3842 IntervalYearColumn, IntervalMonthColumn, IntervalWeekColumn,
3943 IntervalDayColumn, IntervalHourColumn, IntervalMinuteColumn,
40 IntervalSecondColumn, IPv4Column, IPv6Column
44 IntervalSecondColumn, IPv4Column, IPv6Column, BoolColumn
4145 ]}
4246
47 logger = logging.getLogger(__name__)
4348
44 def get_column_by_spec(spec, column_options):
49
50 def get_column_by_spec(spec, column_options, use_numpy=None):
4551 context = column_options['context']
46 use_numpy = context.client_settings['use_numpy'] if context else False
52
53 if use_numpy is None:
54 use_numpy = context.client_settings['use_numpy'] if context else False
4755
4856 if use_numpy:
4957 from .numpy.service import get_numpy_column_by_spec
50 return get_numpy_column_by_spec(spec, column_options)
58
59 try:
60 return get_numpy_column_by_spec(spec, column_options)
61 except errors.UnknownTypeError:
62 use_numpy = False
63 logger.warning('NumPy support is not implemented for %s. '
64 'Using generic column', spec)
5165
5266 def create_column_with_options(x):
53 return get_column_by_spec(x, column_options)
67 return get_column_by_spec(x, column_options, use_numpy=use_numpy)
5468
5569 if spec == 'String' or spec.startswith('FixedString'):
5670 return create_string_column(spec, column_options)
7084 elif spec.startswith('Tuple'):
7185 return create_tuple_column(spec, create_column_with_options)
7286
87 elif spec.startswith('Nested'):
88 return create_nested_column(spec, create_column_with_options)
89
7390 elif spec.startswith('Nullable'):
7491 return create_nullable_column(spec, create_column_with_options)
7592
88105 cls = column_by_type[spec]
89106 return cls(**column_options)
90107
91 except KeyError as e:
92 raise errors.UnknownTypeError('Unknown type {}'.format(e.args[0]))
108 except KeyError:
109 raise errors.UnknownTypeError('Unknown type {}'.format(spec))
93110
94111
95112 def read_column(context, column_spec, n_items, buf):
4848
4949
5050 def get_inner_spec(spec):
51 brackets = 1
52 offset = len('Tuple(')
51 brackets = 0
52 offset = len('Tuple')
5353 i = offset
5454 for i, ch in enumerate(spec[offset:], offset):
55 if brackets == 0:
56 break
57
5855 if ch == '(':
5956 brackets += 1
6057
6158 elif ch == ')':
6259 brackets -= 1
6360
64 return spec[offset:i]
61 if brackets == 0:
62 break
63
64 return spec[offset + 1:i]
00 from io import BytesIO
1
2 from ..reader import read_binary_uint32
3 from ..writer import write_binary_uint8, write_binary_uint32
4 from .. import errors
15
26 try:
37 from clickhouse_cityhash.cityhash import CityHash128
59 raise RuntimeError(
610 'Package clickhouse-cityhash is required to use compression'
711 )
8
9 from .. import errors
1012
1113
1214 class BaseCompressor(object):
3032 def write(self, p_str):
3133 self.data.write(p_str)
3234
35 def compress_data(self, data):
36 raise NotImplementedError
37
def get_compressed_data(self, extra_header_size):
    """Compress the buffered value and frame it for the wire.

    Layout written: uint32 total block size (header + payload),
    uint32 uncompressed data size, then the compressed payload.
    """
    out = BytesIO()

    raw = self.get_value()
    payload = self.compress_data(raw)

    # The two uint32 size fields below follow the extra header bytes.
    header_size = extra_header_size + 4 + 4

    write_binary_uint32(header_size + len(payload), out)
    write_binary_uint32(len(raw), out)
    out.write(payload)

    return out.getvalue()
3551
3652
3753 class BaseDecompressor(object):
4258 self.stream = real_stream
4359 super(BaseDecompressor, self).__init__()
4460
61 def decompress_data(self, data, uncompressed_size):
62 raise NotImplementedError
63
4564 def check_hash(self, compressed_data, compressed_hash):
4665 if CityHash128(compressed_data) != compressed_hash:
4766 raise errors.ChecksumDoesntMatchError()
4867
def get_decompressed_data(self, method_byte, compressed_hash,
                          extra_header_size):
    """Read one compressed block from the stream, verify its checksum
    (via ``check_hash``) and return the decompressed payload.
    """
    size_with_header = read_binary_uint32(self.stream)
    compressed_size = size_with_header - extra_header_size - 4

    compressed = BytesIO(self.stream.read(compressed_size))

    # The hash covers: method byte + size field + compressed body.
    block_check = BytesIO()
    write_binary_uint8(method_byte, block_check)
    write_binary_uint32(size_with_header, block_check)
    block_check.write(compressed.getvalue())

    self.check_hash(block_check.getvalue(), compressed_hash)

    # First uint32 of the body is the uncompressed size hint.
    uncompressed_size = read_binary_uint32(compressed)

    body = compressed.read(compressed_size - 4)

    return self.decompress_data(body, uncompressed_size)
0 from __future__ import absolute_import
1 from io import BytesIO
2
30 from lz4 import block
41
52 from .base import BaseCompressor, BaseDecompressor
63 from ..protocol import CompressionMethod, CompressionMethodByte
7 from ..reader import read_binary_uint32
8 from ..writer import write_binary_uint32, write_binary_uint8
94
105
116 class Compressor(BaseCompressor):
138 method_byte = CompressionMethodByte.LZ4
149 mode = 'default'
1510
16 def get_compressed_data(self, extra_header_size):
17 rv = BytesIO()
18
19 data = self.get_value()
20 compressed = block.compress(data, store_size=False, mode=self.mode)
21
22 header_size = extra_header_size + 4 + 4 # sizes
23
24 write_binary_uint32(header_size + len(compressed), rv)
25 write_binary_uint32(len(data), rv)
26 rv.write(compressed)
27
28 return rv.getvalue()
def compress_data(self, data):
    """LZ4-compress ``data``; the size prefix is omitted because the
    base class frames sizes itself."""
    payload = block.compress(data, store_size=False, mode=self.mode)
    return payload
2913
3014
3115 class Decompressor(BaseDecompressor):
3216 method = CompressionMethod.LZ4
3317 method_byte = CompressionMethodByte.LZ4
3418
35 def get_decompressed_data(self, method_byte, compressed_hash,
36 extra_header_size):
37 size_with_header = read_binary_uint32(self.stream)
38 compressed_size = size_with_header - extra_header_size - 4
39
40 compressed = BytesIO(self.stream.read(compressed_size))
41
42 block_check = BytesIO()
43 write_binary_uint8(method_byte, block_check)
44 write_binary_uint32(size_with_header, block_check)
45 block_check.write(compressed.getvalue())
46
47 self.check_hash(block_check.getvalue(), compressed_hash)
48
49 uncompressed_size = read_binary_uint32(compressed)
50
51 compressed = compressed.read(compressed_size - 4)
52
53 return block.decompress(compressed,
54 uncompressed_size=uncompressed_size)
def decompress_data(self, data, uncompressed_size):
    """LZ4-decompress ``data``; the raw size must be supplied since
    blocks are stored without a size prefix."""
    raw = block.decompress(data, uncompressed_size=uncompressed_size)
    return raw
0 from __future__ import absolute_import
1 from io import BytesIO
2
30 import zstd
41
52 from .base import BaseCompressor, BaseDecompressor
63 from ..protocol import CompressionMethod, CompressionMethodByte
7 from ..reader import read_binary_uint32
8 from ..writer import write_binary_uint32, write_binary_uint8
94
105
116 class Compressor(BaseCompressor):
127 method = CompressionMethod.ZSTD
138 method_byte = CompressionMethodByte.ZSTD
149
15 def get_compressed_data(self, extra_header_size):
16 rv = BytesIO()
17
18 data = self.get_value()
19 compressed = zstd.compress(data)
20
21 header_size = extra_header_size + 4 + 4 # sizes
22
23 write_binary_uint32(header_size + len(compressed), rv)
24 write_binary_uint32(len(data), rv)
25 rv.write(compressed)
26
27 return rv.getvalue()
def compress_data(self, data):
    """ZSTD-compress ``data`` with the library's default settings."""
    payload = zstd.compress(data)
    return payload
2812
2913
3014 class Decompressor(BaseDecompressor):
3115 method = CompressionMethod.ZSTD
3216 method_byte = CompressionMethodByte.ZSTD
3317
34 def get_decompressed_data(self, method_byte, compressed_hash,
35 extra_header_size):
36 size_with_header = read_binary_uint32(self.stream)
37 compressed_size = size_with_header - extra_header_size - 4
38
39 compressed = BytesIO(self.stream.read(compressed_size))
40
41 block_check = BytesIO()
42 write_binary_uint8(method_byte, block_check)
43 write_binary_uint32(size_with_header, block_check)
44 block_check.write(compressed.getvalue())
45
46 self.check_hash(block_check.getvalue(), compressed_hash)
47
48 compressed = compressed.read(compressed_size - 4)
49
50 return zstd.decompress(compressed)
def decompress_data(self, data, uncompressed_size):
    """ZSTD-decompress ``data``.

    ``uncompressed_size`` is accepted only for interface compatibility
    with the LZ4 decompressor; zstd does not need it here.
    """
    raw = zstd.decompress(data)
    return raw
2222 from .readhelpers import read_exception
2323 from .settings.writer import write_settings
2424 from .streams.native import BlockInputStream, BlockOutputStream
25 from .util.compat import threading
2526 from .varint import write_varint, read_varint
2627 from .writer import write_binary_str
2728
202203 self.block_out = None
203204 self.block_in_raw = None # log blocks are always not compressed
204205
206 self._lock = threading.Lock()
207 self.is_query_executing = False
208
205209 super(Connection, self).__init__()
206210
207211 def get_description(self):
208212 return '{}:{}'.format(self.host, self.port)
209213
210214 def force_connect(self):
215 self.check_query_execution()
216
211217 if not self.connected:
212218 self.connect()
213219
354360 self.block_in_raw = None
355361 self.block_out = None
356362
363 self.is_query_executing = False
364
357365 def disconnect(self):
358366 """
359367 Closes connection between server and client.
495503 log_block(block)
496504
497505 elif packet_type == ServerPacketTypes.END_OF_STREAM:
506 self.is_query_executing = False
498507 pass
499508
500509 elif packet_type == ServerPacketTypes.TABLE_COLUMNS:
612621 'Empty table "{}" structure'.format(table['name'])
613622 )
614623
615 block = RowOrientedBlock(table['structure'], table['data'],
616 types_check=types_check)
624 data = table['data']
625 block_cls = RowOrientedBlock
626
627 if self.context.client_settings['use_numpy']:
628 from .numpy.block import NumpyColumnOrientedBlock
629
630 columns = [x[0] for x in table['structure']]
631 data = [data[column].values for column in columns]
632
633 block_cls = NumpyColumnOrientedBlock
634
635 block = block_cls(table['structure'], data,
636 types_check=types_check)
617637 self.send_data(block, table_name=table['name'])
618638
619639 # Empty block, end of data transfer.
635655 'Unexpected packet from server {} (expected {}, got {})'
636656 .format(self.get_description(), expected, packet_type)
637657 )
658
def check_query_execution(self):
    """Refuse to start a new query while a previous one on this
    connection has not been fully consumed.

    Raises:
        errors.PartiallyConsumedQueryError: if a query is already
            executing on this connection.
    """
    # Hold the lock for the whole check-and-set so the flag update is
    # atomic and the lock is always released — even when raising.
    # (Previously the code called acquire(blocking=False), ignored the
    # result, and leaked the lock when the error was raised.)
    with self._lock:
        if self.is_query_executing:
            raise errors.PartiallyConsumedQueryError()

        self.is_query_executing = True
309309 self._rowcount = response
310310 response = None
311311
312 if not response:
312 if not response or isinstance(response, int):
313313 self._columns = self._types = self._rows = []
314 if isinstance(response, int):
315 self._rowcount = response
314316 return
315317
316318 if self._stream_results:
443443
444444 class CannotParseDomainError(Error):
445445 code = ErrorCodes.CANNOT_PARSE_DOMAIN_VALUE_FROM_STRING
446
447
class PartiallyConsumedQueryError(Error):
    """Raised when a new query is started before the previous query's
    results have been fully read from the connection.
    """

    # NOTE(review): -1 looks like a driver-local sentinel rather than a
    # server-reported error code — confirm against ErrorCodes.
    code = -1

    def __str__(self):
        return 'Simultaneous queries on single connection detected'
# Drop this when minimum supported version will be 3.7.
try:
    import threading
except ImportError:
    import dummy_threading as threading  # noqa: F401

try:
    # tzlocal 4.0+ exposes get_localzone_name(); using it avoids the
    # warning triggered by accessing get_localzone().key.
    from tzlocal import get_localzone_name

    def get_localzone_name_compat():
        """Best-effort local timezone name; None when undeterminable."""
        try:
            return get_localzone_name()
        except Exception:
            return None
except ImportError:
    from tzlocal import get_localzone

    def get_localzone_name_compat():
        """Best-effort local timezone name; None when undeterminable."""
        try:
            # tzlocal 3.x exposes .key; older releases expose .zone.
            return get_localzone().key
        except AttributeError:
            return get_localzone().zone
        except Exception:
            return None
00 from datetime import date, datetime
11 from enum import Enum
22 from uuid import UUID
3
4 from pytz import timezone
35
46
57 escape_chars_map = {
1618 }
1719
1820
19 def escape_param(item):
def escape_datetime(item, context):
    """Format a datetime as a quoted literal.

    Timezone-aware values are converted to the server's timezone first
    (taken from ``context.server_info.timezone``); naive values are
    rendered as-is.
    """
    server_tz = timezone(context.server_info.timezone)

    value = item.astimezone(server_tz) if item.tzinfo is not None else item

    return "'%s'" % value.strftime('%Y-%m-%d %H:%M:%S')
28
29
30 def escape_param(item, context):
2031 if item is None:
2132 return 'NULL'
2233
2334 elif isinstance(item, datetime):
24 return "'%s'" % item.strftime('%Y-%m-%d %H:%M:%S')
35 return escape_datetime(item, context)
2536
2637 elif isinstance(item, date):
2738 return "'%s'" % item.strftime('%Y-%m-%d')
3041 return "'%s'" % ''.join(escape_chars_map.get(c, c) for c in item)
3142
3243 elif isinstance(item, list):
33 return "[%s]" % ', '.join(str(escape_param(x)) for x in item)
44 return "[%s]" % ', '.join(str(escape_param(x, context)) for x in item)
3445
3546 elif isinstance(item, tuple):
36 return "(%s)" % ', '.join(str(escape_param(x)) for x in item)
47 return "(%s)" % ', '.join(str(escape_param(x, context)) for x in item)
3748
3849 elif isinstance(item, Enum):
39 return escape_param(item.value)
50 return escape_param(item.value, context)
4051
4152 elif isinstance(item, UUID):
4253 return "'%s'" % str(item)
4556 return item
4657
4758
def escape_params(params, context):
    """Escape every value of the ``params`` mapping for substitution."""
    return {
        key: escape_param(value, context)
        for key, value in params.items()
    }
1313
1414 Install desired Python version with system package manager/pyenv/another manager.
1515
16 Install test requirements and build package:
17
18 .. code-block:: bash
19
20 python testsrequire.py && python setup.py develop
21
22 You should install cython if you want to change ``*.pyx`` files:
23
24 .. code-block:: bash
25
26 pip install cython
27
1628 ClickHouse on host machine
1729 ^^^^^^^^^^^^^^^^^^^^^^^^^^
1830
2335
2436 .. code-block:: bash
2537
26 python setup.py test
38 py.test -v
2739
2840 ClickHouse in docker
2941 ^^^^^^^^^^^^^^^^^^^^
3244
3345 .. code-block:: bash
3446
35 docker run --rm -e "TZ=Europe/Moscow" -p 127.0.0.1:9000:9000 --name test-clickhouse-server yandex/clickhouse-server:$VERSION
47 docker run --rm -e "TZ=Europe/Moscow" -p 127.0.0.1:9000:9000 --name test-clickhouse-server yandex/clickhouse-server:$VERSION
3648
3749 Create container with the same version of ``clickhouse-client``:
3850
3951 .. code-block:: bash
4052
41 docker run --rm --entrypoint "/bin/sh" --name test-clickhouse-client --link test-clickhouse-server:clickhouse-server yandex/clickhouse-client:$VERSION -c 'while :; do sleep 1; done'
53 docker run --rm --entrypoint "/bin/sh" --name test-clickhouse-client --link test-clickhouse-server:clickhouse-server yandex/clickhouse-client:$VERSION -c 'while :; do sleep 1; done'
4254
4355 Create ``clickhouse-client`` script on your host machine:
4456
6375 .. code-block:: bash
6476
6577 export TZ=UTC
66 python setup.py test
78 py.test -v
79
80 GitHub Actions in forked repository
81 -----------------------------------
82
83 Workflows in forked repositories can be used for running tests.
84
85 Workflows don't run in forked repositories by default.
86 You must enable GitHub Actions in the **Actions** tab of the forked repository.
2020
2121 By default there are wheels for Linux, Mac OS X and Windows.
2222
23 Packages for Linux and Mac OS X are available for python: 3.4 -- 3.9.
23 Packages for Linux and Mac OS X are available for python: 3.6 -- 3.10.
2424
25 Packages for Windows are available for python: 3.5 -- 3.9.
25 Packages for Windows are available for python: 3.6 -- 3.10.
26
27 Starting from version *0.2.3* there are wheels for musl-based Linux distributions.
2628
2729 Dependencies
2830 ------------
5656 [('2018-10-21', 3)]
5757
5858 Percent symbols in inlined constants should be doubled if you mix constants
59 with ``%`` symbol and ``%(x)s`` parameters.
60
61 .. code-block:: python
62
63 >>> client.execute(
64 ... "SELECT 'test' like '%%es%%', %(x)s",
65 ... {'x': 1}
66 ... )
59 with ``%`` symbol and ``%(myvar)s`` parameters.
60
61 .. code-block:: python
62
63 >>> client.execute(
64 ... "SELECT 'test' like '%%es%%', %(myvar)s",
65 ... {'myvar': 1}
66 ... )
67
68 NOTE: formatting queries using Python's f-strings or concatenation can lead to SQL injections.
69 Use ``%(myvar)s`` parameters instead.
6770
6871 Customisation ``SELECT`` output with ``FORMAT`` clause is not supported.
6972
166166 INSERT types: :data:`~types.NoneType`, ``T``.
167167
168168 SELECT type: :data:`~types.NoneType`, ``T``.
169
170
171 Bool
172 ----
173
174 INSERT types: :class:`bool`,
175
176 SELECT type: :class:`bool`.
169177
170178
171179 UUID
274282 SELECT type: :class:`tuple`.
275283
276284
277 Nested
Nested(flatten_nested=1, default)
---------------------------------
279287
280 Nested type is represented by sequence of arrays. In example below actual
Nested type is represented by a sequence of arrays when flatten_nested=1.
In the example below the actual columns are ``col.name`` and ``col.version``.
282290
283291 .. code-block:: sql
326334 (['a', 'b', 'c'], [100, 200, 300]),
327335 ])
328336
Nested(flatten_nested=0)
------------------------
339
340 Nested type is represented by array of named tuples when flatten_nested=0.
341
342 .. code-block:: sql
343
344 :) SET flatten_nested = 0;
345
346 SET flatten_nested = 0
347
348 Ok.
349
350 0 rows in set. Elapsed: 0.006 sec.
351
352 :) CREATE TABLE test_nested (col Nested(name String, version UInt16)) Engine = Memory;
353
354 CREATE TABLE test_nested
355 (
356 `col` Nested(name String, version UInt16)
357 )
358 ENGINE = Memory
359
360 Ok.
361
362 0 rows in set. Elapsed: 0.005 sec.
363
364 :) DESCRIBE TABLE test_nested FORMAT TSV;
365
366 DESCRIBE TABLE test_nested
367 FORMAT TSV
368
369 col Nested(name String, version UInt16)
370
371 1 rows in set. Elapsed: 0.004 sec.
372
373 Inserting data into nested column in ``clickhouse-client``:
374
375 .. code-block:: sql
376
377 :) INSERT INTO test_nested VALUES ([('a', 100), ('b', 200), ('c', 300)]);
378
379 INSERT INTO test_nested VALUES
380
381 Ok.
382
383 1 rows in set. Elapsed: 0.003 sec.
384
385 Inserting data into nested column with ``clickhouse-driver``:
386
387 .. code-block:: python
388
389 client.execute(
390 'INSERT INTO test_nested VALUES',
391 [([('a', 100), ('b', 200), ('c', 300)]),]
392 )
393 # or
394 client.execute(
395 'INSERT INTO test_nested VALUES',
396 [{'col': [{'name': 'a', 'version': 100}, {'name': 'b', 'version': 200}, {'name': 'c', 'version': 300}]}]
397 )
398
329399 Map(key, value)
330400 ------------------
331401
1111 else:
1212 USE_CYTHON = True
1313
14 USE_NUMPY = bool(os.getenv('USE_NUMPY', False))
1514 CYTHON_TRACE = bool(os.getenv('CYTHON_TRACE', False))
16
1715
1816 here = os.path.abspath(os.path.dirname(__file__))
1917
6361
6462 extensions = cythonize(extensions, compiler_directives=compiler_directives)
6563
66 tests_require = [
67 'nose',
68 'parameterized',
69 'freezegun',
70 'lz4<=3.0.1; implementation_name=="pypy"',
71 'lz4; implementation_name!="pypy"',
72 'zstd',
73 'clickhouse-cityhash>=1.0.2.1'
74 ]
75
76 if USE_NUMPY:
77 tests_require.extend(['numpy', 'pandas'])
78
7964 setup(
8065 name='clickhouse-driver',
8166 version=read_version(),
10994
11095 'Programming Language :: SQL',
11196 'Programming Language :: Python :: 3',
112 'Programming Language :: Python :: 3.4',
11397 'Programming Language :: Python :: 3.5',
11498 'Programming Language :: Python :: 3.6',
11599 'Programming Language :: Python :: 3.7',
116100 'Programming Language :: Python :: 3.8',
117101 'Programming Language :: Python :: 3.9',
102 'Programming Language :: Python :: 3.10',
118103 'Programming Language :: Python :: Implementation :: PyPy',
119104
120105 'Topic :: Database',
134119 python_requires='>=3.4.*, <4',
135120 install_requires=[
136121 'pytz',
137 'tzlocal'
122 'tzlocal',
123 'tzlocal<2.1; python_version=="3.5"'
138124 ],
139125 ext_modules=extensions,
140126 extras_require={
146132 'zstd': ['zstd', 'clickhouse-cityhash>=1.0.2.1'],
147133 'numpy': ['numpy>=1.12.0', 'pandas>=0.24.0']
148134 },
149 test_suite='nose.collector',
150 tests_require=tests_require
135 test_suite='pytest'
151136 )
0 from tests.testcase import BaseTestCase
1 from clickhouse_driver import errors
2
3
4 class BoolTestCase(BaseTestCase):
5 required_server_version = (21, 12)
6
7 def test_simple(self):
8 columns = ("a Bool")
9
10 data = [(1,), (0,), (True,), (False,), (None,), ("False",), ("",)]
11 with self.create_table(columns):
12 self.client.execute('INSERT INTO test (a) VALUES', data)
13
14 query = 'SELECT * FROM test'
15 inserted = self.emit_cli(query)
16 self.assertEqual(
17 inserted, (
18 'true\n'
19 'false\n'
20 'true\n'
21 'false\n'
22 'false\n'
23 'true\n'
24 'false\n'
25 )
26 )
27
28 inserted = self.client.execute(query)
29 self.assertEqual(
30 inserted, [
31 (True, ),
32 (False, ),
33 (True, ),
34 (False, ),
35 (False, ),
36 (True, ),
37 (False, ),
38 ]
39 )
40
41 def test_errors(self):
42 columns = "a Bool"
43 with self.create_table(columns):
44 with self.assertRaises(errors.TypeMismatchError):
45 self.client.execute(
46 'INSERT INTO test (a) VALUES', [(1, )],
47 types_check=True
48 )
49
50 def test_nullable(self):
51 columns = "a Nullable(Bool)"
52
53 data = [(None, ), (True, ), (False, )]
54 with self.create_table(columns):
55 self.client.execute('INSERT INTO test (a) VALUES', data)
56
57 query = 'SELECT * FROM test'
58 inserted = self.emit_cli(query)
59 self.assertEqual(
60 inserted, (
61 '\\N\ntrue\nfalse\n'
62 )
63 )
64
65 inserted = self.client.execute(query)
66 self.assertEqual(
67 inserted, [
68 (None, ), (True, ), (False, ),
69 ]
70 )
0 from contextlib import contextmanager
10 from datetime import date, datetime
2 import os
3 from time import tzset
41 from unittest.mock import patch
52
63 from pytz import timezone, utc, UnknownTimeZoneError
74 import tzlocal
85
96 from tests.testcase import BaseTestCase
10 from tests.util import require_server_version
11
12
13 class BaseDateTimeTestCase(BaseTestCase):
14 def setUp(self):
15 super(BaseDateTimeTestCase, self).setUp()
16
17 # Bust tzlocal cache.
18 try:
19 tzlocal.unix._cache_tz = None
20 except AttributeError:
21 pass
22
23 try:
24 tzlocal.win32._cache_tz = None
25 except AttributeError:
26 pass
27
28
29 class DateTimeTestCase(BaseDateTimeTestCase):
7 from tests.util import require_server_version, patch_env_tz
8
9
10 class DateTimeTestCase(BaseTestCase):
3011 def test_simple(self):
3112 with self.create_table('a Date, b DateTime'):
3213 data = [(date(2012, 10, 25), datetime(2012, 10, 25, 14, 7, 19))]
8162 self.assertEqual(inserted, data)
8263
8364 def test_handle_errors_from_tzlocal(self):
84 with patch('tzlocal.get_localzone') as mocked_get_localzone:
85 mocked_get_localzone.side_effect = UnknownTimeZoneError()
65 with patch('tzlocal.get_localzone') as mocked:
66 mocked.side_effect = UnknownTimeZoneError()
8667 self.client.execute('SELECT now()')
68
69 if hasattr(tzlocal, 'get_localzone_name'):
70 with patch('tzlocal.get_localzone_name') as mocked:
71 mocked.side_effect = None
72 self.client.execute('SELECT now()')
8773
8874 @require_server_version(20, 1, 2)
8975 def test_datetime64_frac_trunc(self):
182168 )
183169
184170
185 class DateTimeTimezonesTestCase(BaseDateTimeTestCase):
171 class DateTimeTimezonesTestCase(BaseTestCase):
186172 dt_type = 'DateTime'
187
188 @contextmanager
189 def patch_env_tz(self, tz_name):
190 # Although in many cases, changing the TZ environment variable may
191 # affect the output of functions like localtime() without calling
192 # tzset(), this behavior should not be relied on.
193 # https://docs.python.org/3/library/time.html#time.tzset
194 with patch.dict(os.environ, {'TZ': tz_name}):
195 tzset()
196 yield
197
198 tzset()
199173
200174 # Asia/Kamchatka = UTC+12
201175 # Asia/Novosibirsk = UTC+7
228202 offset = timezone(server_tz_name).utcoffset(self.dt).total_seconds()
229203 timestamp = 1500010800 - int(offset)
230204
231 with self.patch_env_tz('Asia/Novosibirsk'):
205 with patch_env_tz('Asia/Novosibirsk'):
232206 with self.create_table(self.table_columns()):
233207 self.client.execute(
234208 'INSERT INTO test (a) VALUES', [(self.dt, )]
259233
260234 settings = {'use_client_time_zone': True}
261235
262 with self.patch_env_tz('Asia/Novosibirsk'):
236 with patch_env_tz('Asia/Novosibirsk'):
263237 with self.create_table(self.table_columns()):
264238 self.client.execute(
265239 'INSERT INTO test (a) VALUES', [(self.dt, )],
295269 server_tz_name = self.client.execute('SELECT timezone()')[0][0]
296270 offset = timezone(server_tz_name).utcoffset(self.dt)
297271
298 with self.patch_env_tz('Asia/Novosibirsk'):
272 with patch_env_tz('Asia/Novosibirsk'):
299273 with self.create_table(self.table_columns()):
300274 self.client.execute(
301275 'INSERT INTO test (a) VALUES', [(self.dt_tz, )]
328302
329303 settings = {'use_client_time_zone': True}
330304
331 with self.patch_env_tz('Asia/Novosibirsk'):
305 with patch_env_tz('Asia/Novosibirsk'):
332306 with self.create_table(self.table_columns()):
333307 self.client.execute(
334308 'INSERT INTO test (a) VALUES', [(self.dt_tz, )],
365339 # into column with timezone Asia/Novosibirsk
366340 # using server's timezone (Europe/Moscow)
367341
368 with self.patch_env_tz('Europe/Moscow'):
342 with patch_env_tz('Europe/Moscow'):
369343 with self.create_table(self.table_columns(with_tz=True)):
370344 self.client.execute(
371345 'INSERT INTO test (a) VALUES', [(self.dt, )]
401375
402376 settings = {'use_client_time_zone': True}
403377
404 with self.patch_env_tz('Europe/Moscow'):
378 with patch_env_tz('Europe/Moscow'):
405379 with self.create_table(self.table_columns(with_tz=True)):
406380 self.client.execute(
407381 'INSERT INTO test (a) VALUES', [(self.dt, )],
436410 # into column with timezone Asia/Novosibirsk
437411 # using server's timezone (Europe/Moscow)
438412
439 with self.patch_env_tz('Europe/Moscow'):
413 with patch_env_tz('Europe/Moscow'):
440414 with self.create_table(self.table_columns(with_tz=True)):
441415 self.client.execute(
442416 'INSERT INTO test (a) VALUES', [(self.dt_tz, )]
476450
477451 settings = {'use_client_time_zone': True}
478452
479 with self.patch_env_tz('Europe/Moscow'):
453 with patch_env_tz('Europe/Moscow'):
480454 with self.create_table(self.table_columns(with_tz=True)):
481455 self.client.execute(
482456 'INSERT INTO test (a) VALUES', [(self.dt_tz, )],
139139
140140 def test_nullable(self):
141141 with self.create_table('a Nullable(Decimal32(3))'):
142 data = [(300.42, ), (None, ), ]
142 data = [(300.42, ), (None, )]
143143 self.client.execute(
144144 'INSERT INTO test (a) VALUES', data
145145 )
157157
158158 def test_no_scale(self):
159159 with self.create_table('a Decimal32(0)'):
160 data = [(2147483647, ), ]
160 data = [(2147483647, )]
161161 self.client.execute(
162162 'INSERT INTO test (a) VALUES', data
163163 )
170170 self.assertEqual(inserted, [(Decimal('2147483647'), )])
171171
172172 def test_type_mismatch(self):
173 data = [(2147483649,), ]
173 data = [(2147483649, )]
174174 with self.create_table('a Decimal32(0)'):
175175 with self.assertRaises(errors.TypeMismatchError) as e:
176176 self.client.execute(
185185 )
186186
187187 self.assertIn('Column a', str(e.exception))
188
189 def test_type_mismatch_scale(self):
190 data = [(1.234,)]
191 with self.create_table('a Decimal32(2)'):
192 with self.assertRaises(errors.TypeMismatchError) as e:
193 self.client.execute(
194 'INSERT INTO test (a) VALUES', data, types_check=True
195 )
196
197 self.assertIn('1.234 for column "a"', str(e.exception))
198
199 # Without types_check decimal will be cropped.
200 self.client.execute('INSERT INTO test (a) VALUES', data)
201 query = 'SELECT * FROM test'
202 inserted = self.emit_cli(query)
203 self.assertEqual(inserted, '1.23\n')
204 inserted = self.client.execute(query)
205 self.assertEqual(inserted, [(Decimal('1.23'), )])
188206
189207 def test_preserve_precision(self):
190208 data = [(1.66, ), (1.15, )]
0 from tests.testcase import BaseTestCase
1 from tests.util import require_server_version
2 from clickhouse_driver.columns import nestedcolumn
3
4
5 class NestedTestCase(BaseTestCase):
def entuple(self, lst):
    """Recursively convert nested lists into nested tuples."""
    items = []
    for x in lst:
        items.append(self.entuple(x) if isinstance(x, list) else x)
    return tuple(items)
10
11 @require_server_version(21, 3, 13)
12 def test_simple(self):
13 columns = 'n Nested(i Int32, s String)'
14
15 # INSERT INTO test_nested VALUES ([(0, 'a'), (1, 'b')]);
16 data = [([(0, 'a'), (1, 'b')],)]
17
18 with self.create_table(columns, flatten_nested=0):
19 self.client.execute(
20 'INSERT INTO test (n) VALUES', data
21 )
22
23 query = 'SELECT * FROM test'
24 inserted = self.emit_cli(query)
25 self.assertEqual(inserted, "[(0,'a'),(1,'b')]\n")
26
27 inserted = self.client.execute(query)
28 self.assertEqual(inserted, data)
29
30 projected_i = self.client.execute('SELECT n.i FROM test')
31 self.assertEqual(
32 projected_i,
33 [([0, 1],)]
34 )
35
36 projected_s = self.client.execute('SELECT n.s FROM test')
37 self.assertEqual(
38 projected_s,
39 [(['a', 'b'],)]
40 )
41
42 @require_server_version(21, 3, 13)
43 def test_multiple_rows(self):
44 columns = 'n Nested(i Int32, s String)'
45
46 data = [([(0, 'a'), (1, 'b')],), ([(3, 'd'), (4, 'e')],)]
47
48 with self.create_table(columns, flatten_nested=0):
49 self.client.execute(
50 'INSERT INTO test (n) VALUES', data
51 )
52
53 query = 'SELECT * FROM test'
54 inserted = self.emit_cli(query)
55 self.assertEqual(
56 inserted,
57 "[(0,'a'),(1,'b')]\n[(3,'d'),(4,'e')]\n"
58 )
59
60 inserted = self.client.execute(query)
61 self.assertEqual(inserted, data)
62
63 @require_server_version(21, 3, 13)
64 def test_dict(self):
65 columns = 'n Nested(i Int32, s String)'
66
67 data = [
68 {'n': [{'i': 0, 's': 'a'}, {'i': 1, 's': 'b'}]},
69 {'n': [{'i': 3, 's': 'd'}, {'i': 4, 's': 'e'}]},
70 ]
71
72 with self.create_table(columns, flatten_nested=0):
73 self.client.execute(
74 'INSERT INTO test (n) VALUES', data
75 )
76
77 query = 'SELECT * FROM test'
78 inserted = self.emit_cli(query)
79 self.assertEqual(
80 inserted,
81 "[(0,'a'),(1,'b')]\n[(3,'d'),(4,'e')]\n"
82 )
83
84 inserted = self.client.execute(query)
85 self.assertEqual(
86 inserted,
87 [([(0, 'a'), (1, 'b')],), ([(3, 'd'), (4, 'e')],)]
88 )
89
90 def test_get_nested_columns(self):
91 self.assertEqual(
92 nestedcolumn.get_nested_columns(
93 'Nested(a Tuple(Array(Int8)),\n b Nullable(String))',
94 ),
95 ['Tuple(Array(Int8))', 'Nullable(String)']
96 )
97
98 def test_get_columns_with_types(self):
99 self.assertEqual(
100 nestedcolumn.get_columns_with_types(
101 'Nested(a Tuple(Array(Int8)),\n b Nullable(String))',
102 ),
103 [('a', 'Tuple(Array(Int8))'), ('b', 'Nullable(String)')]
104 )
105
106 def test_get_inner_spec(self):
107 inner = 'a Tuple(Array(Int8), Array(Int64)), b Nullable(String)'
108 self.assertEqual(
109 nestedcolumn.get_inner_spec('Nested({}) dummy '.format(inner)),
110 inner
111 )
0 import pytest
1
2
@pytest.fixture(autouse=True)
def assert_empty_output(capfd):
    # Run the test, then fail it if anything leaked to stdout/stderr.
    yield

    out, err = capfd.readouterr()
    assert out == ''
    assert err == ''
version: '3'

services:
  # Server under test; image tag comes from VERSION in tests/.env.
  clickhouse-server:
    image: "yandex/clickhouse-server:$VERSION"
    container_name: test-clickhouse-server
    environment:
      - TZ=Europe/Moscow
    ports:
      # Native protocol port, bound to loopback only.
      - "127.0.0.1:9000:9000"

  # Idle client container; tests invoke clickhouse-client via docker exec.
  clickhouse-client:
    image: "yandex/clickhouse-client:$VERSION"
    container_name: test-clickhouse-client
    entrypoint: /bin/sh
    command:
      - "-c"
      - 'while :; do sleep 1; done'
0 from contextlib import contextmanager
10 from datetime import datetime, date
2 import os
3 from time import tzset
41 from unittest.mock import patch
52
63 try:
1714 import tzlocal
1815
1916 from tests.numpy.testcase import NumpyBaseTestCase
20 from tests.util import require_server_version
17 from tests.util import require_server_version, patch_env_tz
2118
2219
2320 class BaseDateTimeTestCase(NumpyBaseTestCase):
24 def setUp(self):
25 super(BaseDateTimeTestCase, self).setUp()
26
27 # Bust tzlocal cache.
28 try:
29 tzlocal.unix._cache_tz = None
30 except AttributeError:
31 pass
32
33 try:
34 tzlocal.win32._cache_tz = None
35 except AttributeError:
36 pass
37
3821 def make_numpy_d64ns(self, items):
3922 return np.array(items, dtype='datetime64[ns]')
4023
120103 self.assertEqual(inserted[0].dtype, object)
121104
122105 def test_handle_errors_from_tzlocal(self):
123 with patch('tzlocal.get_localzone') as mocked_get_localzone:
124 mocked_get_localzone.side_effect = UnknownTimeZoneError()
106 with patch('tzlocal.get_localzone') as mocked:
107 mocked.side_effect = UnknownTimeZoneError()
125108 self.client.execute('SELECT now()')
109
110 if hasattr(tzlocal, 'get_localzone_name'):
111 with patch('tzlocal.get_localzone_name') as mocked:
112 mocked.side_effect = None
113 self.client.execute('SELECT now()')
126114
127115 @require_server_version(20, 1, 2)
128116 def test_datetime64_frac_trunc(self):
202190
203191 return pd.to_datetime(np.array([dt] * 2, dtype=dtype)) \
204192 .tz_localize(tz_name).to_numpy(dtype)
205
206 @contextmanager
207 def patch_env_tz(self, tz_name):
208 # Although in many cases, changing the TZ environment variable may
209 # affect the output of functions like localtime() without calling
210 # tzset(), this behavior should not be relied on.
211 # https://docs.python.org/3/library/time.html#time.tzset
212 with patch.dict(os.environ, {'TZ': tz_name}):
213 tzset()
214 yield
215
216 tzset()
217193
218194 # Asia/Kamchatka = UTC+12
219195 # Asia/Novosibirsk = UTC+7
256232 offset = timezone(server_tz_name).utcoffset(self.dt).total_seconds()
257233 timestamp = 1500010800 - int(offset)
258234
259 with self.patch_env_tz('Asia/Novosibirsk'):
235 with patch_env_tz('Asia/Novosibirsk'):
260236 with self.create_table(self.table_columns()):
261237 self.client.execute(
262238 'INSERT INTO test (a) VALUES', [self.dt_arr], columnar=True
289265
290266 settings = {'use_client_time_zone': True}
291267
292 with self.patch_env_tz('Asia/Novosibirsk'):
268 with patch_env_tz('Asia/Novosibirsk'):
293269 with self.create_table(self.table_columns()):
294270 self.client.execute(
295271 'INSERT INTO test (a) VALUES', [self.dt_arr],
328304 server_tz_name = self.client.execute('SELECT timezone()')[0][0]
329305 offset = timezone(server_tz_name).utcoffset(self.dt)
330306
331 with self.patch_env_tz('Asia/Novosibirsk'):
307 with patch_env_tz('Asia/Novosibirsk'):
332308 with self.create_table(self.table_columns()):
333309 self.client.execute(
334310 'INSERT INTO test (a) VALUES', [self.dt_tz], columnar=True
364340
365341 settings = {'use_client_time_zone': True}
366342
367 with self.patch_env_tz('Asia/Novosibirsk'):
343 with patch_env_tz('Asia/Novosibirsk'):
368344 with self.create_table(self.table_columns()):
369345 self.client.execute(
370346 'INSERT INTO test (a) VALUES', [self.dt_tz],
404380 # into column with timezone Asia/Novosibirsk
405381 # using server's timezone (Europe/Moscow)
406382
407 with self.patch_env_tz('Europe/Moscow'):
383 with patch_env_tz('Europe/Moscow'):
408384 with self.create_table(self.table_columns(with_tz=True)):
409385 self.client.execute(
410386 'INSERT INTO test (a) VALUES', [self.dt_arr], columnar=True
440416
441417 settings = {'use_client_time_zone': True}
442418
443 with self.patch_env_tz('Europe/Moscow'):
419 with patch_env_tz('Europe/Moscow'):
444420 with self.create_table(self.table_columns(with_tz=True)):
445421 self.client.execute(
446422 'INSERT INTO test (a) VALUES', [self.dt_arr],
476452 # into column with timezone Asia/Novosibirsk
477453 # using server's timezone (Europe/Moscow)
478454
479 with self.patch_env_tz('Europe/Moscow'):
455 with patch_env_tz('Europe/Moscow'):
480456 with self.create_table(self.table_columns(with_tz=True)):
481457 self.client.execute(
482458 'INSERT INTO test (a) VALUES', [self.dt_tz], columnar=True
515491
516492 settings = {'use_client_time_zone': True}
517493
518 with self.patch_env_tz('Europe/Moscow'):
494 with patch_env_tz('Europe/Moscow'):
519495 with self.create_table(self.table_columns(with_tz=True)):
520496 self.client.execute(
521497 'INSERT INTO test (a) VALUES', [self.dt_tz],
0 from parameterized import parameterized
1
02 from clickhouse_driver import errors
1
2 try:
3 from clickhouse_driver.columns.numpy.service import \
4 get_numpy_column_by_spec
5 except ImportError:
6 get_numpy_column_by_spec = None
7
3 from clickhouse_driver.columns.service import get_column_by_spec
84 from clickhouse_driver.context import Context
95
106 from tests.numpy.testcase import NumpyBaseTestCase
139 class OtherColumnsTestCase(NumpyBaseTestCase):
1410 def get_column(self, spec):
1511 ctx = Context()
16 ctx.client_settings = {'strings_as_bytes': False}
17 return get_numpy_column_by_spec(spec, {'context': ctx})
12 ctx.client_settings = {'strings_as_bytes': False, 'use_numpy': True}
13 return get_column_by_spec(spec, {'context': ctx})
1814
19 def test_enum(self):
20 col = self.get_column("Enum8('hello' = 1, 'world' = 2)")
21 self.assertIsNotNone(col)
22
23 def test_decimal(self):
24 col = self.get_column('Decimal(8, 4)')
25 self.assertIsNotNone(col)
26
27 def test_array(self):
28 col = self.get_column('Array(String)')
29 self.assertIsNotNone(col)
30
31 def test_tuple(self):
32 col = self.get_column('Tuple(String)')
33 self.assertIsNotNone(col)
34
35 def test_simple_aggregation_function(self):
36 col = self.get_column('SimpleAggregateFunction(any, Int32)')
15 @parameterized.expand([
16 ("Enum8('hello' = 1, 'world' = 2)", ),
17 ('Decimal(8, 4)', ),
18 ('Array(String)', ),
19 ('Tuple(String)', ),
20 ('SimpleAggregateFunction(any, Int32)', ),
21 ('Map(String, String)', ),
22 ('Array(LowCardinality(String))', )
23 ])
24 def test_generic_type(self, spec):
25 col = self.get_column(spec)
3726 self.assertIsNotNone(col)
3827
3928 def test_get_unknown_column(self):
0 try:
1 import numpy as np
2 import pandas as pd
3 except ImportError:
4 np = None
5 pd = None
6
7 from tests.numpy.testcase import NumpyBaseTestCase
8
9
10 class ExternalTablesTestCase(NumpyBaseTestCase):
11 def test_select(self):
12 tables = [{
13 'name': 'test',
14 'structure': [('x', 'Int32'), ('y', 'String')],
15 'data': pd.DataFrame({
16 'x': [100, 500],
17 'y': ['abc', 'def']
18 })
19 }]
20 rv = self.client.execute(
21 'SELECT * FROM test', external_tables=tables, columnar=True
22 )
23 self.assertArraysListEqual(
24 rv, [np.array([100, 500]), np.array(['abc', 'def'])]
25 )
26
27 def test_send_empty_table(self):
28 tables = [{
29 'name': 'test',
30 'structure': [('x', 'Int32')],
31 'data': pd.DataFrame({'x': []})
32 }]
33 rv = self.client.execute(
34 'SELECT * FROM test', external_tables=tables, columnar=True
35 )
36 self.assertArraysListEqual(rv, [])
37
38 def test_send_empty_table_structure(self):
39 tables = [{
40 'name': 'test',
41 'structure': [],
42 'data': pd.DataFrame()
43 }]
44 with self.assertRaises(ValueError) as e:
45 self.client.execute(
46 'SELECT * FROM test', external_tables=tables, columnar=True
47 )
48
49 self.assertIn('Empty table "test" structure', str(e.exception))
1515
1616 def assertArraysEqual(self, first, second):
1717 return self.assertTrue((first == second).all())
18
19 def assertArraysListEqual(self, first, second):
20 self.assertEqual(len(first), len(second))
21 for x, y in zip(first, second):
22 self.assertTrue((x == y).all())
55
66 class BufferedReaderTestCase(TestCase):
77 def test_overflow_signed_int_string_size(self):
8 data = b'\xFF\xFE\xFC\xFE\x29\x80\x40\x00\x00\x01'
8 data = b'\xFF\xFE\xFC\xFE\xFE\xFE\xFE\xFE\x29\x80\x40\x00\x00\x01'
99
1010 def recv_into(buf):
1111 size = len(data)
254254 c.connection.context.client_settings['opentelemetry_tracestate'],
255255 'state'
256256 )
257
258 def test_quota_key(self):
259 c = Client.from_url('clickhouse://host?quota_key=myquota')
260 self.assertEqual(
261 c.connection.context.client_settings['quota_key'], 'myquota'
262 )
263
264 c = Client.from_url('clickhouse://host')
265 self.assertEqual(
266 c.connection.context.client_settings['quota_key'], ''
267 )
1212 supported_compressions = file_config.get('db', 'compression').split(',')
1313
1414 def _create_client(self):
15 settings = None
16 if self.compression:
17 # Set server compression method explicitly
18 # By default server sends blocks compressed by LZ4.
19 method = self.compression
20 if self.server_version > (19, ):
21 method = method.upper()
22 settings = {'network_compression_method': method}
23
1524 return Client(
1625 self.host, self.port, self.database, self.user, self.password,
17 compression=self.compression
26 compression=self.compression, settings=settings
1827 )
1928
2029 def setUp(self):
220220 'Hello or Exception', 'Unknown packet'
221221 )
222222 self.assertEqual(str(e.exception), msg)
223
224 def test_partially_consumed_query(self):
225 self.client.execute_iter('SELECT 1')
226
227 error = errors.PartiallyConsumedQueryError
228 with self.assertRaises(error) as e:
229 self.client.execute_iter('SELECT 1')
230
231 self.assertEqual(
232 str(e.exception),
233 'Simultaneous queries on single connection detected'
234 )
235 rv = self.client.execute('SELECT 1')
236 self.assertEqual(rv, [(1, )])
237
238 def test_read_all_packets_on_execute_iter(self):
239 list(self.client.execute_iter('SELECT 1'))
240 list(self.client.execute_iter('SELECT 1'))
223241
224242
225243 class FakeBufferedReader(BufferedReader):
150150 )
151151 self.assertEqual(cursor.rowcount, -1)
152152
153 def test_execute_insert(self):
154 with self.created_cursor() as cursor, self.create_table('a UInt8'):
155 cursor.execute('INSERT INTO test VALUES', [[4]])
156 self.assertEqual(cursor.rowcount, 1)
157
153158 def test_description(self):
154159 with self.created_cursor() as cursor:
155160 self.assertIsNone(cursor.description)
22
33 from datetime import date, datetime
44 from decimal import Decimal
5 from unittest.mock import Mock
56 from uuid import UUID
67
78 from enum import IntEnum, Enum
9 from pytz import timezone
810
911 from tests.testcase import BaseTestCase
12 from tests.util import patch_env_tz
1013
1114
1215 class ParametersSubstitutionTestCase(BaseTestCase):
1417 double_tpl = 'SELECT %(x)s, %(y)s'
1518
1619 def assert_subst(self, tpl, params, sql):
17 self.assertEqual(self.client.substitute_params(tpl, params), sql)
20 ctx = Mock()
21 ctx.server_info.timezone = 'Europe/Moscow'
22 self.assertEqual(self.client.substitute_params(tpl, params, ctx), sql)
1823
1924 def test_int(self):
2025 params = {'x': 123}
6469 rv = self.client.execute(tpl, params)
6570 self.assertEqual(rv, [(dt, )])
6671
72 def test_datetime_with_timezone(self):
73 dt = datetime(2017, 7, 14, 5, 40, 0)
74 params = {'x': timezone('Asia/Kamchatka').localize(dt)}
75
76 self.assert_subst(self.single_tpl, params,
77 "SELECT '2017-07-13 20:40:00'")
78
79 tpl = (
80 'SELECT toDateTime(toInt32(toDateTime(%(x)s))), '
81 'toInt32(toDateTime(%(x)s))'
82 )
83
84 with patch_env_tz('Asia/Novosibirsk'):
85 # use server timezone
86 rv = self.client.execute(
87 tpl, params, settings={'use_client_time_zone': False}
88 )
89
90 self.assertEqual(
91 rv, [(datetime(2017, 7, 13, 20, 40, 0), 1499967600)]
92 )
93
94 query = (
95 "SELECT "
96 "toDateTime(toInt32(toDateTime('{0}', 'Asia/Kamchatka'))), "
97 "toInt32(toDateTime('{0}', 'Asia/Kamchatka'))"
98 ).format('2017-07-14 05:40:00')
99
100 rv = self.emit_cli(query, use_client_time_zone=0)
101
102 self.assertEqual(rv, '2017-07-13 20:40:00\t1499967600\n')
103
104 # use client timezone
105 rv = self.client.execute(
106 tpl, params, settings={'use_client_time_zone': True}
107 )
108
109 self.assertEqual(
110 rv, [(datetime(2017, 7, 14, 0, 40, 0), 1499967600)]
111 )
112
113 query = (
114 "SELECT "
115 "toDateTime(toInt32(toDateTime('{0}', 'Asia/Kamchatka'))), "
116 "toInt32(toDateTime('{0}', 'Asia/Kamchatka'))"
117 ).format('2017-07-14 05:40:00')
118
119 rv = self.emit_cli(query, use_client_time_zone=1)
120 self.assertEqual(rv, '2017-07-14 00:40:00\t1499967600\n')
121
67122 def test_string(self):
68123 params = {'x': 'test\t\n\x16', 'y': 'ั‚ะตัั‚\t\n\x16'}
69124
171226 params = object()
172227
173228 with self.assertRaises(ValueError) as e:
174 self.client.substitute_params(self.single_tpl, params)
229 self.client.substitute_params(self.single_tpl, params, Mock())
175230
176231 self.assertEqual(e.exception.args[0],
177232 'Parameters are expected in dict form')
0 import os
1 from contextlib import contextmanager
02 from functools import wraps
13 import logging
24 from io import StringIO
5 from time import tzset
6 from unittest.mock import patch
7
8 import tzlocal
39
410
511 def skip_by_server_version(testcase, version_required):
5157
5258
5359 capture_logging = LoggingCapturer
60
61
def bust_tzlocal_cache():
    """Clear tzlocal's memoized local timezone.

    tzlocal caches the detected zone (and, in newer versions, its name);
    tests that change the TZ environment variable must reset the cache or
    the stale zone is returned.  Attribute names differ across tzlocal
    versions and platforms, hence the AttributeError guards.
    """
    try:
        tzlocal.unix._cache_tz = None
        tzlocal.unix._cache_tz_name = None
    except AttributeError:
        pass

    try:
        tzlocal.win32._cache_tz = None
        # Bug fix: this previously reset tzlocal.unix._cache_tz_name,
        # leaving the win32 name cache stale on Windows.
        tzlocal.win32._cache_tz_name = None
    except AttributeError:
        pass


@contextmanager
def patch_env_tz(tz_name):
    """Context manager: temporarily run with TZ set to ``tz_name``.

    Busts tzlocal's cache first so the patched zone is actually observed.
    """
    bust_tzlocal_cache()

    # Although in many cases, changing the TZ environment variable may
    # affect the output of functions like localtime() without calling
    # tzset(), this behavior should not be relied on.
    # https://docs.python.org/3/library/time.html#time.tzset
    with patch.dict(os.environ, {'TZ': tz_name}):
        tzset()
        yield

    # Restore process timezone state after TZ is reverted by patch.dict.
    tzset()
import os
import sys

# Truthy when the CI job should also exercise numpy/pandas code paths.
USE_NUMPY = bool(int(os.getenv('USE_NUMPY', '0')))

tests_require = [
    'pytest',
    'parameterized',
    'freezegun',
    'zstd',
    'clickhouse-cityhash>=1.0.2.1'
]

# PyPy needs an older lz4; CPython can take the latest release.
if sys.implementation.name == 'pypy':
    tests_require.append('lz4<=3.0.1')
else:
    tests_require.append('lz4')

if USE_NUMPY:
    tests_require.extend(['numpy', 'pandas'])

# NOTE(review): calling pip programmatically is unsupported by pip itself;
# kept as-is for CI compatibility, but `subprocess` + `pip install` would
# be the supported route.
try:
    from pip import main as pipmain
except ImportError:
    # pip >= 10 moved main() into the private package.
    from pip._internal import main as pipmain

pipmain(['install'] + tests_require)
# Valgrind suppression: Memcheck conditional-jump false positive raised
# inside CPython's PyUnicode_Decode.
{
   <PyUnicode_Decode>
   # See https://bugs.python.org/issue42176
   Memcheck:Cond
   fun:PyUnicode_Decode
}