diff --git a/.github/workflows/actions.yml b/.github/workflows/actions.yml
new file mode 100644
index 0000000..9a8875a
--- /dev/null
+++ b/.github/workflows/actions.yml
@@ -0,0 +1,322 @@
+on: [push, pull_request]
+name: build
+jobs:
+  tests:
+    runs-on: ubuntu-20.04
+    strategy:
+      matrix:
+        use-numpy:
+          - 0
+        python-version:
+          - "3.5"
+          - "3.6"
+          - "3.7"
+          - "3.8"
+          - "3.9"
+          - "3.10"
+          - "pypy-3.6"
+          - "pypy-3.7"
+        clickhouse-version:
+          - 21.12.3.32
+          - 21.9.3.30
+          # (duplicate 21.9.3.30 entry removed; it is already listed above)
+          - 21.4.6.55
+          - 21.3.10.1
+          - 21.2.10.48
+          - 21.1.9.41
+          - 20.11.2.1
+          - 20.10.2.20
+          - 20.9.3.45
+          - 20.8.4.11
+          - 20.7.4.11
+          - 20.6.8.5
+          - 20.5.5.74
+          - 20.4.9.110
+          - 20.3.20.6
+          - 19.16.17.80
+          - 19.15.3.6
+          - 19.9.2.4   # allow_suspicious_low_cardinality_types
+          - 19.8.3.8   # SimpleAggregateFunction
+          - 19.3.3
+          - 18.12.17
+        include:
+          - clickhouse-version: 20.3.20.6
+            use-numpy: 1
+            python-version: "3.8"
+
+    name: ${{ matrix.python-version }} CH=${{ matrix.clickhouse-version }} NUMPY=${{ matrix.use-numpy }}
+    steps:
+      - uses: actions/checkout@v2
+      - name: Set up Python
+        uses: actions/setup-python@v2
+        with:
+          python-version: ${{ matrix.python-version }}
+          architecture: x64
+#      - name: Login to Docker Hub
+#        uses: docker/login-action@v1
+#        with:
+#          username: ${{ secrets.DOCKER_HUB_USERNAME }}
+#          password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
+      - name: Install flake8
+        run: |
+          pip install --upgrade pip setuptools wheel
+          pip install flake8 flake8-print
+      - name: Run flake8
+        run: flake8
+      - name: Start ClickHouse server and client containers
+        run: |
+          echo "VERSION=${{ matrix.clickhouse-version }}" > tests/.env
+          docker-compose -f tests/docker-compose.yml up -d
+      - name: Setup clickhouse-client proxy for docker
+        run: |
+          # Faking clickhouse-client real communication with container via docker exec.
+          echo -e '#!/bin/bash\n\ndocker exec -e "`env | grep ^TZ=`" test-clickhouse-client clickhouse-client "$@"' | sudo tee /usr/local/bin/clickhouse-client > /dev/null
+          sudo chmod +x /usr/local/bin/clickhouse-client
+          # Overriding setup.cfg. Set host=clickhouse-server
+          sed -i 's/^host=localhost$/host=clickhouse-server/' setup.cfg
+          # Make host think that clickhouse-server is localhost
+          echo '127.0.0.1 clickhouse-server' | sudo tee -a /etc/hosts > /dev/null
+      - name: Build cython extensions with tracing
+        run: CYTHON_TRACE=1 python setup.py build_ext --define CYTHON_TRACE
+        if: ${{ !contains(matrix.python-version, 'pypy') }}
+      - name: Install requirements
+        run: |
+          # Newer coveralls do not work with github actions.
+          pip install 'coveralls<3.0.0'
+          pip install cython
+          python testsrequire.py
+          python setup.py develop
+          # Limit each test time execution.
+          pip install pytest-timeout
+        env:
+          USE_NUMPY: ${{ matrix.use-numpy }}
+      - name: Run tests
+        run: coverage run -m py.test --timeout=10 -v
+        timeout-minutes: 5
+        env:
+          # Set initial TZ for docker exec -e "`env | grep ^TZ`"
+          TZ: UTC
+      - name: Upload coverage
+        run: coveralls
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          COVERALLS_PARALLEL: true
+          COVERALLS_FLAG_NAME: ${{ matrix.python-version }} CH=${{ matrix.clickhouse-version }} NUMPY=${{ matrix.use-numpy }}
+
+  coveralls-finished:
+    name: Indicate completion to coveralls.io
+    needs: tests
+    runs-on: ubuntu-latest
+    steps:
+      - name: Finished
+        uses: coverallsapp/github-action@1.1.3
+        with:
+          github-token: ${{ secrets.GITHUB_TOKEN }}
+          parallel-finished: true
+
+  valgrind:
+    name: Valgrind check
+    needs: tests
+    runs-on: ubuntu-20.04
+    steps:
+      - uses: actions/checkout@v2
+      - name: Set up Python
+        uses: actions/setup-python@v2
+        with:
+          python-version: 3.8
+          architecture: x64
+      - name: Install valgrind
+        run: sudo apt-get install -y valgrind
+#      - name: Login to Docker Hub
+#        uses: docker/login-action@v1
+#        with:
+#          username: ${{ secrets.DOCKER_HUB_USERNAME }}
+#          password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
+      - name: Start ClickHouse server and client containers
+        run: |
+          echo "VERSION=$VERSION" > tests/.env
+          docker-compose -f tests/docker-compose.yml up -d
+        env:
+          VERSION: 20.3.7.46
+      - name: Setup clickhouse-client proxy for docker
+        run: |
+          # Faking clickhouse-client real communication with container via docker exec.
+          echo -e '#!/bin/bash\n\ndocker exec -e "`env | grep ^TZ=`" test-clickhouse-client clickhouse-client "$@"' | sudo tee /usr/local/bin/clickhouse-client > /dev/null
+          sudo chmod +x /usr/local/bin/clickhouse-client
+          # Overriding setup.cfg. Set host=clickhouse-server
+          sed -i 's/^host=localhost$/host=clickhouse-server/' setup.cfg
+          # Make host think that clickhouse-server is localhost
+          echo '127.0.0.1 clickhouse-server' | sudo tee -a /etc/hosts > /dev/null
+      - name: Install requirements
+        run: |
+          python testsrequire.py
+          python setup.py develop
+        env:
+          USE_NUMPY: 1
+      - name: Run tests under valgrind
+        run: valgrind --error-exitcode=1 --suppressions=valgrind.supp py.test -v
+        env:
+          # Set initial TZ for docker exec -e "`env | grep ^TZ`"
+          TZ: UTC
+          USE_NUMPY: 1
+          PYTHONMALLOC: malloc
+
+  wheels-linux:
+    name: Wheels for Linux
+    needs: valgrind
+    runs-on: ubuntu-20.04
+    steps:
+      - uses: actions/checkout@v2
+      - name: Set up Python
+        uses: actions/setup-python@v2
+        with:
+          python-version: 3.8
+          architecture: x64
+#      - name: Login to Docker Hub
+#        uses: docker/login-action@v1
+#        with:
+#          username: ${{ secrets.DOCKER_HUB_USERNAME }}
+#          password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
+      - name: Install cibuildwheel
+        run: |
+          pip install --upgrade pip setuptools
+          pip install cython cibuildwheel==$VERSION
+        env:
+          VERSION: 2.2.2
+      - name: Build wheels
+        run: cibuildwheel --output-dir wheelhouse
+        env:
+          CIBW_BUILD: '*p3*'
+          CIBW_BEFORE_BUILD: pip install cython
+      - uses: ncipollo/release-action@v1
+        name: Upload wheels
+        if: ${{ github.ref_type == 'tag' }}
+        with:
+          artifacts: "wheelhouse/*"
+          allowUpdates: true
+          draft: true
+          tag: Linux
+          token: ${{ secrets.GITHUB_TOKEN }}
+
+  wheels-macos:
+    name: Wheels for OS X
+    needs: valgrind
+    runs-on: macos-10.15
+    steps:
+      - uses: actions/checkout@v2
+      - name: Set up Python
+        uses: actions/setup-python@v2
+        with:
+          python-version: 3.8
+          architecture: x64
+#      - name: Login to Docker Hub
+#        uses: docker/login-action@v1
+#        with:
+#          username: ${{ secrets.DOCKER_HUB_USERNAME }}
+#          password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
+      - name: Install cibuildwheel
+        run: |
+          pip install --upgrade pip setuptools
+          pip install cython cibuildwheel==$VERSION
+        env:
+          VERSION: 2.2.2
+      - name: Build wheels
+        run: cibuildwheel --output-dir wheelhouse
+        env:
+          CIBW_BUILD: '*p3*'
+          CIBW_BEFORE_BUILD: pip install cython
+      - uses: ncipollo/release-action@v1
+        name: Upload wheels
+        if: ${{ github.ref_type == 'tag' }}
+        with:
+          artifacts: "wheelhouse/*"
+          allowUpdates: true
+          draft: true
+          tag: OS X  # NOTE(review): git tag names cannot contain spaces -- confirm publishing this draft works
+          token: ${{ secrets.GITHUB_TOKEN }}
+
+  wheels-windows:
+    name: Wheels for Windows
+    needs: valgrind
+    runs-on: windows-2019
+    steps:
+      - uses: actions/checkout@v2
+      - name: Set up Python
+        uses: actions/setup-python@v2
+        with:
+          python-version: 3.8
+          architecture: x64
+#      - name: Login to Docker Hub
+#        uses: docker/login-action@v1
+#        with:
+#          username: ${{ secrets.DOCKER_HUB_USERNAME }}
+#          password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
+      - name: Install cibuildwheel
+        run: |
+          pip install cibuildwheel==$env:VERSION
+        env:
+          VERSION: 2.2.2
+      - name: Build wheels
+        run: cibuildwheel --output-dir wheelhouse
+        env:
+          CIBW_BUILD: '*p3*'
+          CIBW_BEFORE_BUILD: pip install cython
+      - uses: ncipollo/release-action@v1
+        name: Upload wheels
+        if: ${{ github.ref_type == 'tag' }}
+        with:
+          artifacts: "wheelhouse/*"
+          allowUpdates: true
+          draft: true
+          tag: Windows
+          token: ${{ secrets.GITHUB_TOKEN }}
+
+  wheels-linux-non-x86:
+    name: Wheels for Linux non-x86
+    needs: valgrind
+    runs-on: ubuntu-20.04
+    strategy:
+      matrix:
+        arch:
+          - aarch64
+          - ppc64le
+          - s390x
+    steps:
+      - uses: actions/checkout@v2
+#      - name: Login to Docker Hub
+#        uses: docker/login-action@v1
+#        with:
+#          username: ${{ secrets.DOCKER_HUB_USERNAME }}
+#          password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
+      - name: Set up Python
+        uses: actions/setup-python@v2
+        with:
+          python-version: 3.8
+          architecture: x64
+      - name: Set up QEMU
+        id: qemu
+        uses: docker/setup-qemu-action@v1.2.0
+        with:
+          image: tonistiigi/binfmt:latest
+          platforms: all
+      - name: Install cibuildwheel
+        run: |
+          pip install --upgrade pip setuptools
+          pip install cibuildwheel==$VERSION
+        env:
+          VERSION: 2.2.2
+      - name: Build wheels
+        run: cibuildwheel --output-dir wheelhouse
+        env:
+          CIBW_BUILD: '*p3*'
+          CIBW_BEFORE_BUILD: pip install cython
+          CIBW_ARCHS: ${{ matrix.arch }}
+      - uses: ncipollo/release-action@v1
+        name: Upload wheels
+        if: ${{ github.ref_type == 'tag' }}
+        with:
+          artifacts: "wheelhouse/*"
+          allowUpdates: true
+          draft: true
+          tag: Linux non-x86  # NOTE(review): git tag names cannot contain spaces -- confirm publishing this draft works
+          token: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..9e00340
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,92 @@
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+env/
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+*.egg-info/
+.installed.cfg
+*.egg
+
+# PyInstaller
+#  Usually these files are written by a python script from a template
+#  before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*,cover
+.hypothesis/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+# IPython Notebook
+.ipynb_checkpoints
+
+# pyenv
+.python-version
+
+# celery beat schedule file
+celerybeat-schedule
+
+# dotenv
+.env
+
+# virtualenv
+venv/
+ENV/
+
+# Spyder project settings
+.spyderproject
+
+# Rope project settings
+.ropeproject
+
+# PyCharm project settings
+.idea/
diff --git a/.travis.yml b/.travis.yml
deleted file mode 100644
index 1fa1404..0000000
--- a/.travis.yml
+++ /dev/null
@@ -1,268 +0,0 @@
-env:
-  - VERSION=21.9.3.30
-  - VERSION=21.4.6.55
-  - VERSION=21.3.10.1
-  - VERSION=21.2.10.48
-  - VERSION=21.1.9.41
-  - VERSION=20.11.2.1
-  - VERSION=20.10.2.20
-  - VERSION=20.9.3.45
-  - VERSION=20.8.4.11
-  - VERSION=20.7.4.11
-  - VERSION=20.6.8.5
-  - VERSION=20.5.5.74
-  - VERSION=20.4.9.110
-  - VERSION=20.3.20.6
-  - VERSION=20.3.20.6 USE_NUMPY=1
-  - VERSION=19.16.17.80
-  - VERSION=19.15.3.6
-  - VERSION=19.9.2.4   # allow_suspicious_low_cardinality_types
-  - VERSION=19.8.3.8   # SimpleAggregateFunction
-  - VERSION=19.3.3
-  - VERSION=18.12.17
-#  - VERSION=18.10.3
-#  - VERSION=18.6.0
-#  - VERSION=18.5.1
-#  - VERSION=18.4.0
-#  - VERSION=18.1.0
-#  - VERSION=1.1.54394
-#  - VERSION=1.1.54390
-#  - VERSION=1.1.54388
-#  - VERSION=1.1.54385
-#  - VERSION=1.1.54383
-#  - VERSION=1.1.54381
-#  - VERSION=1.1.54380
-#  - VERSION=1.1.54378 client's image miss tzdata package: https://github.com/yandex/ClickHouse/commit/1bf49fe8446c7dea95beaef2b131e6c6708b0b62#diff-cc737435a5ba74620a889b7718f39a80
-#  - VERSION=1.1.54343
-#  - VERSION=1.1.54342
-##  - VERSION=1.1.54337 Broken network
-#  - VERSION=1.1.54327
-#  - VERSION=1.1.54310
-#  - VERSION=1.1.54304
-#  - VERSION=1.1.54292
-#  - VERSION=1.1.54289
-#  - VERSION=1.1.54284
-#  - VERSION=1.1.54282
-
-language: python
-python:
-  - "3.4"
-  - "3.5"
-  - "3.6"
-  - "3.7"
-  - "3.8"
-  - "3.9"
-  - "pypy3.5"
-cache: pip
-services:
-  - docker
-install:
-  - pip install --upgrade pip setuptools
-  # Check flake8 first
-  - pip install flake8 flake8-print
-  - flake8
-  - if [ ! -z $DOCKER_PASSWORD ] ; then echo "$DOCKER_PASSWORD" | docker login -u "$DOCKER_USERNAME" --password-stdin ; fi
-  - docker run -e "TZ=Europe/Moscow" -d -p 127.0.0.1:9000:9000 --name test-clickhouse-server --ulimit nofile=262144:262144 yandex/clickhouse-server:$VERSION
-  - docker run -d --entrypoint "/bin/sh" --name test-clickhouse-client --link test-clickhouse-server:clickhouse-server yandex/clickhouse-client:$VERSION -c 'while :; do sleep 1; done'
-  - docker ps -a
-  # Faking clickhouse-client real communication with container via docker exec.
-  - echo -e '#!/bin/bash\n\ndocker exec -e "`env | grep ^TZ=`" test-clickhouse-client clickhouse-client "$@"' | sudo tee /usr/local/bin/clickhouse-client > /dev/null
-  - sudo chmod +x /usr/local/bin/clickhouse-client
-  # Overriding setup.cfg. Set host=clickhouse-server
-  - sed -i 's/^host=localhost$/host=clickhouse-server/' setup.cfg
-  # Make host think that clickhouse-server is localhost
-  - echo '127.0.0.1 clickhouse-server' | sudo tee /etc/hosts > /dev/null
-  - pip install coveralls cython 'cryptography<3.3'
-  - if [ -z ${USE_NUMPY+x} ]; then pip uninstall -y numpy pandas; fi
-script:
-  # Enable cython tracing only for cpython
-  - if [ "$TRAVIS_PYTHON_VERSION" != "pypy3.5" ]; then CYTHON_TRACE=1 python setup.py build_ext --define CYTHON_TRACE ; fi
-  - coverage run setup.py test
-after_success:
-  coveralls
-
-jobs:
-  # Exclude numpy unsupported versions,
-  exclude:
-    - python: 3.4
-      env: VERSION=20.3.20.6 USE_NUMPY=1
-    - python: 3.9-dev
-      env: VERSION=20.3.20.6 USE_NUMPY=1
-    - python: pypy3.5
-      env: VERSION=20.3.20.6 USE_NUMPY=1
-
-  include:
-    - stage: valgrind
-      name: Valgrind check
-      os: linux
-      language: python
-      python:
-        - "3.6"
-      addons:
-        apt:
-          packages:
-            - valgrind
-      install:
-        - if [ ! -z $DOCKER_PASSWORD ] ; then echo "$DOCKER_PASSWORD" | docker login -u "$DOCKER_USERNAME" --password-stdin ; fi
-        - docker run -e "TZ=Europe/Moscow" -d -p 127.0.0.1:9000:9000 --name test-clickhouse-server --ulimit nofile=262144:262144 yandex/clickhouse-server:$VERSION
-        - docker run -d --entrypoint "/bin/sh" --name test-clickhouse-client --link test-clickhouse-server:clickhouse-server yandex/clickhouse-client:$VERSION -c 'while :; do sleep 1; done'
-        - docker ps -a
-        # Faking clickhouse-client real communication with container via docker exec.
-        - echo -e '#!/bin/bash\n\ndocker exec -e "`env | grep ^TZ=`" test-clickhouse-client clickhouse-client "$@"' | sudo tee /usr/local/bin/clickhouse-client > /dev/null
-        - sudo chmod +x /usr/local/bin/clickhouse-client
-        # Overriding setup.cfg. Set host=clickhouse-server
-        - sed -i 's/^host=localhost$/host=clickhouse-server/' setup.cfg
-        # Make host think that clickhouse-server is localhost
-        - echo '127.0.0.1 clickhouse-server' | sudo tee /etc/hosts > /dev/null
-        - pip install --upgrade pip setuptools
-        - pip install cython
-
-      script:
-        - valgrind --error-exitcode=1 python setup.py test
-
-      env:
-        - VERSION=20.3.7.46
-        - USE_NUMPY=1
-        - PYTHONMALLOC=malloc
-
-    - stage: wheels
-      name: Wheels for Linux
-      os: linux
-      language: python
-      python:
-        - "3.6"
-
-      install:
-        - pip install --upgrade pip setuptools
-        - pip install cython cibuildwheel==1.11.0
-
-      script:
-        - cibuildwheel --output-dir wheelhouse
-
-      env:
-        - CIBW_BUILD='*p3*'
-        - CIBW_BEFORE_BUILD='pip install cython'
-      deploy:
-        name: Linux
-        provider: releases
-        api_key: $GITHUB_TOKEN
-        file_glob: true
-        file: wheelhouse/*
-        skip_cleanup: true
-        draft: true
-        prerelease: true
-        overwrite: true
-        on:
-          tags: true
-
-    - stage: wheels
-      name: Wheels for Linux aarch64
-      arch: arm64
-      os: linux
-      language: python
-      python:
-        - "3.6"
-      install:
-        - pip install --upgrade pip setuptools
-        - pip install cython cibuildwheel==1.11.0
-
-      script:
-        - cibuildwheel --output-dir wheelhouse
-
-      env:
-        - CIBW_BUILD='*p3*'
-        - CIBW_BEFORE_BUILD='pip install cython'
-      deploy:
-        name: Linux aarch64
-        provider: releases
-        api_key: $GITHUB_TOKEN
-        file_glob: true
-        file: wheelhouse/*
-        skip_cleanup: true
-        draft: true
-        prerelease: true
-        overwrite: true
-        on:
-          tags: true
-
-    - stage: wheels
-      name: Wheels for OS X
-      os: osx
-      language: generic
-
-      install:
-        - pip3 install --upgrade pip setuptools
-        - pip3 install cython cibuildwheel==1.11.0
-
-      script:
-        - cibuildwheel --output-dir wheelhouse
-
-      env:
-        - CIBW_BUILD='*p3*'
-        - CIBW_BEFORE_BUILD='pip install cython'
-      deploy:
-        name: Mac OS X
-        provider: releases
-        api_key: $GITHUB_TOKEN
-        file_glob: true
-        file: wheelhouse/*
-        skip_cleanup: true
-        draft: true
-        prerelease: true
-        overwrite: true
-        on:
-          tags: true
-
-    - stage: wheels
-      name: Wheels for Windows
-      os: windows
-      language: shell
-
-      install:
-        - choco install python3 --version 3.9.0 --params "/InstallDir:C:\Python39"
-      script:
-        - C:/Python39/python -m pip install cibuildwheel==1.11.0
-        - C:/Python39/python -m cibuildwheel --platform windows --output-dir wheelhouse
-
-      env:
-        - CIBW_BUILD='cp*'
-        - CIBW_BEFORE_BUILD='python -m pip install cython'
-      deploy:
-        name: Windows Python
-        provider: releases
-        api_key: $GITHUB_TOKEN
-        file_glob: true
-        file: wheelhouse/*
-        skip_cleanup: true
-        draft: true
-        prerelease: true
-        overwrite: true
-        on:
-          tags: true
-
-    - stage: wheels
-      name: Wheels for Windows PyPy
-      os: windows
-      language: shell
-
-      install:
-        - choco install python3 --version 3.9.0 --params "/InstallDir:C:\Python39"
-      script:
-        - C:/Python39/python -m pip install cibuildwheel==1.11.0
-        - C:/Python39/python -m cibuildwheel --platform windows --output-dir wheelhouse
-
-      env:
-        - CIBW_BUILD='pp*'
-        - CIBW_BEFORE_BUILD='python -m pip install cython'
-      deploy:
-        name: Windows Python PyPy
-        provider: releases
-        api_key: $GITHUB_TOKEN
-        file_glob: true
-        file: wheelhouse/*
-        skip_cleanup: true
-        draft: true
-        prerelease: true
-        overwrite: true
-        on:
-          tags: true
diff --git a/CHANGELOG.md b/CHANGELOG.md
index dce8cc4..602911c 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,8 +1,29 @@
 # Changelog
 
 ## Unreleased
+### Added
+- `tzlocal`>=4.0 support. Pull request [#263](https://github.com/mymarilyn/clickhouse-driver/pull/263) by [azat](https://github.com/azat).
+- `quota_key` support.
+- Wheels for Python 3.10.
+- Bool type. Pull request [#279](https://github.com/mymarilyn/clickhouse-driver/pull/279) by [adrian17](https://github.com/adrian17).
+- Nested type with `flatten_nested=0`. Pull request [#285](https://github.com/mymarilyn/clickhouse-driver/pull/285) by [spff](https://github.com/spff).
+
+### Fixed
+- Handle partially consumed query. Solves issue [#117](https://github.com/mymarilyn/clickhouse-driver/issues/117).
+- Fallback to generic columns when NumPy support is not implemented for column type. Solves issue [#254](https://github.com/mymarilyn/clickhouse-driver/issues/254).
+- Broken ZSTD decompression. Solves issue [#269](https://github.com/mymarilyn/clickhouse-driver/issues/269).
+- External tables passing with NumPy. Solves issue [#267](https://github.com/mymarilyn/clickhouse-driver/issues/267).
+- Consider tzinfo for datetime parameters substitution. Solves issue [#268](https://github.com/mymarilyn/clickhouse-driver/issues/268).
+- Do not use NumPy columns inside generic columns. Solves issue [#272](https://github.com/mymarilyn/clickhouse-driver/issues/272).
+- Decimal128 and Decimal256 types_check. Solves issue [#274](https://github.com/mymarilyn/clickhouse-driver/issues/274).
+- Insertion using `execute` in DB API. Solves issue [#179](https://github.com/mymarilyn/clickhouse-driver/issues/179). Pull request [#276](https://github.com/mymarilyn/clickhouse-driver/pull/276) by [nnseva](https://github.com/nnseva).
+- Variables cannot be declared with `cpdef` in Cython 3. Pull request [#281](https://github.com/mymarilyn/clickhouse-driver/pull/281) by [ym](https://github.com/ym).
+
+### Changed
+- Switch from nose test runner to pytest.
+- Migrate from Travis CI to GitHub Actions.
 
-## [0.2.2] - 2021-10-24
+## [0.2.2] - 2021-09-24
 ### Added
 - DateTime64 extended range. Pull request [#222](https://github.com/mymarilyn/clickhouse-driver/pull/222) by [0x4ec7](https://github.com/0x4ec7).
 - Support for using `Client` as context manager closing connection on exit. Solves issue [#237](https://github.com/mymarilyn/clickhouse-driver/issues/237). Pull request [#206](https://github.com/mymarilyn/clickhouse-driver/pull/238) by [wlhjason](https://github.com/wlhjason).
@@ -359,7 +380,8 @@
 - Date/DateTime types.
 - String types.
 
-[Unreleased]: https://github.com/mymarilyn/clickhouse-driver/compare/0.2.2...HEAD
+[Unreleased]: https://github.com/mymarilyn/clickhouse-driver/compare/0.2.3...HEAD
+[0.2.3]: https://github.com/mymarilyn/clickhouse-driver/compare/0.2.2...0.2.3
 [0.2.2]: https://github.com/mymarilyn/clickhouse-driver/compare/0.2.1...0.2.2
 [0.2.1]: https://github.com/mymarilyn/clickhouse-driver/compare/0.2.0...0.2.1
 [0.2.0]: https://github.com/mymarilyn/clickhouse-driver/compare/0.1.5...0.2.0
diff --git a/README.rst b/README.rst
index 814d043..b739e32 100644
--- a/README.rst
+++ b/README.rst
@@ -16,8 +16,8 @@ ClickHouse Python Driver
 .. image:: https://img.shields.io/pypi/dm/clickhouse-driver.svg
     :target: https://pypi.org/project/clickhouse-driver
 
-.. image:: https://travis-ci.org/mymarilyn/clickhouse-driver.svg?branch=master
-   :target: https://travis-ci.org/mymarilyn/clickhouse-driver
+.. image:: https://github.com/mymarilyn/clickhouse-driver/actions/workflows/actions.yml/badge.svg
+   :target: https://github.com/mymarilyn/clickhouse-driver/actions/workflows/actions.yml
 
 ClickHouse Python Driver with native (TCP) interface support.
 
@@ -43,6 +43,7 @@ Features
   * Enum8/16
   * Array(T)
   * Nullable(T)
+  * Bool
   * UUID
   * Decimal
   * IPv4/IPv6
diff --git a/clickhouse_driver/__init__.py b/clickhouse_driver/__init__.py
index 0279612..3d19036 100644
--- a/clickhouse_driver/__init__.py
+++ b/clickhouse_driver/__init__.py
@@ -3,7 +3,7 @@ from .client import Client
 from .dbapi import connect
 
 
-VERSION = (0, 2, 2)
+VERSION = (0, 2, 3)
 __version__ = '.'.join(str(x) for x in VERSION)
 
 __all__ = ['Client', 'connect']
diff --git a/clickhouse_driver/block.py b/clickhouse_driver/block.py
index 3f9dd20..5c63291 100644
--- a/clickhouse_driver/block.py
+++ b/clickhouse_driver/block.py
@@ -1,6 +1,7 @@
 from .reader import read_varint, read_binary_uint8, read_binary_int32
 from .varint import write_varint
 from .writer import write_binary_uint8, write_binary_int32
+from .columns import nestedcolumn
 
 
 class BlockInfo(object):
@@ -151,17 +152,44 @@ class RowOrientedBlock(BaseBlock):
         return [row[index] for row in self.data]
 
     def _mutate_dicts_to_rows(self, data):
-        column_names = [x[0] for x in self.columns_with_types]
-
         check_row_type = False
         if self.types_check:
             check_row_type = self._check_dict_row_type
 
+        return self._pure_mutate_dicts_to_rows(
+            data,
+            self.columns_with_types,
+            check_row_type,
+        )
+
+    def _pure_mutate_dicts_to_rows(
+        self,
+        data,
+        columns_with_types,
+        check_row_type,
+    ):
+        columns_with_cwt = []
+        for name, type_ in columns_with_types:
+            cwt = None
+            if type_.startswith('Nested'):
+                cwt = nestedcolumn.get_columns_with_types(type_)
+            columns_with_cwt.append((name, cwt))
+
         for i, row in enumerate(data):
             if check_row_type:
                 check_row_type(row)
 
-            data[i] = [row[name] for name in column_names]
+            new_data = []
+            for name, cwt in columns_with_cwt:
+                if cwt is None:
+                    new_data.append(row[name])
+                else:
+                    new_data.append(self._pure_mutate_dicts_to_rows(
+                        row[name], cwt, check_row_type
+                    ))
+            data[i] = new_data
+        # return for recursion
+        return data
 
     def _check_rows(self, data):
         expected_row_len = len(self.columns_with_types)
diff --git a/clickhouse_driver/client.py b/clickhouse_driver/client.py
index 1103a07..3daf6a4 100644
--- a/clickhouse_driver/client.py
+++ b/clickhouse_driver/client.py
@@ -46,6 +46,9 @@ class Client(object):
         * ``opentelemetry_tracestate`` -- OpenTelemetry tracestate header as
                            described by W3C Trace Context recommendation.
                            New in version *0.2.2*.
+        * ``quota_key`` -- A string to differentiate quotas when the user has
+                           keyed quotas configured on server.
+                           New in version *0.2.3*.
     """
 
     available_client_settings = (
@@ -54,11 +57,12 @@ class Client(object):
         'strings_encoding',
         'use_numpy',
         'opentelemetry_traceparent',
-        'opentelemetry_tracestate'
+        'opentelemetry_tracestate',
+        'quota_key'
     )
 
     def __init__(self, *args, **kwargs):
-        self.settings = kwargs.pop('settings', {}).copy()
+        self.settings = (kwargs.pop('settings', None) or {}).copy()
 
         self.client_settings = {
             'insert_block_size': int(self.settings.pop(
@@ -78,6 +82,9 @@ class Client(object):
             ),
             'opentelemetry_tracestate': self.settings.pop(
                 'opentelemetry_tracestate', ''
+            ),
+            'quota_key': self.settings.pop(
+                'quota_key', ''
             )
         }
 
@@ -213,9 +220,15 @@ class Client(object):
             self.connection.database = query[4:].strip()
 
     @contextmanager
-    def disconnect_on_error(self, query):
+    def disconnect_on_error(self, query, settings):
+        self.make_query_settings(settings)
+
         try:
+            self.connection.force_connect()
+            self.last_query = QueryInfo()
+
             yield
+
             self.track_current_database(query)
 
         except (Exception, KeyboardInterrupt):
@@ -267,11 +280,8 @@ class Client(object):
         """
 
         start_time = time()
-        self.make_query_settings(settings)
-        self.connection.force_connect()
-        self.last_query = QueryInfo()
 
-        with self.disconnect_on_error(query):
+        with self.disconnect_on_error(query, settings):
             # INSERT queries can use list/tuple/generator of list/tuples/dicts.
             # For SELECT parameters can be passed in only in dict right now.
             is_insert = isinstance(params, (list, tuple, types.GeneratorType))
@@ -322,11 +332,7 @@ class Client(object):
         :return: :ref:`progress-query-result` proxy.
         """
 
-        self.make_query_settings(settings)
-        self.connection.force_connect()
-        self.last_query = QueryInfo()
-
-        with self.disconnect_on_error(query):
+        with self.disconnect_on_error(query, settings):
             return self.process_ordinary_query_with_progress(
                 query, params=params, with_column_types=with_column_types,
                 external_tables=external_tables, query_id=query_id,
@@ -361,11 +367,7 @@ class Client(object):
         :return: :ref:`iter-query-result` proxy.
         """
 
-        self.make_query_settings(settings)
-        self.connection.force_connect()
-        self.last_query = QueryInfo()
-
-        with self.disconnect_on_error(query):
+        with self.disconnect_on_error(query, settings):
             return self.iter_process_ordinary_query(
                 query, params=params, with_column_types=with_column_types,
                 external_tables=external_tables,
@@ -432,11 +434,8 @@ class Client(object):
             raise RuntimeError('Extras for NumPy must be installed')
 
         start_time = time()
-        self.make_query_settings(settings)
-        self.connection.force_connect()
-        self.last_query = QueryInfo()
 
-        with self.disconnect_on_error(query):
+        with self.disconnect_on_error(query, settings):
             self.connection.send_query(query, query_id=query_id)
             self.connection.send_external_tables(external_tables)
 
@@ -457,7 +456,9 @@ class Client(object):
             types_check=False, columnar=False):
 
         if params is not None:
-            query = self.substitute_params(query, params)
+            query = self.substitute_params(
+                query, params, self.connection.context
+            )
 
         self.connection.send_query(query, query_id=query_id)
         self.connection.send_external_tables(external_tables,
@@ -471,7 +472,9 @@ class Client(object):
             types_check=False, columnar=False):
 
         if params is not None:
-            query = self.substitute_params(query, params)
+            query = self.substitute_params(
+                query, params, self.connection.context
+            )
 
         self.connection.send_query(query, query_id=query_id)
         self.connection.send_external_tables(external_tables,
@@ -485,7 +488,9 @@ class Client(object):
             types_check=False):
 
         if params is not None:
-            query = self.substitute_params(query, params)
+            query = self.substitute_params(
+                query, params, self.connection.context
+            )
 
         self.connection.send_query(query, query_id=query_id)
         self.connection.send_external_tables(external_tables,
@@ -589,11 +594,11 @@ class Client(object):
         # Client must still read until END_OF_STREAM packet.
         return self.receive_result(with_column_types=with_column_types)
 
-    def substitute_params(self, query, params):
+    def substitute_params(self, query, params, context):
         if not isinstance(params, dict):
             raise ValueError('Parameters are expected in dict form')
 
-        escaped = escape_params(params)
+        escaped = escape_params(params, context)
         return query % escaped
 
     @classmethod
diff --git a/clickhouse_driver/clientinfo.py b/clickhouse_driver/clientinfo.py
index df3078b..64cfc9d 100644
--- a/clickhouse_driver/clientinfo.py
+++ b/clickhouse_driver/clientinfo.py
@@ -34,8 +34,6 @@ class ClientInfo(object):
     initial_query_id = ''
     initial_address = '0.0.0.0:0'
 
-    quota_key = ''
-
     def __init__(self, client_name, context):
         self.query_kind = ClientInfo.QueryKind.NO_QUERY
 
@@ -51,6 +49,8 @@ class ClientInfo(object):
             context.client_settings['opentelemetry_tracestate']
         )
 
+        self.quota_key = context.client_settings['quota_key']
+
         super(ClientInfo, self).__init__()
 
     @property
diff --git a/clickhouse_driver/columns/boolcolumn.py b/clickhouse_driver/columns/boolcolumn.py
new file mode 100644
index 0000000..8b75258
--- /dev/null
+++ b/clickhouse_driver/columns/boolcolumn.py
@@ -0,0 +1,7 @@
+from .base import FormatColumn
+
+
+class BoolColumn(FormatColumn):
+    ch_type = 'Bool'
+    py_types = (bool, )
+    format = '?'
diff --git a/clickhouse_driver/columns/datetimecolumn.py b/clickhouse_driver/columns/datetimecolumn.py
index 40b6ce8..a5bfeeb 100644
--- a/clickhouse_driver/columns/datetimecolumn.py
+++ b/clickhouse_driver/columns/datetimecolumn.py
@@ -1,8 +1,7 @@
 from datetime import datetime
 
 from pytz import timezone as get_timezone, utc
-from tzlocal import get_localzone
-
+from ..util.compat import get_localzone_name_compat
 from .base import FormatColumn
 
 EPOCH = datetime(1970, 1, 1, tzinfo=utc)
@@ -193,13 +192,7 @@ def create_datetime_column(spec, column_options):
         offset_naive = False
     else:
         if not context.settings.get('use_client_time_zone', False):
-            try:
-                local_timezone = get_localzone().key
-            except AttributeError:
-                local_timezone = get_localzone().zone
-            except Exception:
-                local_timezone = None
-
+            local_timezone = get_localzone_name_compat()
             if local_timezone != context.server_info.timezone:
                 tz_name = context.server_info.timezone
 
diff --git a/clickhouse_driver/columns/decimalcolumn.py b/clickhouse_driver/columns/decimalcolumn.py
index 0ab3309..c28649d 100644
--- a/clickhouse_driver/columns/decimalcolumn.py
+++ b/clickhouse_driver/columns/decimalcolumn.py
@@ -8,7 +8,6 @@ from .intcolumn import Int128Column, Int256Column
 class DecimalColumn(FormatColumn):
     py_types = (Decimal, float, int)
     max_precision = None
-    int_size = None
 
     def __init__(self, precision, scale, types_check=False, **kwargs):
         self.precision = precision
@@ -16,10 +15,15 @@ class DecimalColumn(FormatColumn):
         super(DecimalColumn, self).__init__(**kwargs)
 
         if types_check:
-            max_signed_int = (1 << (8 * self.int_size - 1)) - 1
-
             def check_item(value):
-                if value < -max_signed_int or value > max_signed_int:
+                parts = str(value).split('.')
+                int_part = parts[0]
+                frac_part = parts[1] if len(parts) > 1 else ''
+
+                if len(int_part) > precision:
+                    raise ColumnTypeMismatchException(value)
+
+                if len(frac_part) > scale:
                     raise ColumnTypeMismatchException(value)
 
             self.check_item = check_item
@@ -80,13 +84,11 @@ class DecimalColumn(FormatColumn):
 class Decimal32Column(DecimalColumn):
     format = 'i'
     max_precision = 9
-    int_size = 4
 
 
 class Decimal64Column(DecimalColumn):
     format = 'q'
     max_precision = 18
-    int_size = 8
 
 
 class Decimal128Column(DecimalColumn, Int128Column):
diff --git a/clickhouse_driver/columns/largeint.pyx b/clickhouse_driver/columns/largeint.pyx
index 9313885..47deb52 100644
--- a/clickhouse_driver/columns/largeint.pyx
+++ b/clickhouse_driver/columns/largeint.pyx
@@ -3,8 +3,8 @@ from cpython.tuple cimport PyTuple_New, PyTuple_SET_ITEM
 
 from .. import writer
 
-cpdef object MAX_UINT64 = writer.MAX_UINT64
-cpdef object MAX_INT64 = writer.MAX_INT64
+cdef object MAX_UINT64 = writer.MAX_UINT64
+cdef object MAX_INT64 = writer.MAX_INT64
 
 
 def int128_from_quads(quad_items, unsigned long long n_items):
diff --git a/clickhouse_driver/columns/nestedcolumn.py b/clickhouse_driver/columns/nestedcolumn.py
new file mode 100644
index 0000000..fefbafd
--- /dev/null
+++ b/clickhouse_driver/columns/nestedcolumn.py
@@ -0,0 +1,73 @@
+
+from .arraycolumn import create_array_column
+
+
+def create_nested_column(spec, column_by_spec_getter):
+    return create_array_column(
+        'Array(Tuple({}))'.format(','.join(get_nested_columns(spec))),
+        column_by_spec_getter=column_by_spec_getter
+    )
+
+
+def get_nested_columns(spec):
+    brackets = 0
+    column_begin = 0
+
+    inner_spec = get_inner_spec(spec)
+    nested_columns = []
+    for i, x in enumerate(inner_spec + ','):
+        if x == ',':
+            if brackets == 0:
+                nested_columns.append(inner_spec[column_begin:i])
+                column_begin = i + 1
+        elif x == '(':
+            brackets += 1
+        elif x == ')':
+            brackets -= 1
+        elif x == ' ':
+            if brackets == 0:
+                column_begin = i + 1
+    return nested_columns
+
+
+def get_columns_with_types(spec):
+    brackets = 0
+    prev_comma = 0
+    prev_space = 0
+
+    inner_spec = get_inner_spec(spec)
+    columns_with_types = []
+
+    for i, x in enumerate(inner_spec + ','):
+        if x == ',':
+            if brackets == 0:
+                columns_with_types.append((
+                    inner_spec[prev_comma:prev_space].strip(),
+                    inner_spec[prev_space:i]
+                ))
+                prev_comma = i + 1
+        elif x == '(':
+            brackets += 1
+        elif x == ')':
+            brackets -= 1
+        elif x == ' ':
+            if brackets == 0:
+                prev_space = i + 1
+    return columns_with_types
+
+
+def get_inner_spec(spec):
+    brackets = 0
+    offset = len('Nested')
+    i = offset
+    for i, ch in enumerate(spec[offset:], offset):
+        if ch == '(':
+            brackets += 1
+
+        elif ch == ')':
+            brackets -= 1
+
+        if brackets == 0:
+            break
+
+    return spec[offset + 1:i]
diff --git a/clickhouse_driver/columns/numpy/datetimecolumn.py b/clickhouse_driver/columns/numpy/datetimecolumn.py
index 5878380..6e22ed2 100644
--- a/clickhouse_driver/columns/numpy/datetimecolumn.py
+++ b/clickhouse_driver/columns/numpy/datetimecolumn.py
@@ -1,9 +1,9 @@
 import numpy as np
 import pandas as pd
 from pytz import timezone as get_timezone
-from tzlocal import get_localzone
 
 from .base import NumpyColumn
+from ...util.compat import get_localzone_name_compat
 
 
 class NumpyDateTimeColumnBase(NumpyColumn):
@@ -122,16 +122,10 @@ def create_numpy_datetime_column(spec, column_options):
 
     tz_name = timezone = None
     offset_naive = True
-    local_timezone = None
 
     # As Numpy do not use local timezone for converting timestamp to
     # datetime we need always detect local timezone for manual converting.
-    try:
-        local_timezone = get_localzone().key
-    except AttributeError:
-        local_timezone = get_localzone().zone
-    except Exception:
-        pass
+    local_timezone = get_localzone_name_compat()
 
     # Use column's timezone if it's specified.
     if spec and spec[-1] == ')':
diff --git a/clickhouse_driver/columns/numpy/service.py b/clickhouse_driver/columns/numpy/service.py
index 29601c7..52a09fe 100644
--- a/clickhouse_driver/columns/numpy/service.py
+++ b/clickhouse_driver/columns/numpy/service.py
@@ -1,40 +1,20 @@
 from ... import errors
-from ..arraycolumn import create_array_column
 from .datecolumn import NumpyDateColumn
 from .datetimecolumn import create_numpy_datetime_column
-from ..decimalcolumn import create_decimal_column
-from ..enumcolumn import create_enum_column
 from .floatcolumn import NumpyFloat32Column, NumpyFloat64Column
 from .intcolumn import (
     NumpyInt8Column, NumpyInt16Column, NumpyInt32Column, NumpyInt64Column,
     NumpyUInt8Column, NumpyUInt16Column, NumpyUInt32Column, NumpyUInt64Column
 )
 from .lowcardinalitycolumn import create_numpy_low_cardinality_column
-from ..nothingcolumn import NothingColumn
-from ..nullcolumn import NullColumn
-from ..nullablecolumn import create_nullable_column
-from ..simpleaggregatefunctioncolumn import (
-    create_simple_aggregate_function_column
-)
 from .stringcolumn import create_string_column
-from ..tuplecolumn import create_tuple_column
-from ..uuidcolumn import UUIDColumn
-from ..intervalcolumn import (
-    IntervalYearColumn, IntervalMonthColumn, IntervalWeekColumn,
-    IntervalDayColumn, IntervalHourColumn, IntervalMinuteColumn,
-    IntervalSecondColumn
-)
-from ..ipcolumn import IPv4Column, IPv6Column
+from ..nullablecolumn import create_nullable_column
 
 column_by_type = {c.ch_type: c for c in [
     NumpyDateColumn,
     NumpyFloat32Column, NumpyFloat64Column,
     NumpyInt8Column, NumpyInt16Column, NumpyInt32Column, NumpyInt64Column,
-    NumpyUInt8Column, NumpyUInt16Column, NumpyUInt32Column, NumpyUInt64Column,
-    NothingColumn, NullColumn, UUIDColumn,
-    IntervalYearColumn, IntervalMonthColumn, IntervalWeekColumn,
-    IntervalDayColumn, IntervalHourColumn, IntervalMinuteColumn,
-    IntervalSecondColumn, IPv4Column, IPv6Column
+    NumpyUInt8Column, NumpyUInt16Column, NumpyUInt32Column, NumpyUInt64Column
 ]}
 
 
@@ -45,36 +25,18 @@ def get_numpy_column_by_spec(spec, column_options):
     if spec == 'String' or spec.startswith('FixedString'):
         return create_string_column(spec, column_options)
 
-    elif spec.startswith('Enum'):
-        return create_enum_column(spec, column_options)
-
     elif spec.startswith('DateTime'):
         return create_numpy_datetime_column(spec, column_options)
 
-    elif spec.startswith('Decimal'):
-        return create_decimal_column(spec, column_options)
-
-    elif spec.startswith('Array'):
-        return create_array_column(spec, create_column_with_options)
-
-    elif spec.startswith('Tuple'):
-        return create_tuple_column(spec, create_column_with_options)
-
     elif spec.startswith('Nullable'):
         return create_nullable_column(spec, create_column_with_options)
 
     elif spec.startswith('LowCardinality'):
         return create_numpy_low_cardinality_column(spec,
                                                    create_column_with_options)
-
-    elif spec.startswith('SimpleAggregateFunction'):
-        return create_simple_aggregate_function_column(
-            spec, create_column_with_options)
-
     else:
-        try:
+        if spec in column_by_type:
             cls = column_by_type[spec]
             return cls(**column_options)
 
-        except KeyError as e:
-            raise errors.UnknownTypeError('Unknown type {}'.format(e.args[0]))
+        raise errors.UnknownTypeError('Unknown type {}'.format(spec))
diff --git a/clickhouse_driver/columns/service.py b/clickhouse_driver/columns/service.py
index 64d4b15..89163a5 100644
--- a/clickhouse_driver/columns/service.py
+++ b/clickhouse_driver/columns/service.py
@@ -1,5 +1,8 @@
+import logging
+
 from .. import errors
 from .arraycolumn import create_array_column
+from .boolcolumn import BoolColumn
 from .datecolumn import DateColumn, Date32Column
 from .datetimecolumn import create_datetime_column
 from .decimalcolumn import create_decimal_column
@@ -21,6 +24,7 @@ from .simpleaggregatefunctioncolumn import (
 )
 from .stringcolumn import create_string_column
 from .tuplecolumn import create_tuple_column
+from .nestedcolumn import create_nested_column
 from .uuidcolumn import UUIDColumn
 from .intervalcolumn import (
     IntervalYearColumn, IntervalMonthColumn, IntervalWeekColumn,
@@ -38,20 +42,30 @@ column_by_type = {c.ch_type: c for c in [
     NothingColumn, NullColumn, UUIDColumn,
     IntervalYearColumn, IntervalMonthColumn, IntervalWeekColumn,
     IntervalDayColumn, IntervalHourColumn, IntervalMinuteColumn,
-    IntervalSecondColumn, IPv4Column, IPv6Column
+    IntervalSecondColumn, IPv4Column, IPv6Column, BoolColumn
 ]}
 
+logger = logging.getLogger(__name__)
+
 
-def get_column_by_spec(spec, column_options):
+def get_column_by_spec(spec, column_options, use_numpy=None):
     context = column_options['context']
-    use_numpy = context.client_settings['use_numpy'] if context else False
+
+    if use_numpy is None:
+        use_numpy = context.client_settings['use_numpy'] if context else False
 
     if use_numpy:
         from .numpy.service import get_numpy_column_by_spec
-        return get_numpy_column_by_spec(spec, column_options)
+
+        try:
+            return get_numpy_column_by_spec(spec, column_options)
+        except errors.UnknownTypeError:
+            use_numpy = False
+            logger.warning('NumPy support is not implemented for %s. '
+                           'Using generic column', spec)
 
     def create_column_with_options(x):
-        return get_column_by_spec(x, column_options)
+        return get_column_by_spec(x, column_options, use_numpy=use_numpy)
 
     if spec == 'String' or spec.startswith('FixedString'):
         return create_string_column(spec, column_options)
@@ -71,6 +85,9 @@ def get_column_by_spec(spec, column_options):
     elif spec.startswith('Tuple'):
         return create_tuple_column(spec, create_column_with_options)
 
+    elif spec.startswith('Nested'):
+        return create_nested_column(spec, create_column_with_options)
+
     elif spec.startswith('Nullable'):
         return create_nullable_column(spec, create_column_with_options)
 
@@ -89,8 +106,8 @@ def get_column_by_spec(spec, column_options):
             cls = column_by_type[spec]
             return cls(**column_options)
 
-        except KeyError as e:
-            raise errors.UnknownTypeError('Unknown type {}'.format(e.args[0]))
+        except KeyError:
+            raise errors.UnknownTypeError('Unknown type {}'.format(spec))
 
 
 def read_column(context, column_spec, n_items, buf):
diff --git a/clickhouse_driver/columns/tuplecolumn.py b/clickhouse_driver/columns/tuplecolumn.py
index ed5fe5f..cd6c04b 100644
--- a/clickhouse_driver/columns/tuplecolumn.py
+++ b/clickhouse_driver/columns/tuplecolumn.py
@@ -49,17 +49,17 @@ def create_tuple_column(spec, column_by_spec_getter):
 
 
 def get_inner_spec(spec):
-    brackets = 1
-    offset = len('Tuple(')
+    brackets = 0
+    offset = len('Tuple')
     i = offset
     for i, ch in enumerate(spec[offset:], offset):
-        if brackets == 0:
-            break
-
         if ch == '(':
             brackets += 1
 
         elif ch == ')':
             brackets -= 1
 
-    return spec[offset:i]
+        if brackets == 0:
+            break
+
+    return spec[offset + 1:i]
diff --git a/clickhouse_driver/compression/base.py b/clickhouse_driver/compression/base.py
index c8783d3..5dae0cd 100644
--- a/clickhouse_driver/compression/base.py
+++ b/clickhouse_driver/compression/base.py
@@ -1,5 +1,9 @@
 from io import BytesIO
 
+from ..reader import read_binary_uint32
+from ..writer import write_binary_uint8, write_binary_uint32
+from .. import errors
+
 try:
     from clickhouse_cityhash.cityhash import CityHash128
 except ImportError:
@@ -7,8 +11,6 @@ except ImportError:
         'Package clickhouse-cityhash is required to use compression'
     )
 
-from .. import errors
-
 
 class BaseCompressor(object):
     """
@@ -31,9 +33,23 @@ class BaseCompressor(object):
     def write(self, p_str):
         self.data.write(p_str)
 
-    def get_compressed_data(self, extra_header_size):
+    def compress_data(self, data):
         raise NotImplementedError
 
+    def get_compressed_data(self, extra_header_size):
+        rv = BytesIO()
+
+        data = self.get_value()
+        compressed = self.compress_data(data)
+
+        header_size = extra_header_size + 4 + 4  # sizes
+
+        write_binary_uint32(header_size + len(compressed), rv)
+        write_binary_uint32(len(data), rv)
+        rv.write(compressed)
+
+        return rv.getvalue()
+
 
 class BaseDecompressor(object):
     method = None
@@ -43,10 +59,29 @@ class BaseDecompressor(object):
         self.stream = real_stream
         super(BaseDecompressor, self).__init__()
 
+    def decompress_data(self, data, uncompressed_size):
+        raise NotImplementedError
+
     def check_hash(self, compressed_data, compressed_hash):
         if CityHash128(compressed_data) != compressed_hash:
             raise errors.ChecksumDoesntMatchError()
 
     def get_decompressed_data(self, method_byte, compressed_hash,
                               extra_header_size):
-        raise NotImplementedError
+        size_with_header = read_binary_uint32(self.stream)
+        compressed_size = size_with_header - extra_header_size - 4
+
+        compressed = BytesIO(self.stream.read(compressed_size))
+
+        block_check = BytesIO()
+        write_binary_uint8(method_byte, block_check)
+        write_binary_uint32(size_with_header, block_check)
+        block_check.write(compressed.getvalue())
+
+        self.check_hash(block_check.getvalue(), compressed_hash)
+
+        uncompressed_size = read_binary_uint32(compressed)
+
+        compressed = compressed.read(compressed_size - 4)
+
+        return self.decompress_data(compressed, uncompressed_size)
diff --git a/clickhouse_driver/compression/lz4.py b/clickhouse_driver/compression/lz4.py
index 94044e4..dc63495 100644
--- a/clickhouse_driver/compression/lz4.py
+++ b/clickhouse_driver/compression/lz4.py
@@ -1,12 +1,7 @@
-from __future__ import absolute_import
-from io import BytesIO
-
 from lz4 import block
 
 from .base import BaseCompressor, BaseDecompressor
 from ..protocol import CompressionMethod, CompressionMethodByte
-from ..reader import read_binary_uint32
-from ..writer import write_binary_uint32, write_binary_uint8
 
 
 class Compressor(BaseCompressor):
@@ -14,42 +9,13 @@ class Compressor(BaseCompressor):
     method_byte = CompressionMethodByte.LZ4
     mode = 'default'
 
-    def get_compressed_data(self, extra_header_size):
-        rv = BytesIO()
-
-        data = self.get_value()
-        compressed = block.compress(data, store_size=False, mode=self.mode)
-
-        header_size = extra_header_size + 4 + 4  # sizes
-
-        write_binary_uint32(header_size + len(compressed), rv)
-        write_binary_uint32(len(data), rv)
-        rv.write(compressed)
-
-        return rv.getvalue()
+    def compress_data(self, data):
+        return block.compress(data, store_size=False, mode=self.mode)
 
 
 class Decompressor(BaseDecompressor):
     method = CompressionMethod.LZ4
     method_byte = CompressionMethodByte.LZ4
 
-    def get_decompressed_data(self, method_byte, compressed_hash,
-                              extra_header_size):
-        size_with_header = read_binary_uint32(self.stream)
-        compressed_size = size_with_header - extra_header_size - 4
-
-        compressed = BytesIO(self.stream.read(compressed_size))
-
-        block_check = BytesIO()
-        write_binary_uint8(method_byte, block_check)
-        write_binary_uint32(size_with_header, block_check)
-        block_check.write(compressed.getvalue())
-
-        self.check_hash(block_check.getvalue(), compressed_hash)
-
-        uncompressed_size = read_binary_uint32(compressed)
-
-        compressed = compressed.read(compressed_size - 4)
-
-        return block.decompress(compressed,
-                                uncompressed_size=uncompressed_size)
+    def decompress_data(self, data, uncompressed_size):
+        return block.decompress(data, uncompressed_size=uncompressed_size)
diff --git a/clickhouse_driver/compression/zstd.py b/clickhouse_driver/compression/zstd.py
index 296f3e0..ae5169f 100644
--- a/clickhouse_driver/compression/zstd.py
+++ b/clickhouse_driver/compression/zstd.py
@@ -1,51 +1,20 @@
-from __future__ import absolute_import
-from io import BytesIO
-
 import zstd
 
 from .base import BaseCompressor, BaseDecompressor
 from ..protocol import CompressionMethod, CompressionMethodByte
-from ..reader import read_binary_uint32
-from ..writer import write_binary_uint32, write_binary_uint8
 
 
 class Compressor(BaseCompressor):
     method = CompressionMethod.ZSTD
     method_byte = CompressionMethodByte.ZSTD
 
-    def get_compressed_data(self, extra_header_size):
-        rv = BytesIO()
-
-        data = self.get_value()
-        compressed = zstd.compress(data)
-
-        header_size = extra_header_size + 4 + 4  # sizes
-
-        write_binary_uint32(header_size + len(compressed), rv)
-        write_binary_uint32(len(data), rv)
-        rv.write(compressed)
-
-        return rv.getvalue()
+    def compress_data(self, data):
+        return zstd.compress(data)
 
 
 class Decompressor(BaseDecompressor):
     method = CompressionMethod.ZSTD
     method_byte = CompressionMethodByte.ZSTD
 
-    def get_decompressed_data(self, method_byte, compressed_hash,
-                              extra_header_size):
-        size_with_header = read_binary_uint32(self.stream)
-        compressed_size = size_with_header - extra_header_size - 4
-
-        compressed = BytesIO(self.stream.read(compressed_size))
-
-        block_check = BytesIO()
-        write_binary_uint8(method_byte, block_check)
-        write_binary_uint32(size_with_header, block_check)
-        block_check.write(compressed.getvalue())
-
-        self.check_hash(block_check.getvalue(), compressed_hash)
-
-        compressed = compressed.read(compressed_size - 4)
-
-        return zstd.decompress(compressed)
+    def decompress_data(self, data, uncompressed_size):
+        return zstd.decompress(data)
diff --git a/clickhouse_driver/connection.py b/clickhouse_driver/connection.py
index 020bcd2..ca40570 100644
--- a/clickhouse_driver/connection.py
+++ b/clickhouse_driver/connection.py
@@ -23,6 +23,7 @@ from .reader import read_binary_str
 from .readhelpers import read_exception
 from .settings.writer import write_settings
 from .streams.native import BlockInputStream, BlockOutputStream
+from .util.compat import threading
 from .varint import write_varint, read_varint
 from .writer import write_binary_str
 
@@ -203,12 +204,17 @@ class Connection(object):
         self.block_out = None
         self.block_in_raw = None  # log blocks are always not compressed
 
+        self._lock = threading.Lock()
+        self.is_query_executing = False
+
         super(Connection, self).__init__()
 
     def get_description(self):
         return '{}:{}'.format(self.host, self.port)
 
     def force_connect(self):
+        self.check_query_execution()
+
         if not self.connected:
             self.connect()
 
@@ -355,6 +361,8 @@ class Connection(object):
         self.block_in_raw = None
         self.block_out = None
 
+        self.is_query_executing = False
+
     def disconnect(self):
         """
         Closes connection between server and client.
@@ -496,6 +504,7 @@ class Connection(object):
             log_block(block)
 
         elif packet_type == ServerPacketTypes.END_OF_STREAM:
+            self.is_query_executing = False
             pass
 
         elif packet_type == ServerPacketTypes.TABLE_COLUMNS:
@@ -613,8 +622,19 @@ class Connection(object):
                     'Empty table "{}" structure'.format(table['name'])
                 )
 
-            block = RowOrientedBlock(table['structure'], table['data'],
-                                     types_check=types_check)
+            data = table['data']
+            block_cls = RowOrientedBlock
+
+            if self.context.client_settings['use_numpy']:
+                from .numpy.block import NumpyColumnOrientedBlock
+
+                columns = [x[0] for x in table['structure']]
+                data = [data[column].values for column in columns]
+
+                block_cls = NumpyColumnOrientedBlock
+
+            block = block_cls(table['structure'], data,
+                              types_check=types_check)
             self.send_data(block, table_name=table['name'])
 
         # Empty block, end of data transfer.
@@ -636,3 +656,12 @@ class Connection(object):
             'Unexpected packet from server {} (expected {}, got {})'
             .format(self.get_description(), expected, packet_type)
         )
+
+    def check_query_execution(self):
+        self._lock.acquire(blocking=False)
+
+        if self.is_query_executing:
+            raise errors.PartiallyConsumedQueryError()
+
+        self.is_query_executing = True
+        self._lock.release()
diff --git a/clickhouse_driver/dbapi/cursor.py b/clickhouse_driver/dbapi/cursor.py
index 64d730c..b88b359 100644
--- a/clickhouse_driver/dbapi/cursor.py
+++ b/clickhouse_driver/dbapi/cursor.py
@@ -310,8 +310,10 @@ class Cursor(object):
             self._rowcount = response
             response = None
 
-        if not response:
+        if not response or isinstance(response, int):
             self._columns = self._types = self._rows = []
+            if isinstance(response, int):
+                self._rowcount = response
             return
 
         if self._stream_results:
diff --git a/clickhouse_driver/errors.py b/clickhouse_driver/errors.py
index ef833f5..8d9ce60 100644
--- a/clickhouse_driver/errors.py
+++ b/clickhouse_driver/errors.py
@@ -444,3 +444,10 @@ class CannotParseUuidError(Error):
 
 class CannotParseDomainError(Error):
     code = ErrorCodes.CANNOT_PARSE_DOMAIN_VALUE_FROM_STRING
+
+
+class PartiallyConsumedQueryError(Error):
+    code = -1
+
+    def __str__(self):
+        return 'Simultaneous queries on single connection detected'
diff --git a/clickhouse_driver/util/compat.py b/clickhouse_driver/util/compat.py
new file mode 100644
index 0000000..bdcf3d7
--- /dev/null
+++ b/clickhouse_driver/util/compat.py
@@ -0,0 +1,27 @@
+
+# Drop this fallback when the minimum supported Python version is 3.7.
+try:
+    import threading
+except ImportError:
+    import dummy_threading as threading  # noqa: F401
+
+try:
+    # Available since tzlocal 4.0+; using it avoids the deprecation
+    # warning raised by accessing get_localzone().key directly.
+    from tzlocal import get_localzone_name
+
+    def get_localzone_name_compat():
+        try:
+            return get_localzone_name()
+        except Exception:
+            return None
+except ImportError:
+    from tzlocal import get_localzone
+
+    def get_localzone_name_compat():
+        try:
+            return get_localzone().key
+        except AttributeError:
+            return get_localzone().zone
+        except Exception:
+            return None
diff --git a/clickhouse_driver/util/escape.py b/clickhouse_driver/util/escape.py
index 7eb7270..630e02b 100644
--- a/clickhouse_driver/util/escape.py
+++ b/clickhouse_driver/util/escape.py
@@ -2,6 +2,8 @@ from datetime import date, datetime
 from enum import Enum
 from uuid import UUID
 
+from pytz import timezone
+
 
 escape_chars_map = {
     "\b": "\\b",
@@ -17,12 +19,21 @@ escape_chars_map = {
 }
 
 
-def escape_param(item):
+def escape_datetime(item, context):
+    server_tz = timezone(context.server_info.timezone)
+
+    if item.tzinfo is not None:
+        item = item.astimezone(server_tz)
+
+    return "'%s'" % item.strftime('%Y-%m-%d %H:%M:%S')
+
+
+def escape_param(item, context):
     if item is None:
         return 'NULL'
 
     elif isinstance(item, datetime):
-        return "'%s'" % item.strftime('%Y-%m-%d %H:%M:%S')
+        return escape_datetime(item, context)
 
     elif isinstance(item, date):
         return "'%s'" % item.strftime('%Y-%m-%d')
@@ -31,13 +42,13 @@ def escape_param(item):
         return "'%s'" % ''.join(escape_chars_map.get(c, c) for c in item)
 
     elif isinstance(item, list):
-        return "[%s]" % ', '.join(str(escape_param(x)) for x in item)
+        return "[%s]" % ', '.join(str(escape_param(x, context)) for x in item)
 
     elif isinstance(item, tuple):
-        return "(%s)" % ', '.join(str(escape_param(x)) for x in item)
+        return "(%s)" % ', '.join(str(escape_param(x, context)) for x in item)
 
     elif isinstance(item, Enum):
-        return escape_param(item.value)
+        return escape_param(item.value, context)
 
     elif isinstance(item, UUID):
         return "'%s'" % str(item)
@@ -46,10 +57,10 @@ def escape_param(item):
         return item
 
 
-def escape_params(params):
+def escape_params(params, context):
     escaped = {}
 
     for key, value in params.items():
-        escaped[key] = escape_param(value)
+        escaped[key] = escape_param(value, context)
 
     return escaped
diff --git a/debian/changelog b/debian/changelog
index c464900..50e354f 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -1,8 +1,12 @@
-python-clickhouse-driver (0.2.2-1) UNRELEASED; urgency=medium
+python-clickhouse-driver (0.2.3-1) UNRELEASED; urgency=medium
 
+  [ Federico Ceratto ]
   * New upstream release
 
- -- Federico Ceratto <federico@debian.org>  Thu, 14 Oct 2021 19:13:24 +0100
+  [ Debian Janitor ]
+  * New upstream release.
+
+ -- Federico Ceratto <federico@debian.org>  Mon, 14 Mar 2022 18:07:35 -0000
 
 python-clickhouse-driver (0.2.0-1) unstable; urgency=medium
 
diff --git a/debian/patches/insert-doc-build-path.patch b/debian/patches/insert-doc-build-path.patch
index d1bbeec..dc221f1 100644
--- a/debian/patches/insert-doc-build-path.patch
+++ b/debian/patches/insert-doc-build-path.patch
@@ -2,8 +2,10 @@ Author: Federico Ceratto <federico@debian.org>
 Description: add Python import paths to conf.py
 Forwarded: not-needed
 
---- a/docs/conf.py
-+++ b/docs/conf.py
+Index: python-clickhouse-driver/docs/conf.py
+===================================================================
+--- python-clickhouse-driver.orig/docs/conf.py
++++ python-clickhouse-driver/docs/conf.py
 @@ -16,6 +16,10 @@
  # import sys
  # sys.path.insert(0, os.path.abspath('.'))
diff --git a/debian/patches/remove-sphinx-github-iframe.patch b/debian/patches/remove-sphinx-github-iframe.patch
index 8771f67..0b4a5d1 100644
--- a/debian/patches/remove-sphinx-github-iframe.patch
+++ b/debian/patches/remove-sphinx-github-iframe.patch
@@ -2,9 +2,11 @@ Author: Federico Ceratto <federico@debian.org>
 Description: remove Sign In button from main HTML page
 Forwarded: not-needed
 
---- a/docs/conf.py
-+++ b/docs/conf.py
-@@ -102,8 +102,6 @@
+Index: python-clickhouse-driver/docs/conf.py
+===================================================================
+--- python-clickhouse-driver.orig/docs/conf.py
++++ python-clickhouse-driver/docs/conf.py
+@@ -102,8 +102,6 @@ html_logo = ""
  html_static_path = ['_static']
  html_theme_options = {
      "description": "Python driver for ClickHouse",
diff --git a/docs/development.rst b/docs/development.rst
index 0f29feb..6aff38b 100644
--- a/docs/development.rst
+++ b/docs/development.rst
@@ -14,6 +14,18 @@ Running tests locally
 
 Install desired Python version with system package manager/pyenv/another manager.
 
+Install test requirements and build package:
+
+    .. code-block:: bash
+
+        python testsrequire.py && python setup.py develop
+
+You should install cython if you want to change ``*.pyx`` files:
+
+    .. code-block:: bash
+
+        pip install cython
+
 ClickHouse on host machine
 ^^^^^^^^^^^^^^^^^^^^^^^^^^
 
@@ -24,7 +36,7 @@ Run tests:
 
     .. code-block:: bash
 
-       python setup.py test
+        py.test -v
 
 ClickHouse in docker
 ^^^^^^^^^^^^^^^^^^^^
@@ -33,13 +45,13 @@ Create container desired version of ``clickhouse-server``:
 
     .. code-block:: bash
 
-       docker run --rm -e "TZ=Europe/Moscow" -p 127.0.0.1:9000:9000 --name test-clickhouse-server yandex/clickhouse-server:$VERSION
+        docker run --rm -e "TZ=Europe/Moscow" -p 127.0.0.1:9000:9000 --name test-clickhouse-server yandex/clickhouse-server:$VERSION
 
 Create container with the same version of ``clickhouse-client``:
 
     .. code-block:: bash
 
-       docker run --rm --entrypoint "/bin/sh" --name test-clickhouse-client --link test-clickhouse-server:clickhouse-server yandex/clickhouse-client:$VERSION -c 'while :; do sleep 1; done'
+        docker run --rm --entrypoint "/bin/sh" --name test-clickhouse-client --link test-clickhouse-server:clickhouse-server yandex/clickhouse-client:$VERSION -c 'while :; do sleep 1; done'
 
 Create ``clickhouse-client`` script on your host machine:
 
@@ -64,4 +76,12 @@ Set ``TZ=UTC`` and run tests:
     .. code-block:: bash
 
         export TZ=UTC
-        python setup.py test
+        py.test -v
+
+GitHub Actions in forked repository
+-----------------------------------
+
+Workflows in forked repositories can be used for running tests.
+
+Workflows don't run in forked repositories by default.
+You must enable GitHub Actions in the **Actions** tab of the forked repository.
diff --git a/docs/installation.rst b/docs/installation.rst
index a487aa6..ef34d57 100644
--- a/docs/installation.rst
+++ b/docs/installation.rst
@@ -21,9 +21,11 @@ Example for `python:alpine` docker image:
 
 By default there are wheels for Linux, Mac OS X and Windows.
 
-Packages for Linux and Mac OS X are available for python: 3.4 -- 3.9.
+Packages for Linux and Mac OS X are available for python: 3.6 -- 3.10.
 
-Packages for Windows are available for python: 3.5 -- 3.9.
+Packages for Windows are available for python: 3.6 -- 3.10.
+
+Starting from version *0.2.3* there are wheels for musl-based Linux distributions.
 
 Dependencies
 ------------
diff --git a/docs/quickstart.rst b/docs/quickstart.rst
index e462d55..20535c0 100644
--- a/docs/quickstart.rst
+++ b/docs/quickstart.rst
@@ -57,15 +57,18 @@ Of course queries can and should be parameterized to avoid SQL injections:
         [('2018-10-21', 3)]
 
 Percent symbols in inlined constants should be doubled if you mix constants
-with ``%`` symbol and ``%(x)s`` parameters.
+with ``%`` symbol and ``%(myvar)s`` parameters.
 
     .. code-block:: python
 
         >>> client.execute(
-        ...     "SELECT 'test' like '%%es%%', %(x)s",
-        ...     {'x': 1}
+        ...     "SELECT 'test' like '%%es%%', %(myvar)s",
+        ...     {'myvar': 1}
         ... )
 
+NOTE: formatting queries using Python's f-strings or concatenation can lead to SQL injections.
+Use ``%(myvar)s`` parameters instead.
+
 Customisation ``SELECT`` output with ``FORMAT`` clause is not supported.
 
 .. _execute-with-progress:
diff --git a/docs/types.rst b/docs/types.rst
index 2a335fb..2227ea9 100644
--- a/docs/types.rst
+++ b/docs/types.rst
@@ -169,6 +169,14 @@ INSERT types: :data:`~types.NoneType`, ``T``.
 SELECT type: :data:`~types.NoneType`, ``T``.
 
 
+Bool
+----
+
+INSERT types: :class:`bool`.
+
+SELECT type: :class:`bool`.
+
+
 UUID
 ----
 
@@ -275,10 +283,10 @@ INSERT types: :class:`list`, :class:`tuple`.
 SELECT type: :class:`tuple`.
 
 
-Nested
+Nested(flatten_nested=1, default)
 ------
 
-Nested type is represented by sequence of arrays. In example below actual
+Nested type is represented by sequence of arrays when flatten_nested=1. In example below actual
 columns for are ``col.name`` and ``col.version``.
 
     .. code-block:: sql
@@ -327,6 +335,68 @@ Inserting data into nested column with ``clickhouse-driver``:
           (['a', 'b', 'c'], [100, 200, 300]),
       ])
 
+Nested(flatten_nested=0)
+------------------------
+
+Nested type is represented by array of named tuples when flatten_nested=0.
+
+    .. code-block:: sql
+
+      :) SET flatten_nested = 0;
+
+      SET flatten_nested = 0
+
+      Ok.
+
+      0 rows in set. Elapsed: 0.006 sec.
+
+      :) CREATE TABLE test_nested (col Nested(name String, version UInt16)) Engine = Memory;
+
+      CREATE TABLE test_nested
+      (
+          `col` Nested(name String, version UInt16)
+      )
+      ENGINE = Memory
+
+      Ok.
+
+      0 rows in set. Elapsed: 0.005 sec.
+
+      :) DESCRIBE TABLE test_nested FORMAT TSV;
+
+      DESCRIBE TABLE test_nested
+      FORMAT TSV
+
+      col	Nested(name String, version UInt16)					
+
+      1 rows in set. Elapsed: 0.004 sec.
+
+Inserting data into nested column in ``clickhouse-client``:
+
+    .. code-block:: sql
+
+      :) INSERT INTO test_nested VALUES ([('a', 100), ('b', 200), ('c', 300)]);
+
+      INSERT INTO test_nested VALUES
+
+      Ok.
+
+      1 rows in set. Elapsed: 0.003 sec.
+
+Inserting data into nested column with ``clickhouse-driver``:
+
+    .. code-block:: python
+
+      client.execute(
+          'INSERT INTO test_nested VALUES',
+          [([('a', 100), ('b', 200), ('c', 300)],)]
+      )
+      # or
+      client.execute(
+          'INSERT INTO test_nested VALUES',
+          [{'col': [{'name': 'a', 'version': 100}, {'name': 'b', 'version': 200}, {'name': 'c', 'version': 300}]}]
+      )
+
 Map(key, value)
 ------------------
 
diff --git a/setup.py b/setup.py
index fef1947..36abced 100644
--- a/setup.py
+++ b/setup.py
@@ -12,10 +12,8 @@ except ImportError:
 else:
     USE_CYTHON = True
 
-USE_NUMPY = bool(os.getenv('USE_NUMPY', False))
 CYTHON_TRACE = bool(os.getenv('CYTHON_TRACE', False))
 
-
 here = os.path.abspath(os.path.dirname(__file__))
 
 
@@ -64,19 +62,6 @@ if USE_CYTHON:
 
     extensions = cythonize(extensions, compiler_directives=compiler_directives)
 
-tests_require = [
-    'nose',
-    'parameterized',
-    'freezegun',
-    'lz4<=3.0.1; implementation_name=="pypy"',
-    'lz4; implementation_name!="pypy"',
-    'zstd',
-    'clickhouse-cityhash>=1.0.2.1'
-]
-
-if USE_NUMPY:
-    tests_require.extend(['numpy', 'pandas'])
-
 setup(
     name='clickhouse-driver',
     version=read_version(),
@@ -110,12 +95,12 @@ setup(
 
         'Programming Language :: SQL',
         'Programming Language :: Python :: 3',
-        'Programming Language :: Python :: 3.4',
         'Programming Language :: Python :: 3.5',
         'Programming Language :: Python :: 3.6',
         'Programming Language :: Python :: 3.7',
         'Programming Language :: Python :: 3.8',
         'Programming Language :: Python :: 3.9',
+        'Programming Language :: Python :: 3.10',
         'Programming Language :: Python :: Implementation :: PyPy',
 
         'Topic :: Database',
@@ -135,7 +120,8 @@ setup(
     python_requires='>=3.4.*, <4',
     install_requires=[
         'pytz',
-        'tzlocal'
+        'tzlocal',
+        'tzlocal<2.1; python_version=="3.5"'
     ],
     ext_modules=extensions,
     extras_require={
@@ -147,6 +133,5 @@ setup(
         'zstd': ['zstd', 'clickhouse-cityhash>=1.0.2.1'],
         'numpy': ['numpy>=1.12.0', 'pandas>=0.24.0']
     },
-    test_suite='nose.collector',
-    tests_require=tests_require
+    test_suite='pytest'
 )
diff --git a/tests/columns/test_bool.py b/tests/columns/test_bool.py
new file mode 100644
index 0000000..b35f7b1
--- /dev/null
+++ b/tests/columns/test_bool.py
@@ -0,0 +1,71 @@
+from tests.testcase import BaseTestCase
+from clickhouse_driver import errors
+
+
+class BoolTestCase(BaseTestCase):
+    required_server_version = (21, 12)
+
+    def test_simple(self):
+        columns = ("a Bool")
+
+        data = [(1,), (0,), (True,), (False,), (None,), ("False",), ("",)]
+        with self.create_table(columns):
+            self.client.execute('INSERT INTO test (a) VALUES', data)
+
+            query = 'SELECT * FROM test'
+            inserted = self.emit_cli(query)
+            self.assertEqual(
+                inserted, (
+                    'true\n'
+                    'false\n'
+                    'true\n'
+                    'false\n'
+                    'false\n'
+                    'true\n'
+                    'false\n'
+                )
+            )
+
+            inserted = self.client.execute(query)
+            self.assertEqual(
+                inserted, [
+                    (True, ),
+                    (False, ),
+                    (True, ),
+                    (False, ),
+                    (False, ),
+                    (True, ),
+                    (False, ),
+                ]
+            )
+
+    def test_errors(self):
+        columns = "a Bool"
+        with self.create_table(columns):
+            with self.assertRaises(errors.TypeMismatchError):
+                self.client.execute(
+                    'INSERT INTO test (a) VALUES', [(1, )],
+                    types_check=True
+                )
+
+    def test_nullable(self):
+        columns = "a Nullable(Bool)"
+
+        data = [(None, ), (True, ), (False, )]
+        with self.create_table(columns):
+            self.client.execute('INSERT INTO test (a) VALUES', data)
+
+            query = 'SELECT * FROM test'
+            inserted = self.emit_cli(query)
+            self.assertEqual(
+                inserted, (
+                    '\\N\ntrue\nfalse\n'
+                )
+            )
+
+            inserted = self.client.execute(query)
+            self.assertEqual(
+                inserted, [
+                    (None, ), (True, ), (False, ),
+                ]
+            )
diff --git a/tests/columns/test_datetime.py b/tests/columns/test_datetime.py
index 45e624d..8ed60ae 100644
--- a/tests/columns/test_datetime.py
+++ b/tests/columns/test_datetime.py
@@ -1,33 +1,14 @@
-from contextlib import contextmanager
 from datetime import date, datetime
-import os
-from time import tzset
 from unittest.mock import patch
 
 from pytz import timezone, utc, UnknownTimeZoneError
 import tzlocal
 
 from tests.testcase import BaseTestCase
-from tests.util import require_server_version
+from tests.util import require_server_version, patch_env_tz
 
 
-class BaseDateTimeTestCase(BaseTestCase):
-    def setUp(self):
-        super(BaseDateTimeTestCase, self).setUp()
-
-        # Bust tzlocal cache.
-        try:
-            tzlocal.unix._cache_tz = None
-        except AttributeError:
-            pass
-
-        try:
-            tzlocal.win32._cache_tz = None
-        except AttributeError:
-            pass
-
-
-class DateTimeTestCase(BaseDateTimeTestCase):
+class DateTimeTestCase(BaseTestCase):
     def test_simple(self):
         with self.create_table('a Date, b DateTime'):
             data = [(date(2012, 10, 25), datetime(2012, 10, 25, 14, 7, 19))]
@@ -82,10 +63,15 @@ class DateTimeTestCase(BaseDateTimeTestCase):
             self.assertEqual(inserted, data)
 
     def test_handle_errors_from_tzlocal(self):
-        with patch('tzlocal.get_localzone') as mocked_get_localzone:
-            mocked_get_localzone.side_effect = UnknownTimeZoneError()
+        with patch('tzlocal.get_localzone') as mocked:
+            mocked.side_effect = UnknownTimeZoneError()
             self.client.execute('SELECT now()')
 
+        if hasattr(tzlocal, 'get_localzone_name'):
+            with patch('tzlocal.get_localzone_name') as mocked:
+                mocked.side_effect = None
+                self.client.execute('SELECT now()')
+
     @require_server_version(20, 1, 2)
     def test_datetime64_frac_trunc(self):
         with self.create_table('a DateTime64'):
@@ -183,21 +169,9 @@ class DateTimeTestCase(BaseDateTimeTestCase):
             )
 
 
-class DateTimeTimezonesTestCase(BaseDateTimeTestCase):
+class DateTimeTimezonesTestCase(BaseTestCase):
     dt_type = 'DateTime'
 
-    @contextmanager
-    def patch_env_tz(self, tz_name):
-        # Although in many cases, changing the TZ environment variable may
-        # affect the output of functions like localtime() without calling
-        # tzset(), this behavior should not be relied on.
-        # https://docs.python.org/3/library/time.html#time.tzset
-        with patch.dict(os.environ, {'TZ': tz_name}):
-            tzset()
-            yield
-
-        tzset()
-
     # Asia/Kamchatka = UTC+12
     # Asia/Novosibirsk = UTC+7
     # Europe/Moscow = UTC+3
@@ -229,7 +203,7 @@ class DateTimeTimezonesTestCase(BaseDateTimeTestCase):
         offset = timezone(server_tz_name).utcoffset(self.dt).total_seconds()
         timestamp = 1500010800 - int(offset)
 
-        with self.patch_env_tz('Asia/Novosibirsk'):
+        with patch_env_tz('Asia/Novosibirsk'):
             with self.create_table(self.table_columns()):
                 self.client.execute(
                     'INSERT INTO test (a) VALUES', [(self.dt, )]
@@ -260,7 +234,7 @@ class DateTimeTimezonesTestCase(BaseDateTimeTestCase):
 
         settings = {'use_client_time_zone': True}
 
-        with self.patch_env_tz('Asia/Novosibirsk'):
+        with patch_env_tz('Asia/Novosibirsk'):
             with self.create_table(self.table_columns()):
                 self.client.execute(
                     'INSERT INTO test (a) VALUES', [(self.dt, )],
@@ -296,7 +270,7 @@ class DateTimeTimezonesTestCase(BaseDateTimeTestCase):
         server_tz_name = self.client.execute('SELECT timezone()')[0][0]
         offset = timezone(server_tz_name).utcoffset(self.dt)
 
-        with self.patch_env_tz('Asia/Novosibirsk'):
+        with patch_env_tz('Asia/Novosibirsk'):
             with self.create_table(self.table_columns()):
                 self.client.execute(
                     'INSERT INTO test (a) VALUES', [(self.dt_tz, )]
@@ -329,7 +303,7 @@ class DateTimeTimezonesTestCase(BaseDateTimeTestCase):
 
         settings = {'use_client_time_zone': True}
 
-        with self.patch_env_tz('Asia/Novosibirsk'):
+        with patch_env_tz('Asia/Novosibirsk'):
             with self.create_table(self.table_columns()):
                 self.client.execute(
                     'INSERT INTO test (a) VALUES', [(self.dt_tz, )],
@@ -366,7 +340,7 @@ class DateTimeTimezonesTestCase(BaseDateTimeTestCase):
         # into column with timezone Asia/Novosibirsk
         # using server's timezone (Europe/Moscow)
 
-        with self.patch_env_tz('Europe/Moscow'):
+        with patch_env_tz('Europe/Moscow'):
             with self.create_table(self.table_columns(with_tz=True)):
                 self.client.execute(
                     'INSERT INTO test (a) VALUES', [(self.dt, )]
@@ -402,7 +376,7 @@ class DateTimeTimezonesTestCase(BaseDateTimeTestCase):
 
         settings = {'use_client_time_zone': True}
 
-        with self.patch_env_tz('Europe/Moscow'):
+        with patch_env_tz('Europe/Moscow'):
             with self.create_table(self.table_columns(with_tz=True)):
                 self.client.execute(
                     'INSERT INTO test (a) VALUES', [(self.dt, )],
@@ -437,7 +411,7 @@ class DateTimeTimezonesTestCase(BaseDateTimeTestCase):
         # into column with timezone Asia/Novosibirsk
         # using server's timezone (Europe/Moscow)
 
-        with self.patch_env_tz('Europe/Moscow'):
+        with patch_env_tz('Europe/Moscow'):
             with self.create_table(self.table_columns(with_tz=True)):
                 self.client.execute(
                     'INSERT INTO test (a) VALUES', [(self.dt_tz, )]
@@ -477,7 +451,7 @@ class DateTimeTimezonesTestCase(BaseDateTimeTestCase):
 
         settings = {'use_client_time_zone': True}
 
-        with self.patch_env_tz('Europe/Moscow'):
+        with patch_env_tz('Europe/Moscow'):
             with self.create_table(self.table_columns(with_tz=True)):
                 self.client.execute(
                     'INSERT INTO test (a) VALUES', [(self.dt_tz, )],
diff --git a/tests/columns/test_decimal.py b/tests/columns/test_decimal.py
index 337478b..ff6c54b 100644
--- a/tests/columns/test_decimal.py
+++ b/tests/columns/test_decimal.py
@@ -140,7 +140,7 @@ class DecimalTestCase(BaseTestCase):
 
     def test_nullable(self):
         with self.create_table('a Nullable(Decimal32(3))'):
-            data = [(300.42, ), (None, ), ]
+            data = [(300.42, ), (None, )]
             self.client.execute(
                 'INSERT INTO test (a) VALUES', data
             )
@@ -158,7 +158,7 @@ class DecimalTestCase(BaseTestCase):
 
     def test_no_scale(self):
         with self.create_table('a Decimal32(0)'):
-            data = [(2147483647, ), ]
+            data = [(2147483647, )]
             self.client.execute(
                 'INSERT INTO test (a) VALUES', data
             )
@@ -171,7 +171,7 @@ class DecimalTestCase(BaseTestCase):
             self.assertEqual(inserted, [(Decimal('2147483647'), )])
 
     def test_type_mismatch(self):
-        data = [(2147483649,), ]
+        data = [(2147483649, )]
         with self.create_table('a Decimal32(0)'):
             with self.assertRaises(errors.TypeMismatchError) as e:
                 self.client.execute(
@@ -187,6 +187,24 @@ class DecimalTestCase(BaseTestCase):
 
             self.assertIn('Column a', str(e.exception))
 
+    def test_type_mismatch_scale(self):
+        data = [(1.234,)]
+        with self.create_table('a Decimal32(2)'):
+            with self.assertRaises(errors.TypeMismatchError) as e:
+                self.client.execute(
+                    'INSERT INTO test (a) VALUES', data, types_check=True
+                )
+
+            self.assertIn('1.234 for column "a"', str(e.exception))
+
+            # Without types_check decimal will be cropped.
+            self.client.execute('INSERT INTO test (a) VALUES', data)
+            query = 'SELECT * FROM test'
+            inserted = self.emit_cli(query)
+            self.assertEqual(inserted, '1.23\n')
+            inserted = self.client.execute(query)
+            self.assertEqual(inserted, [(Decimal('1.23'), )])
+
     def test_preserve_precision(self):
         data = [(1.66, ), (1.15, )]
 
diff --git a/tests/columns/test_nested.py b/tests/columns/test_nested.py
new file mode 100644
index 0000000..3b50b87
--- /dev/null
+++ b/tests/columns/test_nested.py
@@ -0,0 +1,112 @@
+from tests.testcase import BaseTestCase
+from tests.util import require_server_version
+from clickhouse_driver.columns import nestedcolumn
+
+
+class NestedTestCase(BaseTestCase):
+    def entuple(self, lst):
+        return tuple(
+            self.entuple(x) if isinstance(x, list) else x for x in lst
+        )
+
+    @require_server_version(21, 3, 13)
+    def test_simple(self):
+        columns = 'n Nested(i Int32, s String)'
+
+        # INSERT INTO test_nested VALUES ([(0, 'a'), (1, 'b')]);
+        data = [([(0, 'a'), (1, 'b')],)]
+
+        with self.create_table(columns, flatten_nested=0):
+            self.client.execute(
+                'INSERT INTO test (n) VALUES', data
+            )
+
+            query = 'SELECT * FROM test'
+            inserted = self.emit_cli(query)
+            self.assertEqual(inserted, "[(0,'a'),(1,'b')]\n")
+
+            inserted = self.client.execute(query)
+            self.assertEqual(inserted, data)
+
+            projected_i = self.client.execute('SELECT n.i FROM test')
+            self.assertEqual(
+                projected_i,
+                [([0, 1],)]
+            )
+
+            projected_s = self.client.execute('SELECT n.s FROM test')
+            self.assertEqual(
+                projected_s,
+                [(['a', 'b'],)]
+            )
+
+    @require_server_version(21, 3, 13)
+    def test_multiple_rows(self):
+        columns = 'n Nested(i Int32, s String)'
+
+        data = [([(0, 'a'), (1, 'b')],), ([(3, 'd'), (4, 'e')],)]
+
+        with self.create_table(columns, flatten_nested=0):
+            self.client.execute(
+                'INSERT INTO test (n) VALUES', data
+            )
+
+            query = 'SELECT * FROM test'
+            inserted = self.emit_cli(query)
+            self.assertEqual(
+                inserted,
+                "[(0,'a'),(1,'b')]\n[(3,'d'),(4,'e')]\n"
+            )
+
+            inserted = self.client.execute(query)
+            self.assertEqual(inserted, data)
+
+    @require_server_version(21, 3, 13)
+    def test_dict(self):
+        columns = 'n Nested(i Int32, s String)'
+
+        data = [
+            {'n': [{'i': 0, 's': 'a'}, {'i': 1, 's': 'b'}]},
+            {'n': [{'i': 3, 's': 'd'}, {'i': 4, 's': 'e'}]},
+        ]
+
+        with self.create_table(columns, flatten_nested=0):
+            self.client.execute(
+                'INSERT INTO test (n) VALUES', data
+            )
+
+            query = 'SELECT * FROM test'
+            inserted = self.emit_cli(query)
+            self.assertEqual(
+                inserted,
+                "[(0,'a'),(1,'b')]\n[(3,'d'),(4,'e')]\n"
+            )
+
+            inserted = self.client.execute(query)
+            self.assertEqual(
+                inserted,
+                [([(0, 'a'), (1, 'b')],), ([(3, 'd'), (4, 'e')],)]
+            )
+
+    def test_get_nested_columns(self):
+        self.assertEqual(
+            nestedcolumn.get_nested_columns(
+                'Nested(a Tuple(Array(Int8)),\n b Nullable(String))',
+            ),
+            ['Tuple(Array(Int8))', 'Nullable(String)']
+        )
+
+    def test_get_columns_with_types(self):
+        self.assertEqual(
+            nestedcolumn.get_columns_with_types(
+                'Nested(a Tuple(Array(Int8)),\n b Nullable(String))',
+            ),
+            [('a', 'Tuple(Array(Int8))'), ('b', 'Nullable(String)')]
+        )
+
+    def test_get_inner_spec(self):
+        inner = 'a Tuple(Array(Int8), Array(Int64)), b Nullable(String)'
+        self.assertEqual(
+            nestedcolumn.get_inner_spec('Nested({}) dummy '.format(inner)),
+            inner
+        )
diff --git a/tests/conftest.py b/tests/conftest.py
new file mode 100644
index 0000000..9a24f8e
--- /dev/null
+++ b/tests/conftest.py
@@ -0,0 +1,11 @@
+import pytest
+
+
+@pytest.fixture(autouse=True)
+def assert_empty_output(capfd):
+    yield
+
+    captured = capfd.readouterr()
+
+    assert captured.out == ''
+    assert captured.err == ''
diff --git a/tests/docker-compose.yml b/tests/docker-compose.yml
new file mode 100644
index 0000000..e04d1b9
--- /dev/null
+++ b/tests/docker-compose.yml
@@ -0,0 +1,16 @@
+version: '3'
+
+services:
+  clickhouse-server:
+    image: "yandex/clickhouse-server:$VERSION"
+    container_name: test-clickhouse-server
+    environment:
+      - TZ=Europe/Moscow
+    ports:
+      - "127.0.0.1:9000:9000"
+
+  clickhouse-client:
+    image: "yandex/clickhouse-client:$VERSION"
+    container_name: test-clickhouse-client
+    entrypoint: /bin/sh
+    command: [-c, 'while :; do sleep 1; done']
diff --git a/tests/numpy/columns/test_datetime.py b/tests/numpy/columns/test_datetime.py
index 855ab85..60119f2 100644
--- a/tests/numpy/columns/test_datetime.py
+++ b/tests/numpy/columns/test_datetime.py
@@ -1,7 +1,4 @@
-from contextlib import contextmanager
 from datetime import datetime, date
-import os
-from time import tzset
 from unittest.mock import patch
 
 try:
@@ -18,24 +15,10 @@ from pytz import timezone, utc, UnknownTimeZoneError
 import tzlocal
 
 from tests.numpy.testcase import NumpyBaseTestCase
-from tests.util import require_server_version
+from tests.util import require_server_version, patch_env_tz
 
 
 class BaseDateTimeTestCase(NumpyBaseTestCase):
-    def setUp(self):
-        super(BaseDateTimeTestCase, self).setUp()
-
-        # Bust tzlocal cache.
-        try:
-            tzlocal.unix._cache_tz = None
-        except AttributeError:
-            pass
-
-        try:
-            tzlocal.win32._cache_tz = None
-        except AttributeError:
-            pass
-
     def make_numpy_d64ns(self, items):
         return np.array(items, dtype='datetime64[ns]')
 
@@ -121,10 +104,15 @@ class DateTimeTestCase(BaseDateTimeTestCase):
             self.assertEqual(inserted[0].dtype, object)
 
     def test_handle_errors_from_tzlocal(self):
-        with patch('tzlocal.get_localzone') as mocked_get_localzone:
-            mocked_get_localzone.side_effect = UnknownTimeZoneError()
+        with patch('tzlocal.get_localzone') as mocked:
+            mocked.side_effect = UnknownTimeZoneError()
             self.client.execute('SELECT now()')
 
+        if hasattr(tzlocal, 'get_localzone_name'):
+            with patch('tzlocal.get_localzone_name') as mocked:
+                mocked.side_effect = None
+                self.client.execute('SELECT now()')
+
     @require_server_version(20, 1, 2)
     def test_datetime64_frac_trunc(self):
         with self.create_table('a DateTime64'):
@@ -204,18 +192,6 @@ class DateTimeTimezonesTestCase(BaseDateTimeTestCase):
         return pd.to_datetime(np.array([dt] * 2, dtype=dtype)) \
             .tz_localize(tz_name).to_numpy(dtype)
 
-    @contextmanager
-    def patch_env_tz(self, tz_name):
-        # Although in many cases, changing the TZ environment variable may
-        # affect the output of functions like localtime() without calling
-        # tzset(), this behavior should not be relied on.
-        # https://docs.python.org/3/library/time.html#time.tzset
-        with patch.dict(os.environ, {'TZ': tz_name}):
-            tzset()
-            yield
-
-        tzset()
-
     # Asia/Kamchatka = UTC+12
     # Asia/Novosibirsk = UTC+7
     # Europe/Moscow = UTC+3
@@ -257,7 +233,7 @@ class DateTimeTimezonesTestCase(BaseDateTimeTestCase):
         offset = timezone(server_tz_name).utcoffset(self.dt).total_seconds()
         timestamp = 1500010800 - int(offset)
 
-        with self.patch_env_tz('Asia/Novosibirsk'):
+        with patch_env_tz('Asia/Novosibirsk'):
             with self.create_table(self.table_columns()):
                 self.client.execute(
                     'INSERT INTO test (a) VALUES', [self.dt_arr], columnar=True
@@ -290,7 +266,7 @@ class DateTimeTimezonesTestCase(BaseDateTimeTestCase):
 
         settings = {'use_client_time_zone': True}
 
-        with self.patch_env_tz('Asia/Novosibirsk'):
+        with patch_env_tz('Asia/Novosibirsk'):
             with self.create_table(self.table_columns()):
                 self.client.execute(
                     'INSERT INTO test (a) VALUES', [self.dt_arr],
@@ -329,7 +305,7 @@ class DateTimeTimezonesTestCase(BaseDateTimeTestCase):
         server_tz_name = self.client.execute('SELECT timezone()')[0][0]
         offset = timezone(server_tz_name).utcoffset(self.dt)
 
-        with self.patch_env_tz('Asia/Novosibirsk'):
+        with patch_env_tz('Asia/Novosibirsk'):
             with self.create_table(self.table_columns()):
                 self.client.execute(
                     'INSERT INTO test (a) VALUES', [self.dt_tz], columnar=True
@@ -365,7 +341,7 @@ class DateTimeTimezonesTestCase(BaseDateTimeTestCase):
 
         settings = {'use_client_time_zone': True}
 
-        with self.patch_env_tz('Asia/Novosibirsk'):
+        with patch_env_tz('Asia/Novosibirsk'):
             with self.create_table(self.table_columns()):
                 self.client.execute(
                     'INSERT INTO test (a) VALUES', [self.dt_tz],
@@ -405,7 +381,7 @@ class DateTimeTimezonesTestCase(BaseDateTimeTestCase):
         # into column with timezone Asia/Novosibirsk
         # using server's timezone (Europe/Moscow)
 
-        with self.patch_env_tz('Europe/Moscow'):
+        with patch_env_tz('Europe/Moscow'):
             with self.create_table(self.table_columns(with_tz=True)):
                 self.client.execute(
                     'INSERT INTO test (a) VALUES', [self.dt_arr], columnar=True
@@ -441,7 +417,7 @@ class DateTimeTimezonesTestCase(BaseDateTimeTestCase):
 
         settings = {'use_client_time_zone': True}
 
-        with self.patch_env_tz('Europe/Moscow'):
+        with patch_env_tz('Europe/Moscow'):
             with self.create_table(self.table_columns(with_tz=True)):
                 self.client.execute(
                     'INSERT INTO test (a) VALUES', [self.dt_arr],
@@ -477,7 +453,7 @@ class DateTimeTimezonesTestCase(BaseDateTimeTestCase):
         # into column with timezone Asia/Novosibirsk
         # using server's timezone (Europe/Moscow)
 
-        with self.patch_env_tz('Europe/Moscow'):
+        with patch_env_tz('Europe/Moscow'):
             with self.create_table(self.table_columns(with_tz=True)):
                 self.client.execute(
                     'INSERT INTO test (a) VALUES', [self.dt_tz], columnar=True
@@ -516,7 +492,7 @@ class DateTimeTimezonesTestCase(BaseDateTimeTestCase):
 
         settings = {'use_client_time_zone': True}
 
-        with self.patch_env_tz('Europe/Moscow'):
+        with patch_env_tz('Europe/Moscow'):
             with self.create_table(self.table_columns(with_tz=True)):
                 self.client.execute(
                     'INSERT INTO test (a) VALUES', [self.dt_tz],
diff --git a/tests/numpy/columns/test_other.py b/tests/numpy/columns/test_other.py
index 540f325..ca8971b 100644
--- a/tests/numpy/columns/test_other.py
+++ b/tests/numpy/columns/test_other.py
@@ -1,11 +1,7 @@
-from clickhouse_driver import errors
-
-try:
-    from clickhouse_driver.columns.numpy.service import \
-        get_numpy_column_by_spec
-except ImportError:
-    get_numpy_column_by_spec = None
+from parameterized import parameterized
 
+from clickhouse_driver import errors
+from clickhouse_driver.columns.service import get_column_by_spec
 from clickhouse_driver.context import Context
 
 from tests.numpy.testcase import NumpyBaseTestCase
@@ -14,27 +10,20 @@ from tests.numpy.testcase import NumpyBaseTestCase
 class OtherColumnsTestCase(NumpyBaseTestCase):
     def get_column(self, spec):
         ctx = Context()
-        ctx.client_settings = {'strings_as_bytes': False}
-        return get_numpy_column_by_spec(spec, {'context': ctx})
-
-    def test_enum(self):
-        col = self.get_column("Enum8('hello' = 1, 'world' = 2)")
-        self.assertIsNotNone(col)
-
-    def test_decimal(self):
-        col = self.get_column('Decimal(8, 4)')
-        self.assertIsNotNone(col)
-
-    def test_array(self):
-        col = self.get_column('Array(String)')
-        self.assertIsNotNone(col)
-
-    def test_tuple(self):
-        col = self.get_column('Tuple(String)')
-        self.assertIsNotNone(col)
-
-    def test_simple_aggregation_function(self):
-        col = self.get_column('SimpleAggregateFunction(any, Int32)')
+        ctx.client_settings = {'strings_as_bytes': False, 'use_numpy': True}
+        return get_column_by_spec(spec, {'context': ctx})
+
+    @parameterized.expand([
+        ("Enum8('hello' = 1, 'world' = 2)", ),
+        ('Decimal(8, 4)', ),
+        ('Array(String)', ),
+        ('Tuple(String)', ),
+        ('SimpleAggregateFunction(any, Int32)', ),
+        ('Map(String, String)', ),
+        ('Array(LowCardinality(String))', )
+    ])
+    def test_generic_type(self, spec):
+        col = self.get_column(spec)
         self.assertIsNotNone(col)
 
     def test_get_unknown_column(self):
diff --git a/tests/numpy/test_external_tables.py b/tests/numpy/test_external_tables.py
new file mode 100644
index 0000000..b0affd0
--- /dev/null
+++ b/tests/numpy/test_external_tables.py
@@ -0,0 +1,50 @@
+try:
+    import numpy as np
+    import pandas as pd
+except ImportError:
+    np = None
+    pd = None
+
+from tests.numpy.testcase import NumpyBaseTestCase
+
+
+class ExternalTablesTestCase(NumpyBaseTestCase):
+    def test_select(self):
+        tables = [{
+            'name': 'test',
+            'structure': [('x', 'Int32'), ('y', 'String')],
+            'data': pd.DataFrame({
+                'x': [100, 500],
+                'y': ['abc', 'def']
+            })
+        }]
+        rv = self.client.execute(
+            'SELECT * FROM test', external_tables=tables, columnar=True
+        )
+        self.assertArraysListEqual(
+            rv, [np.array([100, 500]), np.array(['abc', 'def'])]
+        )
+
+    def test_send_empty_table(self):
+        tables = [{
+            'name': 'test',
+            'structure': [('x', 'Int32')],
+            'data': pd.DataFrame({'x': []})
+        }]
+        rv = self.client.execute(
+            'SELECT * FROM test', external_tables=tables, columnar=True
+        )
+        self.assertArraysListEqual(rv, [])
+
+    def test_send_empty_table_structure(self):
+        tables = [{
+            'name': 'test',
+            'structure': [],
+            'data': pd.DataFrame()
+        }]
+        with self.assertRaises(ValueError) as e:
+            self.client.execute(
+                'SELECT * FROM test', external_tables=tables, columnar=True
+            )
+
+        self.assertIn('Empty table "test" structure', str(e.exception))
diff --git a/tests/numpy/testcase.py b/tests/numpy/testcase.py
index e8723c4..2231670 100644
--- a/tests/numpy/testcase.py
+++ b/tests/numpy/testcase.py
@@ -16,3 +16,8 @@ class NumpyBaseTestCase(BaseTestCase):
 
     def assertArraysEqual(self, first, second):
         return self.assertTrue((first == second).all())
+
+    def assertArraysListEqual(self, first, second):
+        self.assertEqual(len(first), len(second))
+        for x, y in zip(first, second):
+            self.assertTrue((x == y).all())
diff --git a/tests/test_buffered_reader.py b/tests/test_buffered_reader.py
index 2c3b01b..a464f88 100644
--- a/tests/test_buffered_reader.py
+++ b/tests/test_buffered_reader.py
@@ -6,7 +6,7 @@ from clickhouse_driver.bufferedreader import BufferedSocketReader
 
 class BufferedReaderTestCase(TestCase):
     def test_overflow_signed_int_string_size(self):
-        data = b'\xFF\xFE\xFC\xFE\x29\x80\x40\x00\x00\x01'
+        data = b'\xFF\xFE\xFC\xFE\xFE\xFE\xFE\xFE\x29\x80\x40\x00\x00\x01'
 
         def recv_into(buf):
             size = len(data)
diff --git a/tests/test_client.py b/tests/test_client.py
index 0b8ae36..ff1fed3 100644
--- a/tests/test_client.py
+++ b/tests/test_client.py
@@ -255,3 +255,14 @@ class ClientFromUrlTestCase(TestCase):
             c.connection.context.client_settings['opentelemetry_tracestate'],
             'state'
         )
+
+    def test_quota_key(self):
+        c = Client.from_url('clickhouse://host?quota_key=myquota')
+        self.assertEqual(
+            c.connection.context.client_settings['quota_key'], 'myquota'
+        )
+
+        c = Client.from_url('clickhouse://host')
+        self.assertEqual(
+            c.connection.context.client_settings['quota_key'], ''
+        )
diff --git a/tests/test_compression.py b/tests/test_compression.py
index dd5a0a0..77ad830 100644
--- a/tests/test_compression.py
+++ b/tests/test_compression.py
@@ -13,9 +13,18 @@ class BaseCompressionTestCase(BaseTestCase):
     supported_compressions = file_config.get('db', 'compression').split(',')
 
     def _create_client(self):
+        settings = None
+        if self.compression:
+            # Set server compression method explicitly
+            # By default server sends blocks compressed by LZ4.
+            method = self.compression
+            if self.server_version > (19, ):
+                method = method.upper()
+            settings = {'network_compression_method': method}
+
         return Client(
             self.host, self.port, self.database, self.user, self.password,
-            compression=self.compression
+            compression=self.compression, settings=settings
         )
 
     def setUp(self):
diff --git a/tests/test_connect.py b/tests/test_connect.py
index 62c9c96..3ef788b 100644
--- a/tests/test_connect.py
+++ b/tests/test_connect.py
@@ -222,6 +222,24 @@ class ConnectTestCase(BaseTestCase):
             )
             self.assertEqual(str(e.exception), msg)
 
+    def test_partially_consumed_query(self):
+        self.client.execute_iter('SELECT 1')
+
+        error = errors.PartiallyConsumedQueryError
+        with self.assertRaises(error) as e:
+            self.client.execute_iter('SELECT 1')
+
+        self.assertEqual(
+            str(e.exception),
+            'Simultaneous queries on single connection detected'
+        )
+        rv = self.client.execute('SELECT 1')
+        self.assertEqual(rv, [(1, )])
+
+    def test_read_all_packets_on_execute_iter(self):
+        list(self.client.execute_iter('SELECT 1'))
+        list(self.client.execute_iter('SELECT 1'))
+
 
 class FakeBufferedReader(BufferedReader):
     def __init__(self, inputs, bufsize=128):
diff --git a/tests/test_dbapi.py b/tests/test_dbapi.py
index 85fd2bb..9a3e663 100644
--- a/tests/test_dbapi.py
+++ b/tests/test_dbapi.py
@@ -151,6 +151,11 @@ class DBAPITestCase(DBAPITestCaseBase):
             )
             self.assertEqual(cursor.rowcount, -1)
 
+    def test_execute_insert(self):
+        with self.created_cursor() as cursor, self.create_table('a UInt8'):
+            cursor.execute('INSERT INTO test VALUES', [[4]])
+            self.assertEqual(cursor.rowcount, 1)
+
     def test_description(self):
         with self.created_cursor() as cursor:
             self.assertIsNone(cursor.description)
diff --git a/tests/test_substitution.py b/tests/test_substitution.py
index 04bb448..2268bb4 100644
--- a/tests/test_substitution.py
+++ b/tests/test_substitution.py
@@ -3,11 +3,14 @@ from __future__ import unicode_literals
 
 from datetime import date, datetime
 from decimal import Decimal
+from unittest.mock import Mock
 from uuid import UUID
 
 from enum import IntEnum, Enum
+from pytz import timezone
 
 from tests.testcase import BaseTestCase
+from tests.util import patch_env_tz
 
 
 class ParametersSubstitutionTestCase(BaseTestCase):
@@ -15,7 +18,9 @@ class ParametersSubstitutionTestCase(BaseTestCase):
     double_tpl = 'SELECT %(x)s, %(y)s'
 
     def assert_subst(self, tpl, params, sql):
-        self.assertEqual(self.client.substitute_params(tpl, params), sql)
+        ctx = Mock()
+        ctx.server_info.timezone = 'Europe/Moscow'
+        self.assertEqual(self.client.substitute_params(tpl, params, ctx), sql)
 
     def test_int(self):
         params = {'x': 123}
@@ -65,6 +70,56 @@ class ParametersSubstitutionTestCase(BaseTestCase):
         rv = self.client.execute(tpl, params)
         self.assertEqual(rv, [(dt, )])
 
+    def test_datetime_with_timezone(self):
+        dt = datetime(2017, 7, 14, 5, 40, 0)
+        params = {'x': timezone('Asia/Kamchatka').localize(dt)}
+
+        self.assert_subst(self.single_tpl, params,
+                          "SELECT '2017-07-13 20:40:00'")
+
+        tpl = (
+            'SELECT toDateTime(toInt32(toDateTime(%(x)s))), '
+            'toInt32(toDateTime(%(x)s))'
+        )
+
+        with patch_env_tz('Asia/Novosibirsk'):
+            # use server timezone
+            rv = self.client.execute(
+                tpl, params, settings={'use_client_time_zone': False}
+            )
+
+            self.assertEqual(
+                rv, [(datetime(2017, 7, 13, 20, 40, 0), 1499967600)]
+            )
+
+            query = (
+                "SELECT "
+                "toDateTime(toInt32(toDateTime('{0}', 'Asia/Kamchatka'))), "
+                "toInt32(toDateTime('{0}', 'Asia/Kamchatka'))"
+            ).format('2017-07-14 05:40:00')
+
+            rv = self.emit_cli(query, use_client_time_zone=0)
+
+            self.assertEqual(rv, '2017-07-13 20:40:00\t1499967600\n')
+
+            # use client timezone
+            rv = self.client.execute(
+                tpl, params, settings={'use_client_time_zone': True}
+            )
+
+            self.assertEqual(
+                rv, [(datetime(2017, 7, 14, 0, 40, 0), 1499967600)]
+            )
+
+            query = (
+                "SELECT "
+                "toDateTime(toInt32(toDateTime('{0}', 'Asia/Kamchatka'))), "
+                "toInt32(toDateTime('{0}', 'Asia/Kamchatka'))"
+            ).format('2017-07-14 05:40:00')
+
+            rv = self.emit_cli(query, use_client_time_zone=1)
+            self.assertEqual(rv, '2017-07-14 00:40:00\t1499967600\n')
+
     def test_string(self):
         params = {'x': 'test\t\n\x16', 'y': 'ั‚ะตัั‚\t\n\x16'}
 
@@ -172,7 +227,7 @@ class ParametersSubstitutionTestCase(BaseTestCase):
         params = object()
 
         with self.assertRaises(ValueError) as e:
-            self.client.substitute_params(self.single_tpl, params)
+            self.client.substitute_params(self.single_tpl, params, Mock())
 
         self.assertEqual(e.exception.args[0],
                          'Parameters are expected in dict form')
diff --git a/tests/util.py b/tests/util.py
index 1b2ce13..dfc756a 100644
--- a/tests/util.py
+++ b/tests/util.py
@@ -1,6 +1,12 @@
+import os
+from contextlib import contextmanager
 from functools import wraps
 import logging
 from io import StringIO
+from time import tzset
+from unittest.mock import patch
+
+import tzlocal
 
 
 def skip_by_server_version(testcase, version_required):
@@ -52,3 +58,32 @@ class LoggingCapturer(object):
 
 
 capture_logging = LoggingCapturer
+
+
+def bust_tzlocal_cache():
+    try:
+        tzlocal.unix._cache_tz = None
+        tzlocal.unix._cache_tz_name = None
+    except AttributeError:
+        pass
+
+    try:
+        tzlocal.win32._cache_tz = None
+        tzlocal.win32._cache_tz_name = None
+    except AttributeError:
+        pass
+
+
+@contextmanager
+def patch_env_tz(tz_name):
+    bust_tzlocal_cache()
+
+    # Although in many cases, changing the TZ environment variable may
+    # affect the output of functions like localtime() without calling
+    # tzset(), this behavior should not be relied on.
+    # https://docs.python.org/3/library/time.html#time.tzset
+    with patch.dict(os.environ, {'TZ': tz_name}):
+        tzset()
+        yield
+
+    tzset()
diff --git a/testsrequire.py b/testsrequire.py
new file mode 100644
index 0000000..06c98cc
--- /dev/null
+++ b/testsrequire.py
@@ -0,0 +1,27 @@
+import os
+import sys
+
+USE_NUMPY = bool(int(os.getenv('USE_NUMPY', '0')))
+
+tests_require = [
+    'pytest',
+    'parameterized',
+    'freezegun',
+    'zstd',
+    'clickhouse-cityhash>=1.0.2.1'
+]
+
+if sys.implementation.name == 'pypy':
+    tests_require.append('lz4<=3.0.1')
+else:
+    tests_require.append('lz4')
+
+if USE_NUMPY:
+    tests_require.extend(['numpy', 'pandas'])
+
+try:
+    from pip import main as pipmain
+except ImportError:
+    from pip._internal import main as pipmain
+
+pipmain(['install'] + tests_require)
diff --git a/valgrind.supp b/valgrind.supp
new file mode 100644
index 0000000..d8f3e73
--- /dev/null
+++ b/valgrind.supp
@@ -0,0 +1,6 @@
+{
+   <PyUnicode_Decode>
+   # See https://bugs.python.org/issue42176
+   Memcheck:Cond
+   fun:PyUnicode_Decode
+}