New Upstream Release - python-sh

Ready changes

Summary

Merged new upstream version: 2.0.4 (was: 1.14.3).

Diff

diff --git a/.coveragerc b/.coveragerc
index 6d889f5..e5d67e6 100644
--- a/.coveragerc
+++ b/.coveragerc
@@ -1,7 +1,7 @@
 [run]
 branch = True
-source =
-    sh.py
+source = sh
+relative_files = True
 
 [report]
 exclude_lines =
diff --git a/.flake8 b/.flake8
new file mode 100644
index 0000000..e0ea542
--- /dev/null
+++ b/.flake8
@@ -0,0 +1,3 @@
+[flake8]
+max-line-length = 88
+extend-ignore = E203
\ No newline at end of file
diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml
index 6ad7089..d9f74c8 100644
--- a/.github/FUNDING.yml
+++ b/.github/FUNDING.yml
@@ -1,6 +1,6 @@
 # These are supported funding model platforms
 
-github: [amoffat]
+github: [ecederstrand, amoffat]
 patreon: # Replace with a single Patreon username
 open_collective: # Replace with a single Open Collective username
 ko_fi: # Replace with a single Ko-fi username
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
new file mode 100644
index 0000000..3c850f7
--- /dev/null
+++ b/.github/workflows/main.yml
@@ -0,0 +1,167 @@
+# This workflow will install Python dependencies, run tests and converage with a variety of Python versions
+# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions
+
+name: Run tests
+
+on:
+  pull_request:
+  push:
+    branches:
+      - master
+
+jobs:
+  lint:
+    name: Lint
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2
+
+      - uses: actions/cache@v2
+        name: Cache pip directory
+        with:
+          path: ~/.cache/pip
+          key: ${{ runner.os }}-pip-3.9
+
+      - uses: actions/cache@v2
+        name: Cache poetry deps
+        with:
+          path: .venv
+          key: ${{ runner.os }}-build-${{ hashFiles('poetry.lock') }}-3.9
+
+      - name: Set up Python
+        uses: actions/setup-python@v2
+        with:
+          python-version: 3.9
+
+      - name: Install poetry
+        run: |
+          pip install poetry
+
+      - name: Install dependencies
+        run: |
+          poetry config virtualenvs.in-project true
+          poetry install
+
+      - name: Lint
+        run: |
+          poetry run python -m flake8 sh.py tests/*.py
+          poetry run black --check --diff sh.py tests/*.py
+          poetry run rstcheck README.rst
+          poetry run mypy sh.py
+
+  test:
+    name: Run tests
+    runs-on: ${{ matrix.os }}
+    strategy:
+      matrix:
+        os: [ubuntu-latest]
+        python-version: ["3.8", "3.9", "3.10", "3.11"]
+        use-select: [0, 1]
+        lang: [C, en_US.UTF-8]
+
+    steps:
+      - uses: actions/checkout@v2
+
+      - uses: actions/cache@v2
+        name: Cache pip directory
+        with:
+          path: ~/.cache/pip
+          key: ${{ runner.os }}-pip-3.9
+
+      - uses: actions/cache@v2
+        name: Cache poetry deps
+        env:
+          cache-name: poetry-deps
+        with:
+          path: .venv
+          key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('poetry.lock') }}-${{ matrix.python-version }}
+
+      - name: Set up Python ${{ matrix.python-version }}
+        uses: actions/setup-python@v2
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Install poetry
+        run: |
+          pip install poetry
+
+      - name: Install dependencies
+        run: |
+          poetry config virtualenvs.in-project true
+          poetry install
+
+      - name: Run tests
+        run: |
+          SH_TESTS_RUNNING=1 SH_TESTS_USE_SELECT=${{ matrix.use-select }} LANG=${{ matrix.lang }} poetry run coverage run -a -m unittest
+
+      - name: Store coverage
+        uses: actions/upload-artifact@v2
+        with:
+          name: coverage.${{ matrix.use-select }}.${{ matrix.lang }}.${{ matrix.python-version }}
+          path: .coverage
+
+  report:
+    name: Report Coverage
+    needs: test
+    runs-on: ubuntu-latest
+    steps:
+      # required because coveralls complains if we're not in a git dir
+      - uses: actions/checkout@v2
+
+      - name: Set up Python
+        uses: actions/setup-python@v2
+        with:
+          python-version: 3.9
+
+      - name: Install dependencies
+        run: |
+          pip install coverage coveralls
+
+      - name: Download coverage artifacts
+        uses: actions/download-artifact@v2
+        with:
+          path: coverage-artifacts
+
+      - name: Combine coverage
+        run: |
+          find coverage-artifacts -name .coverage | xargs coverage combine -a
+
+      - name: Report coverage
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        run: |
+          coverage report
+          coveralls --service=github
+
+  deploy:
+    name: Deploy
+    needs: test
+    runs-on: ubuntu-latest
+    if: github.ref_name == 'master'
+    steps:
+      - uses: actions/checkout@v2
+
+      - name: Get current version
+        id: get_version
+        run: echo "::set-output name=version::$(sed -n 's/^version = "\(.*\)"/\1/p' pyproject.toml)"
+
+      - name: Tag commit
+        run: |
+          git tag "${{steps.get_version.outputs.version}}" "${{github.ref_name}}"
+          git push -f origin "${{steps.get_version.outputs.version}}"
+
+      - name: Set up Python
+        uses: actions/setup-python@v2
+        with:
+          python-version: 3.9
+
+      - name: Install dependencies
+        run: pip install build
+
+      - name: Build
+        run: python -m build
+
+      - name: Publish
+        uses: pypa/gh-action-pypi-publish@release/v1
+        with:
+          password: ${{ secrets.PYPI_API_TOKEN }}
diff --git a/.gitignore b/.gitignore
index 1deb479..97725d3 100644
--- a/.gitignore
+++ b/.gitignore
@@ -2,3 +2,7 @@ __pycache__/
 *.py[co]
 .tox
 .coverage
+/.cache/
+/.venv/
+/build
+/dist
diff --git a/.python-version b/.python-version
new file mode 100644
index 0000000..9f3d4c1
--- /dev/null
+++ b/.python-version
@@ -0,0 +1 @@
+3.9.16
diff --git a/.travis.yml b/.travis.yml
deleted file mode 100644
index 57628af..0000000
--- a/.travis.yml
+++ /dev/null
@@ -1,27 +0,0 @@
-os:
-    - linux
-
-language: python
-
-python:
-    # - 2.6 No longer supported on Travis
-    - 2.7
-    # - 3.3 No longer supported on Travis
-    - 3.4
-    - 3.5
-    - 3.6
-    - 3.7
-    - 3.8
-    - pypy
-    - pypy3.5
-
-before_script:
-    - pip install -r requirements-dev.txt
-
-script:
-    - python sh.py travis
-    - python -m flake8 sh.py test.py
-    - if python -c 'import sys; sys.exit(int(not sys.version_info >= (3, 5)))' ; then python setup.py check --restructuredtext --metadata --strict ; fi
-
-after_success:
-    - coveralls
diff --git a/CHANGELOG.md b/CHANGELOG.md
index ab69be2..6685684 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,209 +1,241 @@
 # Changelog
+
+## 2.0.4 - 5/13/22
+
+- Allow `ok_code` to be used with `fg` [#655](https://github.com/amoffat/sh/pull/655)
+- Make sure `new_group` never creates a new session [#675](https://github.com/amoffat/sh/pull/675)
+
+## 2.0.2 / 2.0.3 (misversioned) - 2/13/22
+
+- Performance regression when using a generator with `_in` [#650](https://github.com/amoffat/sh/pull/650)
+- Adding test support for python 3.11
+
+## 2.0.0 - 2/9/22
+
+- Executed commands now return a unicode string by default
+- Removed magical module-like execution contexts [#636](https://github.com/amoffat/sh/issues/636)
+- Added basic asyncio support via `_async`
+- Dropped support for Python < 3.8
+- Bumped default tty size to more standard (24, 80)
+- First argument being a RunningCommand no longer automatically passes it as stdin
+- `RunningCommand.__eq__` no longer has the side effect of executing the command [#518](https://github.com/amoffat/sh/pull/531)
+- `_tee` now supports both "err" and "out" [#215](https://github.com/amoffat/sh/issues/215)
+- Removed the builtin override `cd` [link](https://github.com/amoffat/sh/pull/584#discussion_r698055681)
+- Altered process launching model to behave more expectedly [#495](https://github.com/amoffat/sh/issues/495)
+- Bugfix where `_no_out` isn't allowed with `_iter="err"` [#638](https://github.com/amoffat/sh/issues/638)
+- Allow keyword arguments to have a list of values [#529](https://github.com/amoffat/sh/issues/529)
+
+## 1.14.3 - 7/17/22
+
+- Bugfix where `Command` was not aware of default call args when wrapping the module [#559](https://github.com/amoffat/sh/pull/573)
+
 ## 1.14.1 - 10/24/20
-*   bugfix where setting `_ok_code` to not include 0, but 0 was the exit code [#545](https://github.com/amoffat/sh/pull/545)
+
+- bugfix where setting `_ok_code` to not include 0, but 0 was the exit code [#545](https://github.com/amoffat/sh/pull/545)
 
 ## 1.14.0 - 8/28/20
-*   `_env` now more lenient in accepting dictionary-like objects [#527](https://github.com/amoffat/sh/issues/527)
-*   `None` and `False` arguments now do not pass through to underlying command [#525](https://github.com/amoffat/sh/pull/525)
-*   Implemented `find_spec` on the fancy importer, which fixes some Python3.4+ issues [#536](https://github.com/amoffat/sh/pull/536)
+
+- `_env` now more lenient in accepting dictionary-like objects [#527](https://github.com/amoffat/sh/issues/527)
+- `None` and `False` arguments now do not pass through to underlying command [#525](https://github.com/amoffat/sh/pull/525)
+- Implemented `find_spec` on the fancy importer, which fixes some Python3.4+ issues [#536](https://github.com/amoffat/sh/pull/536)
 
 ## 1.13.1 - 4/28/20
-*   regression fix if `_fg=False` [#520](https://github.com/amoffat/sh/issues/520)
+
+- regression fix if `_fg=False` [#520](https://github.com/amoffat/sh/issues/520)
 
 ## 1.13.0 - 4/27/20
-*   minor Travis CI fixes [#492](https://github.com/amoffat/sh/pull/492)
-*   bugfix for boolean long options not respecting `_long_prefix` [#488](https://github.com/amoffat/sh/pull/488)
-*   fix deprecation warning on Python 3.6 regexes [#482](https://github.com/amoffat/sh/pull/482)
-*   `_pass_fds` and `_close_fds` special kwargs for controlling file descriptor inheritance in child.
-*   more efficiently closing inherited fds [#406](https://github.com/amoffat/sh/issues/406)
-*   bugfix where passing invalid dictionary to `_env` will cause a mysterious child 255 exit code. [#497](https://github.com/amoffat/sh/pull/497)
-*   bugfix where `_in` using 0 or `sys.stdin` wasn't behaving like a TTY, if it was in fact a TTY. [#514](https://github.com/amoffat/sh/issues/514)
-*   bugfix where `help(sh)` raised an exception [#455](https://github.com/amoffat/sh/issues/455)
-*   bugfix fixing broken interactive ssh tutorial from docs
-*   change to automatic tty merging into a single pty if `_tty_in=True` and `_tty_out=True`
-*   introducing `_unify_ttys`, default False, which allows explicit tty merging into single pty
-*   contrib command for `ssh` connections requiring passwords
-*   performance fix for polling output too fast when using `_iter` [#462](https://github.com/amoffat/sh/issues/462)
-*   execution contexts can now be used in python shell [#466](https://github.com/amoffat/sh/pull/466)
-*   bugfix `ErrorReturnCode` instances can now be pickled
-*   bugfix passing empty string or `None` for `_in` hanged [#427](https://github.com/amoffat/sh/pull/427)
-*   bugfix where passing a filename or file-like object to `_out` wasn't using os.dup2 [#449](https://github.com/amoffat/sh/issues/449)
-*   regression make `_fg` work with `_cwd` again [#330](https://github.com/amoffat/sh/issues/330)
-*   an invalid `_cwd` now raises a `ForkException` not an `OSError`.
-*   AIX support [#477](https://github.com/amoffat/sh/issues/477)
-*   added a `timeout=None` param to `RunningCommand.wait()` [#515](https://github.com/amoffat/sh/issues/515)
+
+- minor Travis CI fixes [#492](https://github.com/amoffat/sh/pull/492)
+- bugfix for boolean long options not respecting `_long_prefix` [#488](https://github.com/amoffat/sh/pull/488)
+- fix deprecation warning on Python 3.6 regexes [#482](https://github.com/amoffat/sh/pull/482)
+- `_pass_fds` and `_close_fds` special kwargs for controlling file descriptor inheritance in child.
+- more efficiently closing inherited fds [#406](https://github.com/amoffat/sh/issues/406)
+- bugfix where passing invalid dictionary to `_env` will cause a mysterious child 255 exit code. [#497](https://github.com/amoffat/sh/pull/497)
+- bugfix where `_in` using 0 or `sys.stdin` wasn't behaving like a TTY, if it was in fact a TTY. [#514](https://github.com/amoffat/sh/issues/514)
+- bugfix where `help(sh)` raised an exception [#455](https://github.com/amoffat/sh/issues/455)
+- bugfix fixing broken interactive ssh tutorial from docs
+- change to automatic tty merging into a single pty if `_tty_in=True` and `_tty_out=True`
+- introducing `_unify_ttys`, default False, which allows explicit tty merging into single pty
+- contrib command for `ssh` connections requiring passwords
+- performance fix for polling output too fast when using `_iter` [#462](https://github.com/amoffat/sh/issues/462)
+- execution contexts can now be used in python shell [#466](https://github.com/amoffat/sh/pull/466)
+- bugfix `ErrorReturnCode` instances can now be pickled
+- bugfix passing empty string or `None` for `_in` hanged [#427](https://github.com/amoffat/sh/pull/427)
+- bugfix where passing a filename or file-like object to `_out` wasn't using os.dup2 [#449](https://github.com/amoffat/sh/issues/449)
+- regression make `_fg` work with `_cwd` again [#330](https://github.com/amoffat/sh/issues/330)
+- an invalid `_cwd` now raises a `ForkException` not an `OSError`.
+- AIX support [#477](https://github.com/amoffat/sh/issues/477)
+- added a `timeout=None` param to `RunningCommand.wait()` [#515](https://github.com/amoffat/sh/issues/515)
 
 ## 1.12.14 - 6/6/17
-*   bugfix for poor sleep performance [#378](https://github.com/amoffat/sh/issues/378)
-*   allow passing raw integer file descriptors for `_out` and `_err` handlers
-*   bugfix for when `_tee` and `_out` are used, and the `_out` is a tty or pipe [#384](https://github.com/amoffat/sh/issues/384)
-*   bugfix where python 3.3+ detected different arg counts for bound method output callbacks [#380](https://github.com/amoffat/sh/issues/380)
+
+- bugfix for poor sleep performance [#378](https://github.com/amoffat/sh/issues/378)
+- allow passing raw integer file descriptors for `_out` and `_err` handlers
+- bugfix for when `_tee` and `_out` are used, and the `_out` is a tty or pipe [#384](https://github.com/amoffat/sh/issues/384)
+- bugfix where python 3.3+ detected different arg counts for bound method output callbacks [#380](https://github.com/amoffat/sh/issues/380)
 
 ## 1.12.12, 1.12.13 - 3/30/17
-*   pypi readme doc bugfix [PR#377](https://github.com/amoffat/sh/pull/377)
+
+- pypi readme doc bugfix [PR#377](https://github.com/amoffat/sh/pull/377)
 
 ## 1.12.11 - 3/13/17
 
-*   bugfix for relative paths to `sh.Command` not expanding to absolute paths [#372](https://github.com/amoffat/sh/issues/372)
-*   updated for python 3.6
-*   bugfix for SIGPIPE not being handled correctly on pipelined processes [#373](https://github.com/amoffat/sh/issues/373)
+- bugfix for relative paths to `sh.Command` not expanding to absolute paths [#372](https://github.com/amoffat/sh/issues/372)
+- updated for python 3.6
+- bugfix for SIGPIPE not being handled correctly on pipelined processes [#373](https://github.com/amoffat/sh/issues/373)
 
 ## 1.12.10 - 3/02/17
 
-*   bugfix for file descriptors over 1024 [#356](https://github.com/amoffat/sh/issues/356)
-*   bugfix when `_err_to_out` is True and `_out` is pipe or tty [#365](https://github.com/amoffat/sh/issues/365)
+- bugfix for file descriptors over 1024 [#356](https://github.com/amoffat/sh/issues/356)
+- bugfix when `_err_to_out` is True and `_out` is pipe or tty [#365](https://github.com/amoffat/sh/issues/365)
 
 ## 1.12.9 - 1/04/17
 
-*   added `_bg_exc` for silencing exceptions in background threads [#350](https://github.com/amoffat/sh/pull/350)
+- added `_bg_exc` for silencing exceptions in background threads [#350](https://github.com/amoffat/sh/pull/350)
 
 ## 1.12.8 - 12/16/16
 
-*   bugfix for patched glob.glob on python3.5 [#341](https://github.com/amoffat/sh/issues/341)
+- bugfix for patched glob.glob on python3.5 [#341](https://github.com/amoffat/sh/issues/341)
 
 ## 1.12.7 - 12/07/16
 
-*   added `_out` and `_out_bufsize` validator [#346](https://github.com/amoffat/sh/issues/346)
-*   bugfix for internal stdout thread running when it shouldn't [#346](https://github.com/amoffat/sh/issues/346)
+- added `_out` and `_out_bufsize` validator [#346](https://github.com/amoffat/sh/issues/346)
+- bugfix for internal stdout thread running when it shouldn't [#346](https://github.com/amoffat/sh/issues/346)
 
 ## 1.12.6 - 12/02/16
 
-*   regression bugfix on timeout [#344](https://github.com/amoffat/sh/issues/344)
-*   regression bugfix on `_ok_code=None`
+- regression bugfix on timeout [#344](https://github.com/amoffat/sh/issues/344)
+- regression bugfix on `_ok_code=None`
 
 ## 1.12.5 - 12/01/16
 
-*   further improvements on cpu usage
+- further improvements on cpu usage
 
 ## 1.12.4 - 11/30/16
 
-*   regression in cpu usage [#339](https://github.com/amoffat/sh/issues/339)
+- regression in cpu usage [#339](https://github.com/amoffat/sh/issues/339)
 
 ## 1.12.3 - 11/29/16
 
-*   fd leak regression and fix for flawed fd leak detection test [#337](https://github.com/amoffat/sh/pull/337)
+- fd leak regression and fix for flawed fd leak detection test [#337](https://github.com/amoffat/sh/pull/337)
 
 ## 1.12.2 - 11/28/16
 
-*   support for `io.StringIO` in python2
+- support for `io.StringIO` in python2
 
 ## 1.12.1 - 11/28/16
 
-*   added support for using raw file descriptors for `_in`, `_out`, and `_err`
-*   removed `.close()`ing `_out` handler if FIFO detected
+- added support for using raw file descriptors for `_in`, `_out`, and `_err`
+- removed `.close()`ing `_out` handler if FIFO detected
 
 ## 1.12.0 - 11/21/16
 
-*   composed commands no longer propagate `_bg`
-*   better support for using `sys.stdin` and `sys.stdout` for `_in` and `_out`
-*   bugfix where `which()` would not stop searching at the first valid executable found in PATH
-*   added `_long_prefix` for programs whose long arguments start with something other than `--` [#278](https://github.com/amoffat/sh/pull/278)
-*   added `_log_msg` for advanced configuration of log message [#311](https://github.com/amoffat/sh/pull/311)
-*   added `sh.contrib.sudo`
-*   added `_arg_preprocess` for advanced command wrapping
-*   alter callable `_in` arguments to signify completion with falsy chunk
-*   bugfix where pipes passed into `_out` or `_err` were not flushed on process end [#252](https://github.com/amoffat/sh/pull/252)
-*   deprecated `with sh.args(**kwargs)` in favor of `sh2 = sh(**kwargs)`
-*   made `sh.pushd` thread safe
-*   added `.kill_group()` and `.signal_group()` methods for better process control [#237](https://github.com/amoffat/sh/pull/237)
-*   added `new_session` special keyword argument for controlling spawned process session [#266](https://github.com/amoffat/sh/issues/266)
-*   bugfix better handling for EINTR on system calls [#292](https://github.com/amoffat/sh/pull/292)
-*   bugfix where with-contexts were not threadsafe [#247](https://github.com/amoffat/sh/issues/195)
-*   `_uid` new special keyword param for specifying the user id of the process [#133](https://github.com/amoffat/sh/issues/133)
-*   bugfix where exceptions were swallowed by processes that weren't waited on [#309](https://github.com/amoffat/sh/issues/309)
-*   bugfix where processes that dupd their stdout/stderr to a long running child process would cause sh to hang [#310](https://github.com/amoffat/sh/issues/310)
-*   improved logging output [#323](https://github.com/amoffat/sh/issues/323)
-*   bugfix for python3+ where binary data was passed into a process's stdin [#325](https://github.com/amoffat/sh/issues/325)
-*   Introduced execution contexts which allow baking of common special keyword arguments into all commands [#269](https://github.com/amoffat/sh/issues/269)
-*   `Command` and `which` now can take an optional `paths` parameter which specifies the search paths [#226](https://github.com/amoffat/sh/issues/226)
-*   `_preexec_fn` option for executing a function after the child process forks but before it execs [#260](https://github.com/amoffat/sh/issues/260)
-*   `_fg` reintroduced, with limited functionality.  hurrah! [#92](https://github.com/amoffat/sh/issues/92)
-*   bugfix where a command would block if passed a fd for stdin that wasn't yet ready to read [#253](https://github.com/amoffat/sh/issues/253)
-*   `_long_sep` can now take `None` which splits the long form arguments into individual arguments [#258](https://github.com/amoffat/sh/issues/258)
-*   making `_piped` perform "direct" piping by default (linking fds together).  this fixes memory problems [#270](https://github.com/amoffat/sh/issues/270)
-*   bugfix where calling `next()` on an iterable process that has raised `StopIteration`, hangs [#273](https://github.com/amoffat/sh/issues/273)
-*   `sh.cd` called with no arguments no changes into the user's home directory, like native `cd` [#275](https://github.com/amoffat/sh/issues/275)
-*   `sh.glob` removed entirely.  the rationale is correctness over hand-holding. [#279](https://github.com/amoffat/sh/issues/279)
-*   added `_truncate_exc`, defaulting to `True`, which tells our exceptions to truncate output.
-*   bugfix for exceptions whose messages contained unicode
-*   `_done` callback no longer assumes you want your command put in the background.
-*   `_done` callback is now called asynchronously in a separate thread.
-*   `_done` callback is called regardless of exception, which is necessary in order to release held resources, for example a process pool
+- composed commands no longer propagate `_bg`
+- better support for using `sys.stdin` and `sys.stdout` for `_in` and `_out`
+- bugfix where `which()` would not stop searching at the first valid executable found in PATH
+- added `_long_prefix` for programs whose long arguments start with something other than `--` [#278](https://github.com/amoffat/sh/pull/278)
+- added `_log_msg` for advanced configuration of log message [#311](https://github.com/amoffat/sh/pull/311)
+- added `sh.contrib.sudo`
+- added `_arg_preprocess` for advanced command wrapping
+- alter callable `_in` arguments to signify completion with falsy chunk
+- bugfix where pipes passed into `_out` or `_err` were not flushed on process end [#252](https://github.com/amoffat/sh/pull/252)
+- deprecated `with sh.args(**kwargs)` in favor of `sh2 = sh(**kwargs)`
+- made `sh.pushd` thread safe
+- added `.kill_group()` and `.signal_group()` methods for better process control [#237](https://github.com/amoffat/sh/pull/237)
+- added `new_session` special keyword argument for controlling spawned process session [#266](https://github.com/amoffat/sh/issues/266)
+- bugfix better handling for EINTR on system calls [#292](https://github.com/amoffat/sh/pull/292)
+- bugfix where with-contexts were not threadsafe [#247](https://github.com/amoffat/sh/issues/195)
+- `_uid` new special keyword param for specifying the user id of the process [#133](https://github.com/amoffat/sh/issues/133)
+- bugfix where exceptions were swallowed by processes that weren't waited on [#309](https://github.com/amoffat/sh/issues/309)
+- bugfix where processes that dupd their stdout/stderr to a long running child process would cause sh to hang [#310](https://github.com/amoffat/sh/issues/310)
+- improved logging output [#323](https://github.com/amoffat/sh/issues/323)
+- bugfix for python3+ where binary data was passed into a process's stdin [#325](https://github.com/amoffat/sh/issues/325)
+- Introduced execution contexts which allow baking of common special keyword arguments into all commands [#269](https://github.com/amoffat/sh/issues/269)
+- `Command` and `which` now can take an optional `paths` parameter which specifies the search paths [#226](https://github.com/amoffat/sh/issues/226)
+- `_preexec_fn` option for executing a function after the child process forks but before it execs [#260](https://github.com/amoffat/sh/issues/260)
+- `_fg` reintroduced, with limited functionality. hurrah! [#92](https://github.com/amoffat/sh/issues/92)
+- bugfix where a command would block if passed a fd for stdin that wasn't yet ready to read [#253](https://github.com/amoffat/sh/issues/253)
+- `_long_sep` can now take `None` which splits the long form arguments into individual arguments [#258](https://github.com/amoffat/sh/issues/258)
+- making `_piped` perform "direct" piping by default (linking fds together). this fixes memory problems [#270](https://github.com/amoffat/sh/issues/270)
+- bugfix where calling `next()` on an iterable process that has raised `StopIteration`, hangs [#273](https://github.com/amoffat/sh/issues/273)
+- `sh.cd` called with no arguments no changes into the user's home directory, like native `cd` [#275](https://github.com/amoffat/sh/issues/275)
+- `sh.glob` removed entirely. the rationale is correctness over hand-holding. [#279](https://github.com/amoffat/sh/issues/279)
+- added `_truncate_exc`, defaulting to `True`, which tells our exceptions to truncate output.
+- bugfix for exceptions whose messages contained unicode
+- `_done` callback no longer assumes you want your command put in the background.
+- `_done` callback is now called asynchronously in a separate thread.
+- `_done` callback is called regardless of exception, which is necessary in order to release held resources, for example a process pool
 
 ## 1.10 - 12/30/14
 
-*   partially applied functions with `functools.partial` have been fixed for `_out` and `_err` callbacks [#160](https://github.com/amoffat/sh/issues/160)
-*   `_out` or `_err` being callables no longer puts the running command in the background.  to achieve the previous behavior, pass `_bg=True` to your command.
-*   deprecated `_with` contexts [#195](https://github.com/amoffat/sh/issues/195)
-*   `_timeout_signal` allows you to specify your own signal to kill a timed-out process with.  use a constant from the `signal` stdlib module. [#171](https://github.com/amoffat/sh/issues/171)
-*   signal exceptions can now be caught by number or name.  `SignalException_9 == SignalException_SIGKILL`
-*   child processes that timeout via `_timeout` raise `sh.TimeoutException` instead of `sh.SignalExeception_9` [#172](https://github.com/amoffat/sh/issues/172)
-*   fixed `help(sh)` from the python shell and `pydoc sh` from the command line. [#173](https://github.com/amoffat/sh/issues/173)
-*   program names can no longer be shadowed by names that sh.py defines internally. removed the requirement of trailing underscores for programs that could have their names shadowed, like `id`.
-*   memory optimization when a child process's stdin is a newline-delimted string and our bufsize is newlines
-*   feature, `_done` special keyword argument that accepts a callback to be called when the command completes successfully [#185](https://github.com/amoffat/sh/issues/185)
-*   bugfix for being unable to print a baked command in python3+ [#176](https://github.com/amoffat/sh/issues/176)
-*   bugfix for cwd not existing and causing the child process to continue running parent process code [#202](https://github.com/amoffat/sh/issues/202)
-*   child process is now guaranteed to exit on exception between fork and exec.
-*   fix python2 deprecation warning when running with -3 [PR #165](https://github.com/amoffat/sh/pull/165)
-*   bugfix where sh.py was attempting to execute directories [#196](https://github.com/amoffat/sh/issues/196), [PR #189](https://github.com/amoffat/sh/pull/189)
-*   only backgrounded processes will ignore SIGHUP
-*   allowed `ok_code` to take a `range` object. [#PR 210](https://github.com/amoffat/sh/pull/210/files)
-*   added `sh.args` with context which allows overriding of all command defaults for the duration of that context.
-*   added `sh.pushd` with context which takes a directory name and changes to that directory for the duration of that with context. [PR #206](https://github.com/amoffat/sh/pull/206)
-*   tests now include python 3.4 if available.  tests also stop on the first
-    python that suite that fails.
-*   SIGABRT, SIGBUS, SIGFPE, SIGILL, SIGPIPE, SIGSYS have been added to the list of signals that throw an exception [PR #201](https://github.com/amoffat/sh/pull/201)
-*   "callable" builtin has been faked for python3.1, which lacks it.
-*   "direct" option added to `_piped` special keyword argument, which allows sh to hand off a process's stdout fd directly to another process, instead of buffering its stdout internally, then handing it off.  [#119](https://github.com/amoffat/sh/issues/119)
+- partially applied functions with `functools.partial` have been fixed for `_out` and `_err` callbacks [#160](https://github.com/amoffat/sh/issues/160)
+- `_out` or `_err` being callables no longer puts the running command in the background. to achieve the previous behavior, pass `_bg=True` to your command.
+- deprecated `_with` contexts [#195](https://github.com/amoffat/sh/issues/195)
+- `_timeout_signal` allows you to specify your own signal to kill a timed-out process with. use a constant from the `signal` stdlib module. [#171](https://github.com/amoffat/sh/issues/171)
+- signal exceptions can now be caught by number or name. `SignalException_9 == SignalException_SIGKILL`
+- child processes that timeout via `_timeout` raise `sh.TimeoutException` instead of `sh.SignalExeception_9` [#172](https://github.com/amoffat/sh/issues/172)
+- fixed `help(sh)` from the python shell and `pydoc sh` from the command line. [#173](https://github.com/amoffat/sh/issues/173)
+- program names can no longer be shadowed by names that sh.py defines internally. removed the requirement of trailing underscores for programs that could have their names shadowed, like `id`.
+- memory optimization when a child process's stdin is a newline-delimted string and our bufsize is newlines
+- feature, `_done` special keyword argument that accepts a callback to be called when the command completes successfully [#185](https://github.com/amoffat/sh/issues/185)
+- bugfix for being unable to print a baked command in python3+ [#176](https://github.com/amoffat/sh/issues/176)
+- bugfix for cwd not existing and causing the child process to continue running parent process code [#202](https://github.com/amoffat/sh/issues/202)
+- child process is now guaranteed to exit on exception between fork and exec.
+- fix python2 deprecation warning when running with -3 [PR #165](https://github.com/amoffat/sh/pull/165)
+- bugfix where sh.py was attempting to execute directories [#196](https://github.com/amoffat/sh/issues/196), [PR #189](https://github.com/amoffat/sh/pull/189)
+- only backgrounded processes will ignore SIGHUP
+- allowed `ok_code` to take a `range` object. [#PR 210](https://github.com/amoffat/sh/pull/210/files)
+- added `sh.args` with context which allows overriding of all command defaults for the duration of that context.
+- added `sh.pushd` with context which takes a directory name and changes to that directory for the duration of that with context. [PR #206](https://github.com/amoffat/sh/pull/206)
+- tests now include python 3.4 if available. tests also stop on the first
+  python that suite that fails.
+- SIGABRT, SIGBUS, SIGFPE, SIGILL, SIGPIPE, SIGSYS have been added to the list of signals that throw an exception [PR #201](https://github.com/amoffat/sh/pull/201)
+- "callable" builtin has been faked for python3.1, which lacks it.
+- "direct" option added to `_piped` special keyword argument, which allows sh to hand off a process's stdout fd directly to another process, instead of buffering its stdout internally, then handing it off. [#119](https://github.com/amoffat/sh/issues/119)
 
 ## 1.09 - 9/08/13
 
-*   Fixed encoding errors related to a system encoding "ascii". [#123](https://github.com/amoffat/sh/issues/123)
-*   Added exit_code attribute to SignalException and ErrorReturnCode exception classes. [#127](https://github.com/amoffat/sh/issues/127)
-*   Making the default behavior of spawned processes to not be explicitly killed when the parent python process ends. Also making the spawned process ignore SIGHUP. [#139](https://github.com/amoffat/sh/issues/139)
-*   Made OSX sleep hack to apply to PY2 as well as PY3.
-
+- Fixed encoding errors related to a system encoding "ascii". [#123](https://github.com/amoffat/sh/issues/123)
+- Added exit_code attribute to SignalException and ErrorReturnCode exception classes. [#127](https://github.com/amoffat/sh/issues/127)
+- Making the default behavior of spawned processes to not be explicitly killed when the parent python process ends. Also making the spawned process ignore SIGHUP. [#139](https://github.com/amoffat/sh/issues/139)
+- Made OSX sleep hack to apply to PY2 as well as PY3.
 
 ## 1.08 - 1/29/12
 
-*	Added SignalException class and made all commands that end terminate by a signal defined in SIGNALS_THAT_SHOULD_THROW_EXCEPTION raise it. [#91](https://github.com/amoffat/sh/issues/91)
-*   Bugfix where CommandNotFound was not being raised if Command was created by instantiation.  [#113](https://github.com/amoffat/sh/issues/113)
-*   Bugfix for Commands that are wrapped with functools.wraps() [#121](https://github.com/amoffat/sh/issues/121]
-*   Bugfix where input arguments were being assumed as ascii or unicode, but never as a string in a different encoding.
-*   _long_sep keyword argument added joining together a dictionary of arguments passed in to a command
-*   Commands can now be passed a dictionary of args, and the keys will be interpretted "raw", with no underscore-to-hyphen conversion
-*   Reserved Python keywords can now be used as subcommands by appending an underscore `_` to them
-
+- Added SignalException class and made all commands that end terminate by a signal defined in SIGNALS_THAT_SHOULD_THROW_EXCEPTION raise it. [#91](https://github.com/amoffat/sh/issues/91)
+- Bugfix where CommandNotFound was not being raised if Command was created by instantiation. [#113](https://github.com/amoffat/sh/issues/113)
+- Bugfix for Commands that are wrapped with functools.wraps() [#121](https://github.com/amoffat/sh/issues/121)
+- Bugfix where input arguments were being assumed as ascii or unicode, but never as a string in a different encoding.
+- \_long_sep keyword argument added joining together a dictionary of arguments passed in to a command
+- Commands can now be passed a dictionary of args, and the keys will be interpreted "raw", with no underscore-to-hyphen conversion
+- Reserved Python keywords can now be used as subcommands by appending an underscore `_` to them
 
 ## 1.07 - 11/21/12
 
-*   Bugfix for PyDev when `locale.getpreferredencoding()` is empty.
-*   Fixes for IPython3 that involve `sh.<tab>` and `sh?`
-*   Added `_tee` special keyword argument to force stdout/stderr to store internally and make available for piping data that is being redirected.
-*   Added `_decode_errors` to be passed to all stdout/stderr decoding of a process.
-*   Added `_no_out`, `_no_err`, and `_no_pipe` special keyword arguments.  These are used for long-running processes with lots of output.
-*   Changed custom loggers that were created for each process to fixed loggers, so there are no longer logger references laying around in the logging module after the process ends and it garbage collected.
-
+- Bugfix for PyDev when `locale.getpreferredencoding()` is empty.
+- Fixes for IPython3 that involve `sh.<tab>` and `sh?`
+- Added `_tee` special keyword argument to force stdout/stderr to store internally and make available for piping data that is being redirected.
+- Added `_decode_errors` to be passed to all stdout/stderr decoding of a process.
+- Added `_no_out`, `_no_err`, and `_no_pipe` special keyword arguments. These are used for long-running processes with lots of output.
+- Changed custom loggers that were created for each process to fixed loggers, so there are no longer logger references laying around in the logging module after the process ends and it garbage collected.
 
 ## 1.06 - 11/10/12
 
-*   Removed old undocumented cruft of ARG1..ARGN and ARGV.
-*   Bugfix where `logging_enabled` could not be set from the importing module.
-*   Disabled garbage collection before fork to prevent garbage collection in child process.
-*   Major bugfix where cyclical references were preventing process objects (and their associated stdout/stderr buffers) from being garbage collected.
-*   Bugfix in RunningCommand and OProc loggers, which could get really huge if a command was called that had a large number of arguments.
-
+- Removed old undocumented cruft of ARG1..ARGN and ARGV.
+- Bugfix where `logging_enabled` could not be set from the importing module.
+- Disabled garbage collection before fork to prevent garbage collection in child process.
+- Major bugfix where cyclical references were preventing process objects (and their associated stdout/stderr buffers) from being garbage collected.
+- Bugfix in RunningCommand and OProc loggers, which could get really huge if a command was called that had a large number of arguments.
 
 ## 1.05 - 10/20/12
 
-*   Changing status from alpha to beta.
-*   Python 3.3 officially supported.
-*   Documentation fix.  The section on exceptions now references the fact that signals do not raise an exception, even for signals that might seem like they should, e.g. segfault.
-*   Bugfix with Python 3.3 where importing commands from the sh namespace resulted in an error related to `__path__`
-*   Long-form and short-form options to commands may now be given False to disable the option from being passed into the command.  This is useful to pass in a boolean flag that you flip to either True or False to enable or disable some functionality at runtime.
+- Changing status from alpha to beta.
+- Python 3.3 officially supported.
+- Documentation fix. The section on exceptions now references the fact that signals do not raise an exception, even for signals that might seem like they should, e.g. segfault.
+- Bugfix with Python 3.3 where importing commands from the sh namespace resulted in an error related to `__path__`
+- Long-form and short-form options to commands may now be given False to disable the option from being passed into the command. This is useful to pass in a boolean flag that you flip to either True or False to enable or disable some functionality at runtime.
 
 ## 1.04 - 10/07/12
 
-*   Making `Command` class resolve the `path` parameter with `which` by default instead of expecting it to be resolved before it is passed in.  This change shouldn't affect backwards compatibility.
-*   Fixing a bug when an exception is raised from a program, and the error output has non-ascii text.  This didn't work in Python < 3.0, because .decode()'s default encoding is typically ascii.
+- Making `Command` class resolve the `path` parameter with `which` by default instead of expecting it to be resolved before it is passed in. This change shouldn't affect backwards compatibility.
+- Fixing a bug when an exception is raised from a program, and the error output has non-ascii text. This didn't work in Python < 3.0, because .decode()'s default encoding is typically ascii.
diff --git a/CODEOWNERS b/CODEOWNERS
new file mode 100644
index 0000000..21bc057
--- /dev/null
+++ b/CODEOWNERS
@@ -0,0 +1 @@
+/.github/ @amoffat
\ No newline at end of file
diff --git a/MANIFEST.in b/MANIFEST.in
deleted file mode 100644
index 80f3462..0000000
--- a/MANIFEST.in
+++ /dev/null
@@ -1,3 +0,0 @@
-include CHANGELOG.md LICENSE.txt README.rst test.py
-
-global-exclude *.pyc
diff --git a/MIGRATION.md b/MIGRATION.md
new file mode 100644
index 0000000..e934838
--- /dev/null
+++ b/MIGRATION.md
@@ -0,0 +1,101 @@
+# Migrating from 1._ to 2._
+
+This document provides an upgrade path from `1.*` to `2.*`.
+
+## `sh.cd` builtin removed
+
+There is no `sh.cd` command anymore. It was always a command implemented in sh, as
+some systems provide it as a shell builtin, while others have an actual binary.
+But neither of them persisted the directory change between other `sh` calls,
+which is why it was implemented in sh.
+
+### Workaround
+
+If you were using `sh.cd(dir)`, use the context manager `with sh.pushd(dir)`
+instead. All of the commands in the managed context will have the correct
+directory.
+
+## Removed execution contexts / default arguments
+
+In `1.*` you could spawn a new module from the `sh` module, one which
+had customized defaults for the special keyword arguments. This module could
+then be accessed just like `sh`, and you could even import commands from it.
+
+Unfortunately the magic required to make that work was brittle. Also it was not
+aligned syntactically with the similar baking concept. We have therefore changed
+the syntax to align with baking, and also removed the ability to import directly
+from this new baked execution context.
+
+### Workaround
+
+```python
+sh2 = sh(_tty_out=False)
+sh2.ls()
+```
+
+Becomes:
+
+```python
+sh2 = sh.bake(_tty_out=False)
+sh2.ls()
+```
+
+And
+
+```python
+sh2 = sh.bake(_tty_out=False)
+from sh2 import ls
+ls()
+```
+
+Becomes:
+
+```python
+sh2 = sh.bake(_tty_out=False)
+ls = sh2.ls
+ls()
+```
+
+## Return value now a true string
+
+In `2.*`, the return value of an executed `sh` command has changed (in most cases) from
+a `RunningCommand` object to a unicode string. This makes using the output of a command
+more natural.
+
+### Workaround
+
+To continue returning a `RunningCommand` object, you must use the `_return_cmd=True`
+special keyword argument. You can achieve this on each file with the following code at
+the top of files that use `sh`:
+
+```python
+import sh
+
+sh = sh.bake(_return_cmd=True)
+```
+
+## Piping to STDIN
+
+Previously, if the first argument of a sh command was an instance of `RunningCommand`,
+it was automatically fed into the process's STDIN. This is no longer the case and you
+must explicitly use `_in=`.
+
+### Workaround
+
+None
+
+## New processes don't launch in new session
+
+In `1.*`, `_new_session` defaulted to `True`. It now defaults to `False`. The reason
+for this is that it makes more sense for launched processes to default to being in
+the process group of the python script, so that they receive SIGINTs correctly.
+
+### Workaround
+
+To preserve the old behavior:
+
+```python
+import sh
+
+sh = sh.bake(_new_session=True)
+```
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..1f3626f
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,18 @@
+# runs all tests on all envs, in parallel
+.PHONY: test
+test: build_test_image
+	docker run -it --rm amoffat/shtest tox -p
+
+# one test on all envs, in parallel
+.PHONY: test_one
+test_one: build_test_image
+	docker run -it --rm amoffat/shtest tox -p -- $(test)
+
+.PHONY: build_test_image
+build_test_image:
+	docker build -t amoffat/shtest -f tests/Dockerfile --build-arg cache_bust=951 .
+
+# publishes to PYPI
+.PHONY: release
+release:
+	poetry publish --dry-run
\ No newline at end of file
diff --git a/README.rst b/README.rst
index f807900..b1452c7 100644
--- a/README.rst
+++ b/README.rst
@@ -1,7 +1,9 @@
-.. image:: https://raw.githubusercontent.com/amoffat/sh/master/logo-230.png
+.. image:: https://raw.githubusercontent.com/amoffat/sh/master/images/logo-230.png
     :target: https://amoffat.github.com/sh
     :alt: Logo
 
+**If you are migrating from 1.* to 2.*, please see MIGRATION.md**
+
 |
 
 .. image:: https://img.shields.io/pypi/v/sh.svg?style=flat-square
@@ -22,7 +24,7 @@
 
 |
 
-sh is a full-fledged subprocess replacement for Python 2.6 - 3.8, PyPy and PyPy3
+sh is a full-fledged subprocess replacement for Python 3.8 - 3.10, PyPy and PyPy3
 that allows you to call *any* program as if it were a function:
 
 .. code:: python
@@ -32,6 +34,9 @@ that allows you to call *any* program as if it were a function:
 
 sh is *not* a collection of system commands implemented in Python.
 
+sh relies on various Unix system calls and only works on Unix-like operating
+systems - Linux, macOS, BSDs etc. Specifically, Windows is not supported.
+
 `Complete documentation here <https://amoffat.github.io/sh>`_
 
 Installation
@@ -40,7 +45,7 @@ Installation
 ::
 
     $> pip install sh
-    
+
 Support
 =======
 * `Andrew Moffat <https://github.com/amoffat>`_ - author/maintainer
@@ -58,31 +63,20 @@ Check out the `gh-pages <https://github.com/amoffat/sh/tree/gh-pages>`_ branch a
 Testing
 -------
 
-I've included a Docker test suite in the `docker_test_suit/` folder.  To build the image, `cd` into that directory and
-run::
-
-    $> ./build.sh
-
-This will install ubuntu 18.04 LTS and all python versions from 2.6-3.8.  Once it's done, stay in that directory and
-run::
-
-    $> ./run.sh
-
-This will mount your local code directory into the container and start the test suite, which will take a long time to
-run.  If you wish to run a single test, you may pass that test to `./run.sh`::
+Tests are run in a docker container against all supported Python versions. To run, make the following target::
 
-    $> ./run.sh FunctionalTests.test_unicode_arg
+    $> make test
 
-To run a single test for a single environment::
+To run a single test::
 
-    $> ./run.sh -e 3.4 FunctionalTests.test_unicode_arg
+    $> make test='FunctionalTests.test_background' test_one
 
 Coverage
 --------
 
 First run all of the tests::
 
-    $> python sh.py test
+    $> SH_TESTS_RUNNING=1 coverage run --source=sh -m unittest
 
 This will aggregate a ``.coverage``.  You may then visualize the report with::
 
diff --git a/debian/changelog b/debian/changelog
index 2deb56d..bf4ae82 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -1,8 +1,10 @@
-python-sh (1.14.2-2) UNRELEASED; urgency=medium
+python-sh (2.0.4-1) UNRELEASED; urgency=medium
 
   * Update standards version to 4.6.2, no changes needed.
+  * New upstream release.
+  * New upstream release.
 
- -- Debian Janitor <janitor@jelmer.uk>  Wed, 11 Jan 2023 13:30:56 -0000
+ -- Debian Janitor <janitor@jelmer.uk>  Wed, 24 May 2023 05:46:41 -0000
 
 python-sh (1.14.2-1) unstable; urgency=medium
 
diff --git a/docker_test_suite/Dockerfile b/docker_test_suite/Dockerfile
deleted file mode 100644
index e491af1..0000000
--- a/docker_test_suite/Dockerfile
+++ /dev/null
@@ -1,47 +0,0 @@
-FROM ubuntu:bionic
-
-ARG cache_bust
-RUN apt-get update
-RUN apt-get -y install locales
-
-RUN locale-gen en_US.UTF-8
-ENV LANG en_US.UTF-8
-ENV LANGUAGE en_US:en
-ENV LC_ALL en_US.UTF-8
-
-RUN apt-get -y install\
-    software-properties-common\
-    curl\
-    sudo\
-    python\
-    lsof
-
-RUN add-apt-repository ppa:deadsnakes/ppa
-RUN apt-get update
-RUN apt-get -y install\
-    python2.6\
-    python2.7\
-    python3.1\
-    python3.2\
-    python3.3\
-    python3.4\
-    python3.5\
-    python3.6\
-    python3.7\
-    python3.8
-
-RUN apt-get -y install python3-distutils\
-    && curl https://bootstrap.pypa.io/get-pip.py | python -
-
-ARG uid=1000
-RUN groupadd -g $uid shtest\
-    && useradd -m -u $uid -g $uid shtest\
-    && gpasswd -a shtest sudo\
-    && echo "shtest:shtest" | chpasswd
-
-COPY requirements-dev.txt /tmp/
-RUN pip install -r /tmp/requirements-dev.txt
-
-USER shtest
-WORKDIR /home/shtest/sh
-ENTRYPOINT ["python", "sh.py", "test"]
diff --git a/docker_test_suite/build.sh b/docker_test_suite/build.sh
deleted file mode 100755
index 6ed7ec4..0000000
--- a/docker_test_suite/build.sh
+++ /dev/null
@@ -1,4 +0,0 @@
-#!/bin/bash
-set -ex
-cp ../requirements-dev.txt .
-docker build -t amoffat/shtest $@ .
diff --git a/docker_test_suite/run.sh b/docker_test_suite/run.sh
deleted file mode 100755
index bd92b85..0000000
--- a/docker_test_suite/run.sh
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/bin/bash
-set -ex
-docker run -it --rm -v $(pwd)/../:/home/shtest/sh amoffat/shtest $@
diff --git a/docker_test_suite/shell.sh b/docker_test_suite/shell.sh
deleted file mode 100755
index 234e8e0..0000000
--- a/docker_test_suite/shell.sh
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/bin/bash
-set -ex
-docker run -it --rm -v $(pwd)/../:/home/shtest/sh --entrypoint=/bin/bash amoffat/shtest $@
diff --git a/gitads.png b/gitads.png
deleted file mode 100644
index 5fa173e..0000000
Binary files a/gitads.png and /dev/null differ
diff --git a/logo-230.png b/images/logo-230.png
similarity index 100%
rename from logo-230.png
rename to images/logo-230.png
diff --git a/logo-big.png b/images/logo-big.png
similarity index 100%
rename from logo-big.png
rename to images/logo-big.png
diff --git a/poetry.lock b/poetry.lock
new file mode 100644
index 0000000..b5cf2d2
--- /dev/null
+++ b/poetry.lock
@@ -0,0 +1,1225 @@
+[[package]]
+name = "alabaster"
+version = "0.7.12"
+description = "A configurable sidebar-enabled Sphinx theme"
+category = "dev"
+optional = false
+python-versions = "*"
+
+[[package]]
+name = "attrs"
+version = "21.2.0"
+description = "Classes Without Boilerplate"
+category = "dev"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+
+[package.extras]
+dev = ["coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six", "sphinx", "sphinx-notfound-page", "zope.interface"]
+docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"]
+tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "mypy", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six", "zope.interface"]
+tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "mypy", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six"]
+
+[[package]]
+name = "babel"
+version = "2.9.1"
+description = "Internationalization utilities"
+category = "dev"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+
+[package.dependencies]
+pytz = ">=2015.7"
+
+[[package]]
+name = "black"
+version = "23.1.0"
+description = "The uncompromising code formatter."
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+
+[package.dependencies]
+click = ">=8.0.0"
+mypy-extensions = ">=0.4.3"
+packaging = ">=22.0"
+pathspec = ">=0.9.0"
+platformdirs = ">=2"
+tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
+typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""}
+
+[package.extras]
+colorama = ["colorama (>=0.4.3)"]
+d = ["aiohttp (>=3.7.4)"]
+jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
+uvloop = ["uvloop (>=0.15.2)"]
+
+[[package]]
+name = "cachetools"
+version = "5.3.0"
+description = "Extensible memoizing collections and decorators"
+category = "dev"
+optional = false
+python-versions = "~=3.7"
+
+[[package]]
+name = "certifi"
+version = "2022.12.7"
+description = "Python package for providing Mozilla's CA Bundle."
+category = "dev"
+optional = false
+python-versions = ">=3.6"
+
+[[package]]
+name = "chardet"
+version = "5.1.0"
+description = "Universal encoding detector for Python 3"
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+
+[[package]]
+name = "charset-normalizer"
+version = "2.0.4"
+description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
+category = "dev"
+optional = false
+python-versions = ">=3.5.0"
+
+[package.extras]
+unicode_backport = ["unicodedata2"]
+
+[[package]]
+name = "click"
+version = "8.0.1"
+description = "Composable command line interface toolkit"
+category = "dev"
+optional = false
+python-versions = ">=3.6"
+
+[package.dependencies]
+colorama = {version = "*", markers = "platform_system == \"Windows\""}
+
+[[package]]
+name = "colorama"
+version = "0.4.6"
+description = "Cross-platform colored terminal text."
+category = "dev"
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
+
+[[package]]
+name = "commonmark"
+version = "0.9.1"
+description = "Python parser for the CommonMark Markdown spec"
+category = "dev"
+optional = false
+python-versions = "*"
+
+[package.extras]
+test = ["flake8 (==3.7.8)", "hypothesis (==3.55.3)"]
+
+[[package]]
+name = "coverage"
+version = "5.5"
+description = "Code coverage measurement for Python"
+category = "dev"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4"
+
+[package.extras]
+toml = ["toml"]
+
+[[package]]
+name = "coveralls"
+version = "3.3.1"
+description = "Show coverage stats online via coveralls.io"
+category = "dev"
+optional = false
+python-versions = ">= 3.5"
+
+[package.dependencies]
+coverage = ">=4.1,<6.0.0 || >6.1,<6.1.1 || >6.1.1,<7.0"
+docopt = ">=0.6.1"
+requests = ">=1.0.0"
+
+[package.extras]
+yaml = ["PyYAML (>=3.10)"]
+
+[[package]]
+name = "distlib"
+version = "0.3.6"
+description = "Distribution utilities"
+category = "dev"
+optional = false
+python-versions = "*"
+
+[[package]]
+name = "docopt"
+version = "0.6.2"
+description = "Pythonic argument parser, that will make you smile"
+category = "dev"
+optional = false
+python-versions = "*"
+
+[[package]]
+name = "docutils"
+version = "0.18.1"
+description = "Docutils -- Python Documentation Utilities"
+category = "dev"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+
+[[package]]
+name = "exceptiongroup"
+version = "1.1.0"
+description = "Backport of PEP 654 (exception groups)"
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+
+[package.extras]
+test = ["pytest (>=6)"]
+
+[[package]]
+name = "filelock"
+version = "3.9.0"
+description = "A platform independent file lock."
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+
+[package.extras]
+docs = ["furo (>=2022.12.7)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"]
+testing = ["covdefaults (>=2.2.2)", "coverage (>=7.0.1)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-timeout (>=2.1)"]
+
+[[package]]
+name = "flake8"
+version = "6.0.0"
+description = "the modular source code checker: pep8 pyflakes and co"
+category = "dev"
+optional = false
+python-versions = ">=3.8.1"
+
+[package.dependencies]
+mccabe = ">=0.7.0,<0.8.0"
+pycodestyle = ">=2.10.0,<2.11.0"
+pyflakes = ">=3.0.0,<3.1.0"
+
+[[package]]
+name = "idna"
+version = "3.2"
+description = "Internationalized Domain Names in Applications (IDNA)"
+category = "dev"
+optional = false
+python-versions = ">=3.5"
+
+[[package]]
+name = "imagesize"
+version = "1.4.1"
+description = "Getting image size from png/jpeg/jpeg2000/gif file"
+category = "dev"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+
+[[package]]
+name = "importlib-metadata"
+version = "4.8.1"
+description = "Read metadata from Python packages"
+category = "dev"
+optional = false
+python-versions = ">=3.6"
+
+[package.dependencies]
+zipp = ">=0.5"
+
+[package.extras]
+docs = ["jaraco.packaging (>=8.2)", "rst.linker (>=1.9)", "sphinx"]
+perf = ["ipython"]
+testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pep517", "pyfakefs", "pytest (>=4.6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy", "pytest-perf (>=0.9.2)"]
+
+[[package]]
+name = "iniconfig"
+version = "1.1.1"
+description = "iniconfig: brain-dead simple config-ini parsing"
+category = "dev"
+optional = false
+python-versions = "*"
+
+[[package]]
+name = "jinja2"
+version = "3.0.1"
+description = "A very fast and expressive template engine."
+category = "dev"
+optional = false
+python-versions = ">=3.6"
+
+[package.dependencies]
+MarkupSafe = ">=2.0"
+
+[package.extras]
+i18n = ["Babel (>=2.7)"]
+
+[[package]]
+name = "markupsafe"
+version = "2.0.1"
+description = "Safely add untrusted strings to HTML/XML markup."
+category = "dev"
+optional = false
+python-versions = ">=3.6"
+
+[[package]]
+name = "mccabe"
+version = "0.7.0"
+description = "McCabe checker, plugin for flake8"
+category = "dev"
+optional = false
+python-versions = ">=3.6"
+
+[[package]]
+name = "mypy"
+version = "1.0.0"
+description = "Optional static typing for Python"
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+
+[package.dependencies]
+mypy-extensions = ">=0.4.3"
+tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
+typing-extensions = ">=3.10"
+
+[package.extras]
+dmypy = ["psutil (>=4.0)"]
+install-types = ["pip"]
+python2 = ["typed-ast (>=1.4.0,<2)"]
+reports = ["lxml"]
+
+[[package]]
+name = "mypy-extensions"
+version = "0.4.3"
+description = "Experimental type system extensions for programs checked with the mypy typechecker."
+category = "dev"
+optional = false
+python-versions = "*"
+
+[[package]]
+name = "packaging"
+version = "23.0"
+description = "Core utilities for Python packages"
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+
+[[package]]
+name = "pathspec"
+version = "0.9.0"
+description = "Utility library for gitignore style pattern matching of file paths."
+category = "dev"
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
+
+[[package]]
+name = "platformdirs"
+version = "3.0.0"
+description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+
+[package.extras]
+docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)"]
+test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest (>=7.2.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"]
+
+[[package]]
+name = "pluggy"
+version = "1.0.0"
+description = "plugin and hook calling mechanisms for python"
+category = "dev"
+optional = false
+python-versions = ">=3.6"
+
+[package.extras]
+dev = ["pre-commit", "tox"]
+testing = ["pytest", "pytest-benchmark"]
+
+[[package]]
+name = "pycodestyle"
+version = "2.10.0"
+description = "Python style guide checker"
+category = "dev"
+optional = false
+python-versions = ">=3.6"
+
+[[package]]
+name = "pydantic"
+version = "1.9.2"
+description = "Data validation and settings management using python type hints"
+category = "dev"
+optional = false
+python-versions = ">=3.6.1"
+
+[package.dependencies]
+typing-extensions = ">=3.7.4.3"
+
+[package.extras]
+dotenv = ["python-dotenv (>=0.10.4)"]
+email = ["email-validator (>=1.0.3)"]
+
+[[package]]
+name = "pyflakes"
+version = "3.0.1"
+description = "passive checker of Python programs"
+category = "dev"
+optional = false
+python-versions = ">=3.6"
+
+[[package]]
+name = "pygments"
+version = "2.14.0"
+description = "Pygments is a syntax highlighting package written in Python."
+category = "dev"
+optional = false
+python-versions = ">=3.6"
+
+[package.extras]
+plugins = ["importlib-metadata"]
+
+[[package]]
+name = "pyproject-api"
+version = "1.5.0"
+description = "API to interact with the python pyproject.toml based projects"
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+
+[package.dependencies]
+packaging = ">=21.3"
+tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""}
+
+[package.extras]
+docs = ["furo (>=2022.9.29)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"]
+testing = ["covdefaults (>=2.2.2)", "importlib-metadata (>=5.1)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)", "virtualenv (>=20.17)", "wheel (>=0.38.4)"]
+
+[[package]]
+name = "pytest"
+version = "7.2.1"
+description = "pytest: simple powerful testing with Python"
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+
+[package.dependencies]
+attrs = ">=19.2.0"
+colorama = {version = "*", markers = "sys_platform == \"win32\""}
+exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""}
+iniconfig = "*"
+packaging = "*"
+pluggy = ">=0.12,<2.0"
+tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""}
+
+[package.extras]
+testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"]
+
+[[package]]
+name = "pytz"
+version = "2021.1"
+description = "World timezone definitions, modern and historical"
+category = "dev"
+optional = false
+python-versions = "*"
+
+[[package]]
+name = "requests"
+version = "2.26.0"
+description = "Python HTTP for Humans."
+category = "dev"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
+
+[package.dependencies]
+certifi = ">=2017.4.17"
+charset-normalizer = {version = ">=2.0.0,<2.1.0", markers = "python_version >= \"3\""}
+idna = {version = ">=2.5,<4", markers = "python_version >= \"3\""}
+urllib3 = ">=1.21.1,<1.27"
+
+[package.extras]
+socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"]
+use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"]
+
+[[package]]
+name = "rich"
+version = "12.0.1"
+description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal"
+category = "dev"
+optional = false
+python-versions = ">=3.6.2,<4.0.0"
+
+[package.dependencies]
+commonmark = ">=0.9.0,<0.10.0"
+pygments = ">=2.6.0,<3.0.0"
+
+[package.extras]
+jupyter = ["ipywidgets (>=7.5.1,<8.0.0)"]
+
+[[package]]
+name = "rstcheck"
+version = "6.1.1"
+description = "Checks syntax of reStructuredText and code blocks nested within it"
+category = "dev"
+optional = false
+python-versions = ">=3.7,<4.0"
+
+[package.dependencies]
+rstcheck-core = ">=1.0.2,<2.0.0"
+typer = {version = ">=0.4.1,<0.8", extras = ["all"]}
+
+[package.extras]
+docs = ["m2r2 (>=0.3.2)", "sphinx", "sphinx-autobuild (==2021.3.14)", "sphinx-click (>=4.0.3,<5.0.0)", "sphinx-rtd-dark-mode (>=1.2.4,<2.0.0)", "sphinx-rtd-theme (<1)", "sphinxcontrib-spelling (>=7.3)"]
+sphinx = ["sphinx"]
+testing = ["coverage-conditional-plugin (>=0.5)", "coverage[toml] (>=6.0)", "pytest (>=6.0)", "pytest-cov (>=3.0)", "pytest-randomly (>=3.0)", "pytest-sugar (>=0.9.5)"]
+toml = ["tomli"]
+
+[[package]]
+name = "rstcheck-core"
+version = "1.0.3"
+description = "Checks syntax of reStructuredText and code blocks nested within it"
+category = "dev"
+optional = false
+python-versions = ">=3.7,<4.0"
+
+[package.dependencies]
+docutils = ">=0.7,<0.20"
+pydantic = ">=1.2,<2.0"
+types-docutils = ">=0.18,<0.20"
+
+[package.extras]
+docs = ["m2r2 (>=0.3.2)", "sphinx (>=4.0,<6.0)", "sphinx-autobuild (==2021.3.14)", "sphinx-autodoc-typehints (>=1.15)", "sphinx-rtd-dark-mode (>=1.2.4,<2.0.0)", "sphinx-rtd-theme (<1)", "sphinxcontrib-apidoc (>=0.3)", "sphinxcontrib-spelling (>=7.3)"]
+sphinx = ["sphinx (>=4.0,<6.0)"]
+testing = ["coverage-conditional-plugin (>=0.5)", "coverage[toml] (>=6.0)", "pytest (>=6.0)", "pytest-cov (>=3.0)", "pytest-mock (>=3.7)", "pytest-randomly (>=3.0)", "pytest-sugar (>=0.9.5)"]
+toml = ["tomli (>=2.0,<3.0)"]
+
+[[package]]
+name = "setuptools"
+version = "67.2.0"
+description = "Easily download, build, install, upgrade, and uninstall Python packages"
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+
+[package.extras]
+docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
+testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
+testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"]
+
+[[package]]
+name = "shellingham"
+version = "1.5.0.post1"
+description = "Tool to Detect Surrounding Shell"
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+
+[[package]]
+name = "snowballstemmer"
+version = "2.1.0"
+description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms."
+category = "dev"
+optional = false
+python-versions = "*"
+
+[[package]]
+name = "sphinx"
+version = "6.1.3"
+description = "Python documentation generator"
+category = "dev"
+optional = false
+python-versions = ">=3.8"
+
+[package.dependencies]
+alabaster = ">=0.7,<0.8"
+babel = ">=2.9"
+colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""}
+docutils = ">=0.18,<0.20"
+imagesize = ">=1.3"
+importlib-metadata = {version = ">=4.8", markers = "python_version < \"3.10\""}
+Jinja2 = ">=3.0"
+packaging = ">=21.0"
+Pygments = ">=2.13"
+requests = ">=2.25.0"
+snowballstemmer = ">=2.0"
+sphinxcontrib-applehelp = "*"
+sphinxcontrib-devhelp = "*"
+sphinxcontrib-htmlhelp = ">=2.0.0"
+sphinxcontrib-jsmath = "*"
+sphinxcontrib-qthelp = "*"
+sphinxcontrib-serializinghtml = ">=1.1.5"
+
+[package.extras]
+docs = ["sphinxcontrib-websupport"]
+lint = ["docutils-stubs", "flake8 (>=3.5.0)", "flake8-simplify", "isort", "mypy (>=0.990)", "ruff", "sphinx-lint", "types-requests"]
+test = ["cython", "html5lib", "pytest (>=4.6)"]
+
+[[package]]
+name = "sphinx-rtd-theme"
+version = "1.2.0"
+description = "Read the Docs theme for Sphinx"
+category = "dev"
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
+
+[package.dependencies]
+docutils = "<0.19"
+sphinx = ">=1.6,<7"
+sphinxcontrib-jquery = {version = ">=2.0.0,<3.0.0 || >3.0.0", markers = "python_version > \"3\""}
+
+[package.extras]
+dev = ["bump2version", "sphinxcontrib-httpdomain", "transifex-client", "wheel"]
+
+[[package]]
+name = "sphinxcontrib-applehelp"
+version = "1.0.2"
+description = "sphinxcontrib-applehelp is a sphinx extension which outputs Apple help books"
+category = "dev"
+optional = false
+python-versions = ">=3.5"
+
+[package.extras]
+lint = ["docutils-stubs", "flake8", "mypy"]
+test = ["pytest"]
+
+[[package]]
+name = "sphinxcontrib-devhelp"
+version = "1.0.2"
+description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document."
+category = "dev"
+optional = false
+python-versions = ">=3.5"
+
+[package.extras]
+lint = ["docutils-stubs", "flake8", "mypy"]
+test = ["pytest"]
+
+[[package]]
+name = "sphinxcontrib-htmlhelp"
+version = "2.0.0"
+description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files"
+category = "dev"
+optional = false
+python-versions = ">=3.6"
+
+[package.extras]
+lint = ["docutils-stubs", "flake8", "mypy"]
+test = ["html5lib", "pytest"]
+
+[[package]]
+name = "sphinxcontrib-jquery"
+version = "2.0.0"
+description = "Extension to include jQuery on newer Sphinx releases"
+category = "dev"
+optional = false
+python-versions = ">=2.7"
+
+[package.dependencies]
+setuptools = "*"
+
+[[package]]
+name = "sphinxcontrib-jsmath"
+version = "1.0.1"
+description = "A sphinx extension which renders display math in HTML via JavaScript"
+category = "dev"
+optional = false
+python-versions = ">=3.5"
+
+[package.extras]
+test = ["flake8", "mypy", "pytest"]
+
+[[package]]
+name = "sphinxcontrib-qthelp"
+version = "1.0.3"
+description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document."
+category = "dev"
+optional = false
+python-versions = ">=3.5"
+
+[package.extras]
+lint = ["docutils-stubs", "flake8", "mypy"]
+test = ["pytest"]
+
+[[package]]
+name = "sphinxcontrib-serializinghtml"
+version = "1.1.5"
+description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)."
+category = "dev"
+optional = false
+python-versions = ">=3.5"
+
+[package.extras]
+lint = ["docutils-stubs", "flake8", "mypy"]
+test = ["pytest"]
+
+[[package]]
+name = "tomli"
+version = "2.0.1"
+description = "A lil' TOML parser"
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+
+[[package]]
+name = "tox"
+version = "4.4.5"
+description = "tox is a generic virtualenv management and test command line tool"
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+
+[package.dependencies]
+cachetools = ">=5.3"
+chardet = ">=5.1"
+colorama = ">=0.4.6"
+filelock = ">=3.9"
+packaging = ">=23"
+platformdirs = ">=2.6.2"
+pluggy = ">=1"
+pyproject-api = ">=1.5"
+tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""}
+virtualenv = ">=20.17.1"
+
+[package.extras]
+docs = ["furo (>=2022.12.7)", "sphinx (>=6.1.3)", "sphinx-argparse-cli (>=1.11)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)", "sphinx-copybutton (>=0.5.1)", "sphinx-inline-tabs (>=2022.1.2b11)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=22.12)"]
+testing = ["build[virtualenv] (>=0.10)", "covdefaults (>=2.2.2)", "devpi-process (>=0.3)", "diff-cover (>=7.4)", "distlib (>=0.3.6)", "flaky (>=3.7)", "hatch-vcs (>=0.3)", "hatchling (>=1.12.2)", "psutil (>=5.9.4)", "pytest (>=7.2.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)", "pytest-xdist (>=3.1)", "re-assert (>=1.1)", "time-machine (>=2.9)", "wheel (>=0.38.4)"]
+
+[[package]]
+name = "typer"
+version = "0.7.0"
+description = "Typer, build great CLIs. Easy to code. Based on Python type hints."
+category = "dev"
+optional = false
+python-versions = ">=3.6"
+
+[package.dependencies]
+click = ">=7.1.1,<9.0.0"
+colorama = {version = ">=0.4.3,<0.5.0", optional = true, markers = "extra == \"all\""}
+rich = {version = ">=10.11.0,<13.0.0", optional = true, markers = "extra == \"all\""}
+shellingham = {version = ">=1.3.0,<2.0.0", optional = true, markers = "extra == \"all\""}
+
+[package.extras]
+all = ["colorama (>=0.4.3,<0.5.0)", "rich (>=10.11.0,<13.0.0)", "shellingham (>=1.3.0,<2.0.0)"]
+dev = ["autoflake (>=1.3.1,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "pre-commit (>=2.17.0,<3.0.0)"]
+doc = ["cairosvg (>=2.5.2,<3.0.0)", "mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pillow (>=9.3.0,<10.0.0)"]
+test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.910)", "pytest (>=4.4.0,<8.0.0)", "pytest-cov (>=2.10.0,<5.0.0)", "pytest-sugar (>=0.9.4,<0.10.0)", "pytest-xdist (>=1.32.0,<4.0.0)", "rich (>=10.11.0,<13.0.0)", "shellingham (>=1.3.0,<2.0.0)"]
+
+[[package]]
+name = "types-docutils"
+version = "0.19.1.3"
+description = "Typing stubs for docutils"
+category = "dev"
+optional = false
+python-versions = "*"
+
+[[package]]
+name = "typing-extensions"
+version = "3.10.0.2"
+description = "Backported and Experimental Type Hints for Python 3.5+"
+category = "dev"
+optional = false
+python-versions = "*"
+
+[[package]]
+name = "urllib3"
+version = "1.26.6"
+description = "HTTP library with thread-safe connection pooling, file post, and more."
+category = "dev"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4"
+
+[package.extras]
+brotli = ["brotlipy (>=0.6.0)"]
+secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)"]
+socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
+
+[[package]]
+name = "virtualenv"
+version = "20.19.0"
+description = "Virtual Python Environment builder"
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+
+[package.dependencies]
+distlib = ">=0.3.6,<1"
+filelock = ">=3.4.1,<4"
+platformdirs = ">=2.4,<4"
+
+[package.extras]
+docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=22.12)"]
+test = ["covdefaults (>=2.2.2)", "coverage (>=7.1)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23)", "pytest (>=7.2.1)", "pytest-env (>=0.8.1)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.10)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)"]
+
+[[package]]
+name = "zipp"
+version = "3.5.0"
+description = "Backport of pathlib-compatible object wrapper for zip files"
+category = "dev"
+optional = false
+python-versions = ">=3.6"
+
+[package.extras]
+docs = ["jaraco.packaging (>=8.2)", "rst.linker (>=1.9)", "sphinx"]
+testing = ["func-timeout", "jaraco.itertools", "pytest (>=4.6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy"]
+
+[metadata]
+lock-version = "1.1"
+python-versions = ">=3.8.1,<4.0"
+content-hash = "85385294eb45c64e75c642cb94b89a78ae2950ee138960f847624bb3361efc4d"
+
+[metadata.files]
+alabaster = [
+    {file = "alabaster-0.7.12-py2.py3-none-any.whl", hash = "sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359"},
+    {file = "alabaster-0.7.12.tar.gz", hash = "sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02"},
+]
+attrs = [
+    {file = "attrs-21.2.0-py2.py3-none-any.whl", hash = "sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1"},
+    {file = "attrs-21.2.0.tar.gz", hash = "sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb"},
+]
+babel = [
+    {file = "Babel-2.9.1-py2.py3-none-any.whl", hash = "sha256:ab49e12b91d937cd11f0b67cb259a57ab4ad2b59ac7a3b41d6c06c0ac5b0def9"},
+    {file = "Babel-2.9.1.tar.gz", hash = "sha256:bc0c176f9f6a994582230df350aa6e05ba2ebe4b3ac317eab29d9be5d2768da0"},
+]
+black = [
+    {file = "black-23.1.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:b6a92a41ee34b883b359998f0c8e6eb8e99803aa8bf3123bf2b2e6fec505a221"},
+    {file = "black-23.1.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:57c18c5165c1dbe291d5306e53fb3988122890e57bd9b3dcb75f967f13411a26"},
+    {file = "black-23.1.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:9880d7d419bb7e709b37e28deb5e68a49227713b623c72b2b931028ea65f619b"},
+    {file = "black-23.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e6663f91b6feca5d06f2ccd49a10f254f9298cc1f7f49c46e498a0771b507104"},
+    {file = "black-23.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:9afd3f493666a0cd8f8df9a0200c6359ac53940cbde049dcb1a7eb6ee2dd7074"},
+    {file = "black-23.1.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:bfffba28dc52a58f04492181392ee380e95262af14ee01d4bc7bb1b1c6ca8d27"},
+    {file = "black-23.1.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:c1c476bc7b7d021321e7d93dc2cbd78ce103b84d5a4cf97ed535fbc0d6660648"},
+    {file = "black-23.1.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:382998821f58e5c8238d3166c492139573325287820963d2f7de4d518bd76958"},
+    {file = "black-23.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bf649fda611c8550ca9d7592b69f0637218c2369b7744694c5e4902873b2f3a"},
+    {file = "black-23.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:121ca7f10b4a01fd99951234abdbd97728e1240be89fde18480ffac16503d481"},
+    {file = "black-23.1.0-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:a8471939da5e824b891b25751955be52ee7f8a30a916d570a5ba8e0f2eb2ecad"},
+    {file = "black-23.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8178318cb74f98bc571eef19068f6ab5613b3e59d4f47771582f04e175570ed8"},
+    {file = "black-23.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:a436e7881d33acaf2536c46a454bb964a50eff59b21b51c6ccf5a40601fbef24"},
+    {file = "black-23.1.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:a59db0a2094d2259c554676403fa2fac3473ccf1354c1c63eccf7ae65aac8ab6"},
+    {file = "black-23.1.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:0052dba51dec07ed029ed61b18183942043e00008ec65d5028814afaab9a22fd"},
+    {file = "black-23.1.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:49f7b39e30f326a34b5c9a4213213a6b221d7ae9d58ec70df1c4a307cf2a1580"},
+    {file = "black-23.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:162e37d49e93bd6eb6f1afc3e17a3d23a823042530c37c3c42eeeaf026f38468"},
+    {file = "black-23.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b70eb40a78dfac24842458476135f9b99ab952dd3f2dab738c1881a9b38b753"},
+    {file = "black-23.1.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:a29650759a6a0944e7cca036674655c2f0f63806ddecc45ed40b7b8aa314b651"},
+    {file = "black-23.1.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:bb460c8561c8c1bec7824ecbc3ce085eb50005883a6203dcfb0122e95797ee06"},
+    {file = "black-23.1.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:c91dfc2c2a4e50df0026f88d2215e166616e0c80e86004d0003ece0488db2739"},
+    {file = "black-23.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a951cc83ab535d248c89f300eccbd625e80ab880fbcfb5ac8afb5f01a258ac9"},
+    {file = "black-23.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:0680d4380db3719ebcfb2613f34e86c8e6d15ffeabcf8ec59355c5e7b85bb555"},
+    {file = "black-23.1.0-py3-none-any.whl", hash = "sha256:7a0f701d314cfa0896b9001df70a530eb2472babb76086344e688829efd97d32"},
+    {file = "black-23.1.0.tar.gz", hash = "sha256:b0bd97bea8903f5a2ba7219257a44e3f1f9d00073d6cc1add68f0beec69692ac"},
+]
+cachetools = [
+    {file = "cachetools-5.3.0-py3-none-any.whl", hash = "sha256:429e1a1e845c008ea6c85aa35d4b98b65d6a9763eeef3e37e92728a12d1de9d4"},
+    {file = "cachetools-5.3.0.tar.gz", hash = "sha256:13dfddc7b8df938c21a940dfa6557ce6e94a2f1cdfa58eb90c805721d58f2c14"},
+]
+certifi = [
+    {file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"},
+    {file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"},
+]
+chardet = [
+    {file = "chardet-5.1.0-py3-none-any.whl", hash = "sha256:362777fb014af596ad31334fde1e8c327dfdb076e1960d1694662d46a6917ab9"},
+    {file = "chardet-5.1.0.tar.gz", hash = "sha256:0d62712b956bc154f85fb0a266e2a3c5913c2967e00348701b32411d6def31e5"},
+]
+charset-normalizer = [
+    {file = "charset-normalizer-2.0.4.tar.gz", hash = "sha256:f23667ebe1084be45f6ae0538e4a5a865206544097e4e8bbcacf42cd02a348f3"},
+    {file = "charset_normalizer-2.0.4-py3-none-any.whl", hash = "sha256:0c8911edd15d19223366a194a513099a302055a962bca2cec0f54b8b63175d8b"},
+]
+click = [
+    {file = "click-8.0.1-py3-none-any.whl", hash = "sha256:fba402a4a47334742d782209a7c79bc448911afe1149d07bdabdf480b3e2f4b6"},
+    {file = "click-8.0.1.tar.gz", hash = "sha256:8c04c11192119b1ef78ea049e0a6f0463e4c48ef00a30160c704337586f3ad7a"},
+]
+colorama = [
+    {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
+    {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
+]
+commonmark = [
+    {file = "commonmark-0.9.1-py2.py3-none-any.whl", hash = "sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9"},
+    {file = "commonmark-0.9.1.tar.gz", hash = "sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60"},
+]
+coverage = [
+    {file = "coverage-5.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:b6d534e4b2ab35c9f93f46229363e17f63c53ad01330df9f2d6bd1187e5eaacf"},
+    {file = "coverage-5.5-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:b7895207b4c843c76a25ab8c1e866261bcfe27bfaa20c192de5190121770672b"},
+    {file = "coverage-5.5-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:c2723d347ab06e7ddad1a58b2a821218239249a9e4365eaff6649d31180c1669"},
+    {file = "coverage-5.5-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:900fbf7759501bc7807fd6638c947d7a831fc9fdf742dc10f02956ff7220fa90"},
+    {file = "coverage-5.5-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:004d1880bed2d97151facef49f08e255a20ceb6f9432df75f4eef018fdd5a78c"},
+    {file = "coverage-5.5-cp27-cp27m-win32.whl", hash = "sha256:06191eb60f8d8a5bc046f3799f8a07a2d7aefb9504b0209aff0b47298333302a"},
+    {file = "coverage-5.5-cp27-cp27m-win_amd64.whl", hash = "sha256:7501140f755b725495941b43347ba8a2777407fc7f250d4f5a7d2a1050ba8e82"},
+    {file = "coverage-5.5-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:372da284cfd642d8e08ef606917846fa2ee350f64994bebfbd3afb0040436905"},
+    {file = "coverage-5.5-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:8963a499849a1fc54b35b1c9f162f4108017b2e6db2c46c1bed93a72262ed083"},
+    {file = "coverage-5.5-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:869a64f53488f40fa5b5b9dcb9e9b2962a66a87dab37790f3fcfb5144b996ef5"},
+    {file = "coverage-5.5-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:4a7697d8cb0f27399b0e393c0b90f0f1e40c82023ea4d45d22bce7032a5d7b81"},
+    {file = "coverage-5.5-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:8d0a0725ad7c1a0bcd8d1b437e191107d457e2ec1084b9f190630a4fb1af78e6"},
+    {file = "coverage-5.5-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:51cb9476a3987c8967ebab3f0fe144819781fca264f57f89760037a2ea191cb0"},
+    {file = "coverage-5.5-cp310-cp310-win_amd64.whl", hash = "sha256:c0891a6a97b09c1f3e073a890514d5012eb256845c451bd48f7968ef939bf4ae"},
+    {file = "coverage-5.5-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:3487286bc29a5aa4b93a072e9592f22254291ce96a9fbc5251f566b6b7343cdb"},
+    {file = "coverage-5.5-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:deee1077aae10d8fa88cb02c845cfba9b62c55e1183f52f6ae6a2df6a2187160"},
+    {file = "coverage-5.5-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:f11642dddbb0253cc8853254301b51390ba0081750a8ac03f20ea8103f0c56b6"},
+    {file = "coverage-5.5-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:6c90e11318f0d3c436a42409f2749ee1a115cd8b067d7f14c148f1ce5574d701"},
+    {file = "coverage-5.5-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:30c77c1dc9f253283e34c27935fded5015f7d1abe83bc7821680ac444eaf7793"},
+    {file = "coverage-5.5-cp35-cp35m-win32.whl", hash = "sha256:9a1ef3b66e38ef8618ce5fdc7bea3d9f45f3624e2a66295eea5e57966c85909e"},
+    {file = "coverage-5.5-cp35-cp35m-win_amd64.whl", hash = "sha256:972c85d205b51e30e59525694670de6a8a89691186012535f9d7dbaa230e42c3"},
+    {file = "coverage-5.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:af0e781009aaf59e25c5a678122391cb0f345ac0ec272c7961dc5455e1c40066"},
+    {file = "coverage-5.5-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:74d881fc777ebb11c63736622b60cb9e4aee5cace591ce274fb69e582a12a61a"},
+    {file = "coverage-5.5-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:92b017ce34b68a7d67bd6d117e6d443a9bf63a2ecf8567bb3d8c6c7bc5014465"},
+    {file = "coverage-5.5-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:d636598c8305e1f90b439dbf4f66437de4a5e3c31fdf47ad29542478c8508bbb"},
+    {file = "coverage-5.5-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:41179b8a845742d1eb60449bdb2992196e211341818565abded11cfa90efb821"},
+    {file = "coverage-5.5-cp36-cp36m-win32.whl", hash = "sha256:040af6c32813fa3eae5305d53f18875bedd079960822ef8ec067a66dd8afcd45"},
+    {file = "coverage-5.5-cp36-cp36m-win_amd64.whl", hash = "sha256:5fec2d43a2cc6965edc0bb9e83e1e4b557f76f843a77a2496cbe719583ce8184"},
+    {file = "coverage-5.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:18ba8bbede96a2c3dde7b868de9dcbd55670690af0988713f0603f037848418a"},
+    {file = "coverage-5.5-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:2910f4d36a6a9b4214bb7038d537f015346f413a975d57ca6b43bf23d6563b53"},
+    {file = "coverage-5.5-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:f0b278ce10936db1a37e6954e15a3730bea96a0997c26d7fee88e6c396c2086d"},
+    {file = "coverage-5.5-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:796c9c3c79747146ebd278dbe1e5c5c05dd6b10cc3bcb8389dfdf844f3ead638"},
+    {file = "coverage-5.5-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:53194af30d5bad77fcba80e23a1441c71abfb3e01192034f8246e0d8f99528f3"},
+    {file = "coverage-5.5-cp37-cp37m-win32.whl", hash = "sha256:184a47bbe0aa6400ed2d41d8e9ed868b8205046518c52464fde713ea06e3a74a"},
+    {file = "coverage-5.5-cp37-cp37m-win_amd64.whl", hash = "sha256:2949cad1c5208b8298d5686d5a85b66aae46d73eec2c3e08c817dd3513e5848a"},
+    {file = "coverage-5.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:217658ec7187497e3f3ebd901afdca1af062b42cfe3e0dafea4cced3983739f6"},
+    {file = "coverage-5.5-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1aa846f56c3d49205c952d8318e76ccc2ae23303351d9270ab220004c580cfe2"},
+    {file = "coverage-5.5-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:24d4a7de75446be83244eabbff746d66b9240ae020ced65d060815fac3423759"},
+    {file = "coverage-5.5-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:d1f8bf7b90ba55699b3a5e44930e93ff0189aa27186e96071fac7dd0d06a1873"},
+    {file = "coverage-5.5-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:970284a88b99673ccb2e4e334cfb38a10aab7cd44f7457564d11898a74b62d0a"},
+    {file = "coverage-5.5-cp38-cp38-win32.whl", hash = "sha256:01d84219b5cdbfc8122223b39a954820929497a1cb1422824bb86b07b74594b6"},
+    {file = "coverage-5.5-cp38-cp38-win_amd64.whl", hash = "sha256:2e0d881ad471768bf6e6c2bf905d183543f10098e3b3640fc029509530091502"},
+    {file = "coverage-5.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d1f9ce122f83b2305592c11d64f181b87153fc2c2bbd3bb4a3dde8303cfb1a6b"},
+    {file = "coverage-5.5-cp39-cp39-manylinux1_i686.whl", hash = "sha256:13c4ee887eca0f4c5a247b75398d4114c37882658300e153113dafb1d76de529"},
+    {file = "coverage-5.5-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:52596d3d0e8bdf3af43db3e9ba8dcdaac724ba7b5ca3f6358529d56f7a166f8b"},
+    {file = "coverage-5.5-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:2cafbbb3af0733db200c9b5f798d18953b1a304d3f86a938367de1567f4b5bff"},
+    {file = "coverage-5.5-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:44d654437b8ddd9eee7d1eaee28b7219bec228520ff809af170488fd2fed3e2b"},
+    {file = "coverage-5.5-cp39-cp39-win32.whl", hash = "sha256:d314ed732c25d29775e84a960c3c60808b682c08d86602ec2c3008e1202e3bb6"},
+    {file = "coverage-5.5-cp39-cp39-win_amd64.whl", hash = "sha256:13034c4409db851670bc9acd836243aeee299949bd5673e11844befcb0149f03"},
+    {file = "coverage-5.5-pp36-none-any.whl", hash = "sha256:f030f8873312a16414c0d8e1a1ddff2d3235655a2174e3648b4fa66b3f2f1079"},
+    {file = "coverage-5.5-pp37-none-any.whl", hash = "sha256:2a3859cb82dcbda1cfd3e6f71c27081d18aa251d20a17d87d26d4cd216fb0af4"},
+    {file = "coverage-5.5.tar.gz", hash = "sha256:ebe78fe9a0e874362175b02371bdfbee64d8edc42a044253ddf4ee7d3c15212c"},
+]
+coveralls = [
+    {file = "coveralls-3.3.1-py2.py3-none-any.whl", hash = "sha256:f42015f31d386b351d4226389b387ae173207058832fbf5c8ec4b40e27b16026"},
+    {file = "coveralls-3.3.1.tar.gz", hash = "sha256:b32a8bb5d2df585207c119d6c01567b81fba690c9c10a753bfe27a335bfc43ea"},
+]
+distlib = [
+    {file = "distlib-0.3.6-py2.py3-none-any.whl", hash = "sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e"},
+    {file = "distlib-0.3.6.tar.gz", hash = "sha256:14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46"},
+]
+docopt = [
+    {file = "docopt-0.6.2.tar.gz", hash = "sha256:49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491"},
+]
+docutils = [
+    {file = "docutils-0.18.1-py2.py3-none-any.whl", hash = "sha256:23010f129180089fbcd3bc08cfefccb3b890b0050e1ca00c867036e9d161b98c"},
+    {file = "docutils-0.18.1.tar.gz", hash = "sha256:679987caf361a7539d76e584cbeddc311e3aee937877c87346f31debc63e9d06"},
+]
+exceptiongroup = [
+    {file = "exceptiongroup-1.1.0-py3-none-any.whl", hash = "sha256:327cbda3da756e2de031a3107b81ab7b3770a602c4d16ca618298c526f4bec1e"},
+    {file = "exceptiongroup-1.1.0.tar.gz", hash = "sha256:bcb67d800a4497e1b404c2dd44fca47d3b7a5e5433dbab67f96c1a685cdfdf23"},
+]
+filelock = [
+    {file = "filelock-3.9.0-py3-none-any.whl", hash = "sha256:f58d535af89bb9ad5cd4df046f741f8553a418c01a7856bf0d173bbc9f6bd16d"},
+    {file = "filelock-3.9.0.tar.gz", hash = "sha256:7b319f24340b51f55a2bf7a12ac0755a9b03e718311dac567a0f4f7fabd2f5de"},
+]
+flake8 = [
+    {file = "flake8-6.0.0-py2.py3-none-any.whl", hash = "sha256:3833794e27ff64ea4e9cf5d410082a8b97ff1a06c16aa3d2027339cd0f1195c7"},
+    {file = "flake8-6.0.0.tar.gz", hash = "sha256:c61007e76655af75e6785a931f452915b371dc48f56efd765247c8fe68f2b181"},
+]
+idna = [
+    {file = "idna-3.2-py3-none-any.whl", hash = "sha256:14475042e284991034cb48e06f6851428fb14c4dc953acd9be9a5e95c7b6dd7a"},
+    {file = "idna-3.2.tar.gz", hash = "sha256:467fbad99067910785144ce333826c71fb0e63a425657295239737f7ecd125f3"},
+]
+imagesize = [
+    {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"},
+    {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"},
+]
+importlib-metadata = [
+    {file = "importlib_metadata-4.8.1-py3-none-any.whl", hash = "sha256:b618b6d2d5ffa2f16add5697cf57a46c76a56229b0ed1c438322e4e95645bd15"},
+    {file = "importlib_metadata-4.8.1.tar.gz", hash = "sha256:f284b3e11256ad1e5d03ab86bb2ccd6f5339688ff17a4d797a0fe7df326f23b1"},
+]
+iniconfig = [
+    {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"},
+    {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"},
+]
+jinja2 = [
+    {file = "Jinja2-3.0.1-py3-none-any.whl", hash = "sha256:1f06f2da51e7b56b8f238affdd6b4e2c61e39598a378cc49345bc1bd42a978a4"},
+    {file = "Jinja2-3.0.1.tar.gz", hash = "sha256:703f484b47a6af502e743c9122595cc812b0271f661722403114f71a79d0f5a4"},
+]
+markupsafe = [
+    {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d8446c54dc28c01e5a2dbac5a25f071f6653e6e40f3a8818e8b45d790fe6ef53"},
+    {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36bc903cbb393720fad60fc28c10de6acf10dc6cc883f3e24ee4012371399a38"},
+    {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d7d807855b419fc2ed3e631034685db6079889a1f01d5d9dac950f764da3dad"},
+    {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:add36cb2dbb8b736611303cd3bfcee00afd96471b09cda130da3581cbdc56a6d"},
+    {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:168cd0a3642de83558a5153c8bd34f175a9a6e7f6dc6384b9655d2697312a646"},
+    {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4dc8f9fb58f7364b63fd9f85013b780ef83c11857ae79f2feda41e270468dd9b"},
+    {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:20dca64a3ef2d6e4d5d615a3fd418ad3bde77a47ec8a23d984a12b5b4c74491a"},
+    {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cdfba22ea2f0029c9261a4bd07e830a8da012291fbe44dc794e488b6c9bb353a"},
+    {file = "MarkupSafe-2.0.1-cp310-cp310-win32.whl", hash = "sha256:99df47edb6bda1249d3e80fdabb1dab8c08ef3975f69aed437cb69d0a5de1e28"},
+    {file = "MarkupSafe-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:e0f138900af21926a02425cf736db95be9f4af72ba1bb21453432a07f6082134"},
+    {file = "MarkupSafe-2.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51"},
+    {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff"},
+    {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b"},
+    {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94"},
+    {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872"},
+    {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f"},
+    {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf5d821ffabf0ef3533c39c518f3357b171a1651c1ff6827325e4489b0e46c3c"},
+    {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0d4b31cc67ab36e3392bbf3862cfbadac3db12bdd8b02a2731f509ed5b829724"},
+    {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:baa1a4e8f868845af802979fcdbf0bb11f94f1cb7ced4c4b8a351bb60d108145"},
+    {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:deb993cacb280823246a026e3b2d81c493c53de6acfd5e6bfe31ab3402bb37dd"},
+    {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:63f3268ba69ace99cab4e3e3b5840b03340efed0948ab8f78d2fd87ee5442a4f"},
+    {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:8d206346619592c6200148b01a2142798c989edcb9c896f9ac9722a99d4e77e6"},
+    {file = "MarkupSafe-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d"},
+    {file = "MarkupSafe-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9"},
+    {file = "MarkupSafe-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567"},
+    {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:6557b31b5e2c9ddf0de32a691f2312a32f77cd7681d8af66c2692efdbef84c18"},
+    {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:49e3ceeabbfb9d66c3aef5af3a60cc43b85c33df25ce03d0031a608b0a8b2e3f"},
+    {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f"},
+    {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2"},
+    {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d"},
+    {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9936f0b261d4df76ad22f8fee3ae83b60d7c3e871292cd42f40b81b70afae85"},
+    {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2a7d351cbd8cfeb19ca00de495e224dea7e7d919659c2841bbb7f420ad03e2d6"},
+    {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:60bf42e36abfaf9aff1f50f52644b336d4f0a3fd6d8a60ca0d054ac9f713a864"},
+    {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d6c7ebd4e944c85e2c3421e612a7057a2f48d478d79e61800d81468a8d842207"},
+    {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f0567c4dc99f264f49fe27da5f735f414c4e7e7dd850cfd8e69f0862d7c74ea9"},
+    {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:89c687013cb1cd489a0f0ac24febe8c7a666e6e221b783e53ac50ebf68e45d86"},
+    {file = "MarkupSafe-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415"},
+    {file = "MarkupSafe-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914"},
+    {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5bb28c636d87e840583ee3adeb78172efc47c8b26127267f54a9c0ec251d41a9"},
+    {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066"},
+    {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35"},
+    {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b"},
+    {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298"},
+    {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75"},
+    {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb"},
+    {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fcf051089389abe060c9cd7caa212c707e58153afa2c649f00346ce6d260f1b"},
+    {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5855f8438a7d1d458206a2466bf82b0f104a3724bf96a1c781ab731e4201731a"},
+    {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3dd007d54ee88b46be476e293f48c85048603f5f516008bee124ddd891398ed6"},
+    {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aca6377c0cb8a8253e493c6b451565ac77e98c2951c45f913e0b52facdcff83f"},
+    {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:04635854b943835a6ea959e948d19dcd311762c5c0c6e1f0e16ee57022669194"},
+    {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6300b8454aa6930a24b9618fbb54b5a68135092bc666f7b06901f897fa5c2fee"},
+    {file = "MarkupSafe-2.0.1-cp38-cp38-win32.whl", hash = "sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64"},
+    {file = "MarkupSafe-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833"},
+    {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26"},
+    {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3c112550557578c26af18a1ccc9e090bfe03832ae994343cfdacd287db6a6ae7"},
+    {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:53edb4da6925ad13c07b6d26c2a852bd81e364f95301c66e930ab2aef5b5ddd8"},
+    {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f5653a225f31e113b152e56f154ccbe59eeb1c7487b39b9d9f9cdb58e6c79dc5"},
+    {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135"},
+    {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902"},
+    {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509"},
+    {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c47adbc92fc1bb2b3274c4b3a43ae0e4573d9fbff4f54cd484555edbf030baf1"},
+    {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:37205cac2a79194e3750b0af2a5720d95f786a55ce7df90c3af697bfa100eaac"},
+    {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1f2ade76b9903f39aa442b4aadd2177decb66525062db244b35d71d0ee8599b6"},
+    {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4296f2b1ce8c86a6aea78613c34bb1a672ea0e3de9c6ba08a960efe0b0a09047"},
+    {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f02365d4e99430a12647f09b6cc8bab61a6564363f313126f775eb4f6ef798e"},
+    {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5b6d930f030f8ed98e3e6c98ffa0652bdb82601e7a016ec2ab5d7ff23baa78d1"},
+    {file = "MarkupSafe-2.0.1-cp39-cp39-win32.whl", hash = "sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74"},
+    {file = "MarkupSafe-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8"},
+    {file = "MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"},
+]
+mccabe = [
+    {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"},
+    {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"},
+]
+mypy = [
+    {file = "mypy-1.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0626db16705ab9f7fa6c249c017c887baf20738ce7f9129da162bb3075fc1af"},
+    {file = "mypy-1.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1ace23f6bb4aec4604b86c4843276e8fa548d667dbbd0cb83a3ae14b18b2db6c"},
+    {file = "mypy-1.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87edfaf344c9401942883fad030909116aa77b0fa7e6e8e1c5407e14549afe9a"},
+    {file = "mypy-1.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0ab090d9240d6b4e99e1fa998c2d0aa5b29fc0fb06bd30e7ad6183c95fa07593"},
+    {file = "mypy-1.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:7cc2c01dfc5a3cbddfa6c13f530ef3b95292f926329929001d45e124342cd6b7"},
+    {file = "mypy-1.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:14d776869a3e6c89c17eb943100f7868f677703c8a4e00b3803918f86aafbc52"},
+    {file = "mypy-1.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bb2782a036d9eb6b5a6efcdda0986774bf798beef86a62da86cb73e2a10b423d"},
+    {file = "mypy-1.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5cfca124f0ac6707747544c127880893ad72a656e136adc935c8600740b21ff5"},
+    {file = "mypy-1.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8845125d0b7c57838a10fd8925b0f5f709d0e08568ce587cc862aacce453e3dd"},
+    {file = "mypy-1.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:01b1b9e1ed40544ef486fa8ac022232ccc57109f379611633ede8e71630d07d2"},
+    {file = "mypy-1.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c7cf862aef988b5fbaa17764ad1d21b4831436701c7d2b653156a9497d92c83c"},
+    {file = "mypy-1.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5cd187d92b6939617f1168a4fe68f68add749902c010e66fe574c165c742ed88"},
+    {file = "mypy-1.0.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4e5175026618c178dfba6188228b845b64131034ab3ba52acaffa8f6c361f805"},
+    {file = "mypy-1.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:2f6ac8c87e046dc18c7d1d7f6653a66787a4555085b056fe2d599f1f1a2a2d21"},
+    {file = "mypy-1.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7306edca1c6f1b5fa0bc9aa645e6ac8393014fa82d0fa180d0ebc990ebe15964"},
+    {file = "mypy-1.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3cfad08f16a9c6611e6143485a93de0e1e13f48cfb90bcad7d5fde1c0cec3d36"},
+    {file = "mypy-1.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67cced7f15654710386e5c10b96608f1ee3d5c94ca1da5a2aad5889793a824c1"},
+    {file = "mypy-1.0.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a86b794e8a56ada65c573183756eac8ac5b8d3d59daf9d5ebd72ecdbb7867a43"},
+    {file = "mypy-1.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:50979d5efff8d4135d9db293c6cb2c42260e70fb010cbc697b1311a4d7a39ddb"},
+    {file = "mypy-1.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3ae4c7a99e5153496243146a3baf33b9beff714464ca386b5f62daad601d87af"},
+    {file = "mypy-1.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5e398652d005a198a7f3c132426b33c6b85d98aa7dc852137a2a3be8890c4072"},
+    {file = "mypy-1.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be78077064d016bc1b639c2cbcc5be945b47b4261a4f4b7d8923f6c69c5c9457"},
+    {file = "mypy-1.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:92024447a339400ea00ac228369cd242e988dd775640755fa4ac0c126e49bb74"},
+    {file = "mypy-1.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:fe523fcbd52c05040c7bee370d66fee8373c5972171e4fbc323153433198592d"},
+    {file = "mypy-1.0.0-py3-none-any.whl", hash = "sha256:2efa963bdddb27cb4a0d42545cd137a8d2b883bd181bbc4525b568ef6eca258f"},
+    {file = "mypy-1.0.0.tar.gz", hash = "sha256:f34495079c8d9da05b183f9f7daec2878280c2ad7cc81da686ef0b484cea2ecf"},
+]
+mypy-extensions = [
+    {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"},
+    {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"},
+]
+packaging = [
+    {file = "packaging-23.0-py3-none-any.whl", hash = "sha256:714ac14496c3e68c99c29b00845f7a2b85f3bb6f1078fd9f72fd20f0570002b2"},
+    {file = "packaging-23.0.tar.gz", hash = "sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97"},
+]
+pathspec = [
+    {file = "pathspec-0.9.0-py2.py3-none-any.whl", hash = "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a"},
+    {file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"},
+]
+platformdirs = [
+    {file = "platformdirs-3.0.0-py3-none-any.whl", hash = "sha256:b1d5eb14f221506f50d6604a561f4c5786d9e80355219694a1b244bcd96f4567"},
+    {file = "platformdirs-3.0.0.tar.gz", hash = "sha256:8a1228abb1ef82d788f74139988b137e78692984ec7b08eaa6c65f1723af28f9"},
+]
+pluggy = [
+    {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"},
+    {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"},
+]
+pycodestyle = [
+    {file = "pycodestyle-2.10.0-py2.py3-none-any.whl", hash = "sha256:8a4eaf0d0495c7395bdab3589ac2db602797d76207242c17d470186815706610"},
+    {file = "pycodestyle-2.10.0.tar.gz", hash = "sha256:347187bdb476329d98f695c213d7295a846d1152ff4fe9bacb8a9590b8ee7053"},
+]
+pydantic = [
+    {file = "pydantic-1.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9c9e04a6cdb7a363d7cb3ccf0efea51e0abb48e180c0d31dca8d247967d85c6e"},
+    {file = "pydantic-1.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fafe841be1103f340a24977f61dee76172e4ae5f647ab9e7fd1e1fca51524f08"},
+    {file = "pydantic-1.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afacf6d2a41ed91fc631bade88b1d319c51ab5418870802cedb590b709c5ae3c"},
+    {file = "pydantic-1.9.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ee0d69b2a5b341fc7927e92cae7ddcfd95e624dfc4870b32a85568bd65e6131"},
+    {file = "pydantic-1.9.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ff68fc85355532ea77559ede81f35fff79a6a5543477e168ab3a381887caea76"},
+    {file = "pydantic-1.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c0f5e142ef8217019e3eef6ae1b6b55f09a7a15972958d44fbd228214cede567"},
+    {file = "pydantic-1.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:615661bfc37e82ac677543704437ff737418e4ea04bef9cf11c6d27346606044"},
+    {file = "pydantic-1.9.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:328558c9f2eed77bd8fffad3cef39dbbe3edc7044517f4625a769d45d4cf7555"},
+    {file = "pydantic-1.9.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bd446bdb7755c3a94e56d7bdfd3ee92396070efa8ef3a34fab9579fe6aa1d84"},
+    {file = "pydantic-1.9.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0b214e57623a535936005797567231a12d0da0c29711eb3514bc2b3cd008d0f"},
+    {file = "pydantic-1.9.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:d8ce3fb0841763a89322ea0432f1f59a2d3feae07a63ea2c958b2315e1ae8adb"},
+    {file = "pydantic-1.9.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b34ba24f3e2d0b39b43f0ca62008f7ba962cff51efa56e64ee25c4af6eed987b"},
+    {file = "pydantic-1.9.2-cp36-cp36m-win_amd64.whl", hash = "sha256:84d76ecc908d917f4684b354a39fd885d69dd0491be175f3465fe4b59811c001"},
+    {file = "pydantic-1.9.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4de71c718c9756d679420c69f216776c2e977459f77e8f679a4a961dc7304a56"},
+    {file = "pydantic-1.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5803ad846cdd1ed0d97eb00292b870c29c1f03732a010e66908ff48a762f20e4"},
+    {file = "pydantic-1.9.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a8c5360a0297a713b4123608a7909e6869e1b56d0e96eb0d792c27585d40757f"},
+    {file = "pydantic-1.9.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:cdb4272678db803ddf94caa4f94f8672e9a46bae4a44f167095e4d06fec12979"},
+    {file = "pydantic-1.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:19b5686387ea0d1ea52ecc4cffb71abb21702c5e5b2ac626fd4dbaa0834aa49d"},
+    {file = "pydantic-1.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:32e0b4fb13ad4db4058a7c3c80e2569adbd810c25e6ca3bbd8b2a9cc2cc871d7"},
+    {file = "pydantic-1.9.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:91089b2e281713f3893cd01d8e576771cd5bfdfbff5d0ed95969f47ef6d676c3"},
+    {file = "pydantic-1.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e631c70c9280e3129f071635b81207cad85e6c08e253539467e4ead0e5b219aa"},
+    {file = "pydantic-1.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b3946f87e5cef3ba2e7bd3a4eb5a20385fe36521d6cc1ebf3c08a6697c6cfb3"},
+    {file = "pydantic-1.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5565a49effe38d51882cb7bac18bda013cdb34d80ac336428e8908f0b72499b0"},
+    {file = "pydantic-1.9.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:bd67cb2c2d9602ad159389c29e4ca964b86fa2f35c2faef54c3eb28b4efd36c8"},
+    {file = "pydantic-1.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4aafd4e55e8ad5bd1b19572ea2df546ccace7945853832bb99422a79c70ce9b8"},
+    {file = "pydantic-1.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:d70916235d478404a3fa8c997b003b5f33aeac4686ac1baa767234a0f8ac2326"},
+    {file = "pydantic-1.9.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f0ca86b525264daa5f6b192f216a0d1e860b7383e3da1c65a1908f9c02f42801"},
+    {file = "pydantic-1.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1061c6ee6204f4f5a27133126854948e3b3d51fcc16ead2e5d04378c199b2f44"},
+    {file = "pydantic-1.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e78578f0c7481c850d1c969aca9a65405887003484d24f6110458fb02cca7747"},
+    {file = "pydantic-1.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5da164119602212a3fe7e3bc08911a89db4710ae51444b4224c2382fd09ad453"},
+    {file = "pydantic-1.9.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ead3cd020d526f75b4188e0a8d71c0dbbe1b4b6b5dc0ea775a93aca16256aeb"},
+    {file = "pydantic-1.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7d0f183b305629765910eaad707800d2f47c6ac5bcfb8c6397abdc30b69eeb15"},
+    {file = "pydantic-1.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:f1a68f4f65a9ee64b6ccccb5bf7e17db07caebd2730109cb8a95863cfa9c4e55"},
+    {file = "pydantic-1.9.2-py3-none-any.whl", hash = "sha256:78a4d6bdfd116a559aeec9a4cfe77dda62acc6233f8b56a716edad2651023e5e"},
+    {file = "pydantic-1.9.2.tar.gz", hash = "sha256:8cb0bc509bfb71305d7a59d00163d5f9fc4530f0881ea32c74ff4f74c85f3d3d"},
+]
+pyflakes = [
+    {file = "pyflakes-3.0.1-py2.py3-none-any.whl", hash = "sha256:ec55bf7fe21fff7f1ad2f7da62363d749e2a470500eab1b555334b67aa1ef8cf"},
+    {file = "pyflakes-3.0.1.tar.gz", hash = "sha256:ec8b276a6b60bd80defed25add7e439881c19e64850afd9b346283d4165fd0fd"},
+]
+pygments = [
+    {file = "Pygments-2.14.0-py3-none-any.whl", hash = "sha256:fa7bd7bd2771287c0de303af8bfdfc731f51bd2c6a47ab69d117138893b82717"},
+    {file = "Pygments-2.14.0.tar.gz", hash = "sha256:b3ed06a9e8ac9a9aae5a6f5dbe78a8a58655d17b43b93c078f094ddc476ae297"},
+]
+pyproject-api = [
+    {file = "pyproject_api-1.5.0-py3-none-any.whl", hash = "sha256:4c111277dfb96bcd562c6245428f27250b794bfe3e210b8714c4f893952f2c17"},
+    {file = "pyproject_api-1.5.0.tar.gz", hash = "sha256:0962df21f3e633b8ddb9567c011e6c1b3dcdfc31b7860c0ede7e24c5a1200fbe"},
+]
+pytest = [
+    {file = "pytest-7.2.1-py3-none-any.whl", hash = "sha256:c7c6ca206e93355074ae32f7403e8ea12163b1163c976fee7d4d84027c162be5"},
+    {file = "pytest-7.2.1.tar.gz", hash = "sha256:d45e0952f3727241918b8fd0f376f5ff6b301cc0777c6f9a556935c92d8a7d42"},
+]
+pytz = [
+    {file = "pytz-2021.1-py2.py3-none-any.whl", hash = "sha256:eb10ce3e7736052ed3623d49975ce333bcd712c7bb19a58b9e2089d4057d0798"},
+    {file = "pytz-2021.1.tar.gz", hash = "sha256:83a4a90894bf38e243cf052c8b58f381bfe9a7a483f6a9cab140bc7f702ac4da"},
+]
+requests = [
+    {file = "requests-2.26.0-py2.py3-none-any.whl", hash = "sha256:6c1246513ecd5ecd4528a0906f910e8f0f9c6b8ec72030dc9fd154dc1a6efd24"},
+    {file = "requests-2.26.0.tar.gz", hash = "sha256:b8aa58f8cf793ffd8782d3d8cb19e66ef36f7aba4353eec859e74678b01b07a7"},
+]
+rich = [
+    {file = "rich-12.0.1-py3-none-any.whl", hash = "sha256:ce5c714e984a2d185399e4e1dd1f8b2feacb7cecfc576f1522425643a36a57ea"},
+    {file = "rich-12.0.1.tar.gz", hash = "sha256:3fba9dd15ebe048e2795a02ac19baee79dc12cc50b074ef70f2958cd651b59a9"},
+]
+rstcheck = [
+    {file = "rstcheck-6.1.1-py3-none-any.whl", hash = "sha256:edeff9ad0644d12bd250100b677887424193789254c90d95c13375062ee2cbac"},
+    {file = "rstcheck-6.1.1.tar.gz", hash = "sha256:8e43485a644e794b8127f8c4868ef62c14ec7919bdda6cb16642157055d32e47"},
+]
+rstcheck-core = [
+    {file = "rstcheck_core-1.0.3-py3-none-any.whl", hash = "sha256:d75d7df8f15b58e8aafe322d6fb6ef1ac8d12bb563089b0696948a00ee7f601a"},
+    {file = "rstcheck_core-1.0.3.tar.gz", hash = "sha256:add19c9a1b97d9087f4b463b49c12cd8a9c03689a255e99089c70a2692f16369"},
+]
+setuptools = [
+    {file = "setuptools-67.2.0-py3-none-any.whl", hash = "sha256:16ccf598aab3b506593c17378473978908a2734d7336755a8769b480906bec1c"},
+    {file = "setuptools-67.2.0.tar.gz", hash = "sha256:b440ee5f7e607bb8c9de15259dba2583dd41a38879a7abc1d43a71c59524da48"},
+]
+shellingham = [
+    {file = "shellingham-1.5.0.post1-py2.py3-none-any.whl", hash = "sha256:368bf8c00754fd4f55afb7bbb86e272df77e4dc76ac29dbcbb81a59e9fc15744"},
+    {file = "shellingham-1.5.0.post1.tar.gz", hash = "sha256:823bc5fb5c34d60f285b624e7264f4dda254bc803a3774a147bf99c0e3004a28"},
+]
+snowballstemmer = [
+    {file = "snowballstemmer-2.1.0-py2.py3-none-any.whl", hash = "sha256:b51b447bea85f9968c13b650126a888aabd4cb4463fca868ec596826325dedc2"},
+    {file = "snowballstemmer-2.1.0.tar.gz", hash = "sha256:e997baa4f2e9139951b6f4c631bad912dfd3c792467e2f03d7239464af90e914"},
+]
+sphinx = [
+    {file = "Sphinx-6.1.3.tar.gz", hash = "sha256:0dac3b698538ffef41716cf97ba26c1c7788dba73ce6f150c1ff5b4720786dd2"},
+    {file = "sphinx-6.1.3-py3-none-any.whl", hash = "sha256:807d1cb3d6be87eb78a381c3e70ebd8d346b9a25f3753e9947e866b2786865fc"},
+]
+sphinx-rtd-theme = [
+    {file = "sphinx_rtd_theme-1.2.0-py2.py3-none-any.whl", hash = "sha256:f823f7e71890abe0ac6aaa6013361ea2696fc8d3e1fa798f463e82bdb77eeff2"},
+    {file = "sphinx_rtd_theme-1.2.0.tar.gz", hash = "sha256:a0d8bd1a2ed52e0b338cbe19c4b2eef3c5e7a048769753dac6a9f059c7b641b8"},
+]
+sphinxcontrib-applehelp = [
+    {file = "sphinxcontrib-applehelp-1.0.2.tar.gz", hash = "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58"},
+    {file = "sphinxcontrib_applehelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a"},
+]
+sphinxcontrib-devhelp = [
+    {file = "sphinxcontrib-devhelp-1.0.2.tar.gz", hash = "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"},
+    {file = "sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e"},
+]
+sphinxcontrib-htmlhelp = [
+    {file = "sphinxcontrib-htmlhelp-2.0.0.tar.gz", hash = "sha256:f5f8bb2d0d629f398bf47d0d69c07bc13b65f75a81ad9e2f71a63d4b7a2f6db2"},
+    {file = "sphinxcontrib_htmlhelp-2.0.0-py2.py3-none-any.whl", hash = "sha256:d412243dfb797ae3ec2b59eca0e52dac12e75a241bf0e4eb861e450d06c6ed07"},
+]
+sphinxcontrib-jquery = [
+    {file = "sphinxcontrib-jquery-2.0.0.tar.gz", hash = "sha256:8fb65f6dba84bf7bcd1aea1f02ab3955ac34611d838bcc95d4983b805b234daa"},
+    {file = "sphinxcontrib_jquery-2.0.0-py3-none-any.whl", hash = "sha256:ed47fa425c338ffebe3c37e1cdb56e30eb806116b85f01055b158c7057fdb995"},
+]
+sphinxcontrib-jsmath = [
+    {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"},
+    {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"},
+]
+sphinxcontrib-qthelp = [
+    {file = "sphinxcontrib-qthelp-1.0.3.tar.gz", hash = "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72"},
+    {file = "sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"},
+]
+sphinxcontrib-serializinghtml = [
+    {file = "sphinxcontrib-serializinghtml-1.1.5.tar.gz", hash = "sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952"},
+    {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"},
+]
+tomli = [
+    {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"},
+    {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"},
+]
+tox = [
+    {file = "tox-4.4.5-py3-none-any.whl", hash = "sha256:1081864f1a1393ffa11ebe9beaa280349020579310d217a594a4e7b6124c5425"},
+    {file = "tox-4.4.5.tar.gz", hash = "sha256:f9bc83c5da8666baa2a4d4e884bbbda124fe646e4b1c0e412949cecc2b6e8f90"},
+]
+typer = [
+    {file = "typer-0.7.0-py3-none-any.whl", hash = "sha256:b5e704f4e48ec263de1c0b3a2387cd405a13767d2f907f44c1a08cbad96f606d"},
+    {file = "typer-0.7.0.tar.gz", hash = "sha256:ff797846578a9f2a201b53442aedeb543319466870fbe1c701eab66dd7681165"},
+]
+types-docutils = [
+    {file = "types-docutils-0.19.1.3.tar.gz", hash = "sha256:36fe30de56f1ece1a9f7a990d47daa781b5af831d2b3f2dcb7dfd01b857cc3d4"},
+    {file = "types_docutils-0.19.1.3-py3-none-any.whl", hash = "sha256:d608e6b91ccf0e8e01c586a0af5b0e0462382d3be65b734af82d40c9d010735d"},
+]
+typing-extensions = [
+    {file = "typing_extensions-3.10.0.2-py2-none-any.whl", hash = "sha256:d8226d10bc02a29bcc81df19a26e56a9647f8b0a6d4a83924139f4a8b01f17b7"},
+    {file = "typing_extensions-3.10.0.2-py3-none-any.whl", hash = "sha256:f1d25edafde516b146ecd0613dabcc61409817af4766fbbcfb8d1ad4ec441a34"},
+    {file = "typing_extensions-3.10.0.2.tar.gz", hash = "sha256:49f75d16ff11f1cd258e1b988ccff82a3ca5570217d7ad8c5f48205dd99a677e"},
+]
+urllib3 = [
+    {file = "urllib3-1.26.6-py2.py3-none-any.whl", hash = "sha256:39fb8672126159acb139a7718dd10806104dec1e2f0f6c88aab05d17df10c8d4"},
+    {file = "urllib3-1.26.6.tar.gz", hash = "sha256:f57b4c16c62fa2760b7e3d97c35b255512fb6b59a259730f36ba32ce9f8e342f"},
+]
+virtualenv = [
+    {file = "virtualenv-20.19.0-py3-none-any.whl", hash = "sha256:54eb59e7352b573aa04d53f80fc9736ed0ad5143af445a1e539aada6eb947dd1"},
+    {file = "virtualenv-20.19.0.tar.gz", hash = "sha256:37a640ba82ed40b226599c522d411e4be5edb339a0c0de030c0dc7b646d61590"},
+]
+zipp = [
+    {file = "zipp-3.5.0-py3-none-any.whl", hash = "sha256:957cfda87797e389580cb8b9e3870841ca991e2125350677b2ca83a0e99390a3"},
+    {file = "zipp-3.5.0.tar.gz", hash = "sha256:f5812b1e007e48cff63449a5e9f4e7ebea716b4111f9c4f9a645f91d579bf0c4"},
+]
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000..67b34d7
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,56 @@
+[tool.poetry]
+name = "sh"
+version = "2.0.4"
+description = "Python subprocess replacement"
+authors = ["Andrew Moffat <arwmoffat@gmail.com>"]
+readme = "README.rst"
+maintainers = [
+    "Andrew Moffat <arwmoffat@gmail.com>",
+    "Erik Cederstrand <erik@cederstrand.dk>"
+]
+homepage = "https://amoffat.github.io/sh/"
+repository = "https://github.com/amoffat/sh"
+documentation = "https://amoffat.github.io/sh/"
+license = "MIT"
+classifiers = [
+    "Development Status :: 5 - Production/Stable",
+    "Environment :: Console",
+    "Intended Audience :: Developers",
+    "Intended Audience :: System Administrators",
+    "License :: OSI Approved :: MIT License",
+    "Programming Language :: Python",
+    "Programming Language :: Python :: 3.8",
+    "Programming Language :: Python :: 3.9",
+    "Programming Language :: Python :: 3.10",
+    "Programming Language :: Python :: 3.11",
+    "Programming Language :: Python :: Implementation :: CPython",
+    "Programming Language :: Python :: Implementation :: PyPy",
+    "Topic :: Software Development :: Build Tools",
+    "Topic :: Software Development :: Libraries :: Python Modules",
+]
+include = [
+    { path = "CHANGELOG.md", format = "sdist" },
+    { path = "MIGRATION.md", format = "sdist" },
+    { path = "images", format = "sdist" },
+    { path = "Makefile", format = "sdist" },
+    { path = "tests", format = "sdist" },
+    { path = "tox.ini", format = "sdist" },
+]
+
+[tool.poetry.dependencies]
+python = ">=3.8.1,<4.0"
+
+[tool.poetry.group.dev.dependencies]
+tox = "^4.4.5"
+black = "^23.1.0"
+coveralls = "^3.3.1"
+flake8 = "^6.0.0"
+rstcheck = "^6.1.1"
+sphinx = "^6.1.3"
+sphinx-rtd-theme = "^1.2.0"
+pytest = "^7.2.1"
+mypy = "^1.0.0"
+
+[build-system]
+requires = ["poetry-core>=1.0.0a5"]
+build-backend = "poetry.core.masonry.api"
diff --git a/requirements-dev.txt b/requirements-dev.txt
deleted file mode 100644
index f82c409..0000000
--- a/requirements-dev.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-Pygments==2.1.3
-coverage==4.2
-coveralls==1.1
-docopt==0.6.2
-docutils==0.12
-flake8==3.7.9
diff --git a/requirements-docs.txt b/requirements-docs.txt
deleted file mode 100644
index c7496d6..0000000
--- a/requirements-docs.txt
+++ /dev/null
@@ -1,12 +0,0 @@
-alabaster==0.7.9
-Babel==2.3.4
-docutils==0.12
-imagesize==0.7.1
-Jinja2==2.10.3
-MarkupSafe==0.23
-Pygments==2.1.3
-pytz==2016.7
-six==1.10.0
-snowballstemmer==1.2.1
-Sphinx==3.1.1
-sphinx-rtd-theme==0.5.0
diff --git a/setup.cfg b/setup.cfg
deleted file mode 100644
index af455ef..0000000
--- a/setup.cfg
+++ /dev/null
@@ -1,8 +0,0 @@
-[bdist_wheel]
-universal = 1
-
-[metadata]
-license_file = LICENSE.txt
-
-[flake8]
-max-line-length = 120
diff --git a/setup.py b/setup.py
deleted file mode 100644
index 97cba90..0000000
--- a/setup.py
+++ /dev/null
@@ -1,58 +0,0 @@
-from __future__ import print_function
-import os
-from os.path import dirname, abspath, join
-import sys
-import sh
-import codecs
-from setuptools import setup
-
-
-HERE = dirname(abspath(__file__))
-
-author = "Andrew Moffat"
-author_email = "arwmoffat@gmail.com"
-keywords = ["subprocess", "process", "shell", "launch", "program"]
-
-
-def read(*parts):
-    with codecs.open(join(HERE, *parts), "rb", "utf-8") as f:
-        return f.read()
-
-setup(
-    name="sh",
-    version=sh.__version__,
-    description="Python subprocess replacement",
-    long_description=read("README.rst"),
-    author=author,
-    author_email=author_email,
-    maintainer=author,
-    maintainer_email=author_email,
-    keywords=keywords,
-    url="https://github.com/amoffat/sh",
-    license="MIT",
-    py_modules=["sh"],
-    classifiers=[
-        "Development Status :: 5 - Production/Stable",
-        "Environment :: Console",
-        "Intended Audience :: Developers",
-        "Intended Audience :: System Administrators",
-        "License :: OSI Approved :: MIT License",
-        "Programming Language :: Python",
-        "Programming Language :: Python :: 2",
-        "Programming Language :: Python :: 2.6",
-        "Programming Language :: Python :: 2.7",
-        "Programming Language :: Python :: 3",
-        "Programming Language :: Python :: 3.1",
-        "Programming Language :: Python :: 3.2",
-        "Programming Language :: Python :: 3.3",
-        "Programming Language :: Python :: 3.4",
-        "Programming Language :: Python :: 3.5",
-        "Programming Language :: Python :: 3.6",
-        "Programming Language :: Python :: 3.7",
-        "Programming Language :: Python :: 3.8",
-        "Programming Language :: Python :: Implementation :: CPython",
-        "Programming Language :: Python :: Implementation :: PyPy",
-        "Topic :: Software Development :: Build Tools",
-        "Topic :: Software Development :: Libraries :: Python Modules",
-    ],
-)
diff --git a/sh.py b/sh.py
index 2135d87..e12b559 100644
--- a/sh.py
+++ b/sh.py
@@ -2,7 +2,7 @@
 http://amoffat.github.io/sh/
 """
 # ===============================================================================
-# Copyright (C) 2011-2020 by Andrew Moffat
+# Copyright (C) 2011-2023 by Andrew Moffat
 #
 # Permission is hereby granted, free of charge, to any person obtaining a copy
 # of this software and associated documentation files (the "Software"), to deal
@@ -22,20 +22,14 @@ http://amoffat.github.io/sh/
 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 # THE SOFTWARE.
 # ===============================================================================
-__version__ = "1.14.2"
-__project_url__ = "https://github.com/amoffat/sh"
-
+import asyncio
 from collections import deque
+
 try:
     from collections.abc import Mapping
 except ImportError:
     from collections import Mapping
-from contextlib import contextmanager
-from functools import partial
-from io import UnsupportedOperation, open as fdopen
-from locale import getpreferredencoding
-from types import ModuleType, GeneratorType
-import ast
+
 import errno
 import fcntl
 import gc
@@ -60,44 +54,35 @@ import traceback
 import tty
 import warnings
 import weakref
+from asyncio import Queue as AQueue
+from contextlib import contextmanager
+from functools import partial
+from importlib import metadata
+from io import BytesIO, StringIO, UnsupportedOperation
+from io import open as fdopen
+from locale import getpreferredencoding
+from queue import Empty, Queue
+from shlex import quote as shlex_quote
+from types import GeneratorType, ModuleType
+from typing import Any, Dict, Type, Union
 
-IS_PY3 = sys.version_info[0] == 3
-MINOR_VER = sys.version_info[1]
-IS_PY26 = sys.version_info[0] == 2 and MINOR_VER == 6
-if IS_PY3:
-    from io import StringIO
-
-    ioStringIO = StringIO
-    from io import BytesIO as cStringIO
-
-    iocStringIO = cStringIO
-    from queue import Queue, Empty
-
-    # for some reason, python 3.1 removed the builtin "callable", wtf
-    if not hasattr(__builtins__, "callable"):
-        def callable(ob):
-            return hasattr(ob, "__call__")
-else:
-    from StringIO import StringIO
-    from cStringIO import OutputType as cStringIO
-    from io import StringIO as ioStringIO
-    from io import BytesIO as iocStringIO
-    from Queue import Queue, Empty
-
-try:
-    from shlex import quote as shlex_quote  # here from 3.3 onward
-except ImportError:
-    from pipes import quote as shlex_quote  # undocumented before 2.7
+__version__ = metadata.version("sh")
+__project_url__ = "https://github.com/amoffat/sh"
 
 if "windows" in platform.system().lower():  # pragma: no cover
-    raise ImportError("sh %s is currently only supported on linux and osx. \
+    raise ImportError(
+        "sh %s is currently only supported on linux and osx. \
 please install pbs 0.110 (http://pypi.python.org/pypi/pbs) for windows \
-support." % __version__)
+support."
+        % __version__
+    )
+
+TEE_STDOUT = {True, "out", 1}
+TEE_STDERR = {"err", 2}
 
 DEFAULT_ENCODING = getpreferredencoding() or "UTF-8"
 
 IS_MACOS = platform.system() in ("AIX", "Darwin")
-THIS_DIR = os.path.dirname(os.path.realpath(__file__))
 SH_LOGGER_NAME = __name__
 
 # normally i would hate this idea of using a global to signify whether we are
@@ -113,20 +98,12 @@ FORCE_USE_SELECT = bool(int(os.environ.get("SH_TESTS_USE_SELECT", "0")))
 # with-context
 PUSHD_LOCK = threading.RLock()
 
-if hasattr(inspect, "getfullargspec"):
-    def get_num_args(fn):
-        return len(inspect.getfullargspec(fn).args)
-else:
-    def get_num_args(fn):
-        return len(inspect.getargspec(fn).args)
 
-if IS_PY3:
-    raw_input = input
-    unicode = str
-    basestring = str
-    long = int
+def get_num_args(fn):
+    return len(inspect.getfullargspec(fn).args)
 
-_unicode_methods = set(dir(unicode()))
+
+_unicode_methods = set(dir(str()))
 
 HAS_POLL = hasattr(select, "poll")
 POLLER_EVENT_READ = 1
@@ -134,169 +111,139 @@ POLLER_EVENT_WRITE = 2
 POLLER_EVENT_HUP = 4
 POLLER_EVENT_ERROR = 8
 
-# here we use an use a poller interface that transparently selects the most
-# capable poller (out of either select.select or select.poll).  this was added
-# by zhangyafeikimi when he discovered that if the fds created internally by sh
-# numbered > 1024, select.select failed (a limitation of select.select).  this
-# can happen if your script opens a lot of files
-if HAS_POLL and not FORCE_USE_SELECT:
-    class Poller(object):
-        def __init__(self):
-            self._poll = select.poll()
-            # file descriptor <-> file object bidirectional maps
-            self.fd_lookup = {}
-            self.fo_lookup = {}
-
-        def __nonzero__(self):
-            return len(self.fd_lookup) != 0
-
-        def __len__(self):
-            return len(self.fd_lookup)
-
-        def _set_fileobject(self, f):
-            if hasattr(f, "fileno"):
-                fd = f.fileno()
-                self.fd_lookup[fd] = f
-                self.fo_lookup[f] = fd
-            else:
-                self.fd_lookup[f] = f
-                self.fo_lookup[f] = f
-
-        def _remove_fileobject(self, f):
-            if hasattr(f, "fileno"):
-                fd = f.fileno()
-                del self.fd_lookup[fd]
-                del self.fo_lookup[f]
-            else:
-                del self.fd_lookup[f]
-                del self.fo_lookup[f]
-
-        def _get_file_descriptor(self, f):
-            return self.fo_lookup.get(f)
-
-        def _get_file_object(self, fd):
-            return self.fd_lookup.get(fd)
-
-        def _register(self, f, events):
-            # f can be a file descriptor or file object
-            self._set_fileobject(f)
-            fd = self._get_file_descriptor(f)
-            self._poll.register(fd, events)
-
-        def register_read(self, f):
-            self._register(f, select.POLLIN | select.POLLPRI)
-
-        def register_write(self, f):
-            self._register(f, select.POLLOUT)
 
-        def register_error(self, f):
-            self._register(f, select.POLLERR | select.POLLHUP | select.POLLNVAL)
+class PollPoller(object):
+    def __init__(self):
+        self._poll = select.poll()
+        # file descriptor <-> file object bidirectional maps
+        self.fd_lookup = {}
+        self.fo_lookup = {}
 
-        def unregister(self, f):
-            fd = self._get_file_descriptor(f)
-            self._poll.unregister(fd)
-            self._remove_fileobject(f)
+    def __nonzero__(self):
+        return len(self.fd_lookup) != 0
 
-        def poll(self, timeout):
-            if timeout is not None:
-                # convert from seconds to milliseconds
-                timeout *= 1000
-            changes = self._poll.poll(timeout)
-            results = []
-            for fd, events in changes:
-                f = self._get_file_object(fd)
-                if events & (select.POLLIN | select.POLLPRI):
-                    results.append((f, POLLER_EVENT_READ))
-                elif events & select.POLLOUT:
-                    results.append((f, POLLER_EVENT_WRITE))
-                elif events & select.POLLHUP:
-                    results.append((f, POLLER_EVENT_HUP))
-                elif events & (select.POLLERR | select.POLLNVAL):
-                    results.append((f, POLLER_EVENT_ERROR))
-            return results
-else:
-    class Poller(object):
-        def __init__(self):
-            self.rlist = []
-            self.wlist = []
-            self.xlist = []
+    def __len__(self):
+        return len(self.fd_lookup)
 
-        def __nonzero__(self):
-            return len(self.rlist) + len(self.wlist) + len(self.xlist) != 0
+    def _set_fileobject(self, f):
+        if hasattr(f, "fileno"):
+            fd = f.fileno()
+            self.fd_lookup[fd] = f
+            self.fo_lookup[f] = fd
+        else:
+            self.fd_lookup[f] = f
+            self.fo_lookup[f] = f
+
+    def _remove_fileobject(self, f):
+        if hasattr(f, "fileno"):
+            fd = f.fileno()
+            del self.fd_lookup[fd]
+            del self.fo_lookup[f]
+        else:
+            del self.fd_lookup[f]
+            del self.fo_lookup[f]
+
+    def _get_file_descriptor(self, f):
+        return self.fo_lookup.get(f)
+
+    def _get_file_object(self, fd):
+        return self.fd_lookup.get(fd)
+
+    def _register(self, f, events):
+        # f can be a file descriptor or file object
+        self._set_fileobject(f)
+        fd = self._get_file_descriptor(f)
+        self._poll.register(fd, events)
+
+    def register_read(self, f):
+        self._register(f, select.POLLIN | select.POLLPRI)
+
+    def register_write(self, f):
+        self._register(f, select.POLLOUT)
+
+    def register_error(self, f):
+        self._register(f, select.POLLERR | select.POLLHUP | select.POLLNVAL)
+
+    def unregister(self, f):
+        fd = self._get_file_descriptor(f)
+        self._poll.unregister(fd)
+        self._remove_fileobject(f)
+
+    def poll(self, timeout):
+        if timeout is not None:
+            # convert from seconds to milliseconds
+            timeout *= 1000
+        changes = self._poll.poll(timeout)
+        results = []
+        for fd, events in changes:
+            f = self._get_file_object(fd)
+            if events & (select.POLLIN | select.POLLPRI):
+                results.append((f, POLLER_EVENT_READ))
+            elif events & select.POLLOUT:
+                results.append((f, POLLER_EVENT_WRITE))
+            elif events & select.POLLHUP:
+                results.append((f, POLLER_EVENT_HUP))
+            elif events & (select.POLLERR | select.POLLNVAL):
+                results.append((f, POLLER_EVENT_ERROR))
+        return results
 
-        def __len__(self):
-            return len(self.rlist) + len(self.wlist) + len(self.xlist)
 
-        @staticmethod
-        def _register(f, events):
-            if f not in events:
-                events.append(f)
+class SelectPoller(object):
+    def __init__(self):
+        self.rlist = []
+        self.wlist = []
+        self.xlist = []
 
-        @staticmethod
-        def _unregister(f, events):
-            if f in events:
-                events.remove(f)
+    def __nonzero__(self):
+        return len(self.rlist) + len(self.wlist) + len(self.xlist) != 0
 
-        def register_read(self, f):
-            self._register(f, self.rlist)
+    def __len__(self):
+        return len(self.rlist) + len(self.wlist) + len(self.xlist)
 
-        def register_write(self, f):
-            self._register(f, self.wlist)
+    @staticmethod
+    def _register(f, events):
+        if f not in events:
+            events.append(f)
 
-        def register_error(self, f):
-            self._register(f, self.xlist)
+    @staticmethod
+    def _unregister(f, events):
+        if f in events:
+            events.remove(f)
 
-        def unregister(self, f):
-            self._unregister(f, self.rlist)
-            self._unregister(f, self.wlist)
-            self._unregister(f, self.xlist)
+    def register_read(self, f):
+        self._register(f, self.rlist)
 
-        def poll(self, timeout):
-            _in, _out, _err = select.select(self.rlist, self.wlist, self.xlist, timeout)
-            results = []
-            for f in _in:
-                results.append((f, POLLER_EVENT_READ))
-            for f in _out:
-                results.append((f, POLLER_EVENT_WRITE))
-            for f in _err:
-                results.append((f, POLLER_EVENT_ERROR))
-            return results
+    def register_write(self, f):
+        self._register(f, self.wlist)
 
+    def register_error(self, f):
+        self._register(f, self.xlist)
 
-def encode_to_py3bytes_or_py2str(s):
-    """ takes anything and attempts to return a py2 string or py3 bytes.  this
-    is typically used when creating command + arguments to be executed via
-    os.exec* """
+    def unregister(self, f):
+        self._unregister(f, self.rlist)
+        self._unregister(f, self.wlist)
+        self._unregister(f, self.xlist)
 
-    fallback_encoding = "utf8"
+    def poll(self, timeout):
+        _in, _out, _err = select.select(self.rlist, self.wlist, self.xlist, timeout)
+        results = []
+        for f in _in:
+            results.append((f, POLLER_EVENT_READ))
+        for f in _out:
+            results.append((f, POLLER_EVENT_WRITE))
+        for f in _err:
+            results.append((f, POLLER_EVENT_ERROR))
+        return results
 
-    if IS_PY3:
-        # if we're already bytes, do nothing
-        if isinstance(s, bytes):
-            pass
-        else:
-            s = str(s)
-            try:
-                s = bytes(s, DEFAULT_ENCODING)
-            except UnicodeEncodeError:
-                s = bytes(s, fallback_encoding)
-    else:
-        # attempt to convert the thing to unicode from the system's encoding
-        try:
-            s = unicode(s, DEFAULT_ENCODING)
-        # if the thing is already unicode, or it's a number, it can't be
-        # coerced to unicode with an encoding argument, but if we leave out
-        # the encoding argument, it will convert it to a string, then to unicode
-        except TypeError:
-            s = unicode(s)
 
-        # now that we have guaranteed unicode, encode to our system encoding,
-        # but attempt to fall back to something
-        try:
-            s = s.encode(DEFAULT_ENCODING)
-        except UnicodeEncodeError:
-            s = s.encode(fallback_encoding, "replace")
-    return s
+# here we use an use a poller interface that transparently selects the most
+# capable poller (out of either select.select or select.poll).  this was added
+# by zhangyafeikimi when he discovered that if the fds created internally by sh
+# numbered > 1024, select.select failed (a limitation of select.select).  this
+# can happen if your script opens a lot of files
+Poller: Union[Type[SelectPoller], Type[PollPoller]] = SelectPoller
+if HAS_POLL and not FORCE_USE_SELECT:
+    Poller = PollPoller
 
 
 def _indent_text(text, num=4):
@@ -321,7 +268,7 @@ Original exception:
 
 
 class ErrorReturnCodeMeta(type):
-    """ a metaclass which provides the ability for an ErrorReturnCode (or
+    """a metaclass which provides the ability for an ErrorReturnCode (or
     derived) instance, imported from one sh module, to be considered the
     subclass of ErrorReturnCode from another module.  this is mostly necessary
     in the tests, where we do assertRaises, but the ErrorReturnCode that the
@@ -363,6 +310,7 @@ class ErrorReturnCode(Exception):
         return self.__class__, (self.full_cmd, self.stdout, self.stderr, self.truncate)
 
     def __init__(self, full_cmd, stdout, stderr, truncate=True):
+        self.exit_code = self.exit_code  # makes pylint happy
         self.full_cmd = full_cmd
         self.stdout = stdout
         self.stderr = stderr
@@ -370,32 +318,30 @@ class ErrorReturnCode(Exception):
 
         exc_stdout = self.stdout
         if truncate:
-            exc_stdout = exc_stdout[:self.truncate_cap]
+            exc_stdout = exc_stdout[: self.truncate_cap]
             out_delta = len(self.stdout) - len(exc_stdout)
             if out_delta:
-                exc_stdout += ("... (%d more, please see e.stdout)" % out_delta).encode()
+                exc_stdout += (
+                    "... (%d more, please see e.stdout)" % out_delta
+                ).encode()
 
         exc_stderr = self.stderr
         if truncate:
-            exc_stderr = exc_stderr[:self.truncate_cap]
+            exc_stderr = exc_stderr[: self.truncate_cap]
             err_delta = len(self.stderr) - len(exc_stderr)
             if err_delta:
-                exc_stderr += ("... (%d more, please see e.stderr)" % err_delta).encode()
+                exc_stderr += (
+                    "... (%d more, please see e.stderr)" % err_delta
+                ).encode()
 
-        msg_tmpl = unicode("\n\n  RAN: {cmd}\n\n  STDOUT:\n{stdout}\n\n  STDERR:\n{stderr}")
+        msg_tmpl = str("\n\n  RAN: {cmd}\n\n  STDOUT:\n{stdout}\n\n  STDERR:\n{stderr}")
 
         msg = msg_tmpl.format(
             cmd=self.full_cmd,
             stdout=exc_stdout.decode(DEFAULT_ENCODING, "replace"),
-            stderr=exc_stderr.decode(DEFAULT_ENCODING, "replace")
+            stderr=exc_stderr.decode(DEFAULT_ENCODING, "replace"),
         )
 
-        if not IS_PY3:
-            # Exception messages should be treated as an API which takes native str type on both
-            # Python2 and Python3.  (Meaning, it's a byte string on Python2 and a text string on
-            # Python3)
-            msg = encode_to_py3bytes_or_py2str(msg)
-
         super(ErrorReturnCode, self).__init__(msg)
 
 
@@ -404,8 +350,8 @@ class SignalException(ErrorReturnCode):
 
 
 class TimeoutException(Exception):
-    """ the exception thrown when a command is killed because a specified
-    timeout (via _timeout or .wait(timeout)) was hit """
+    """the exception thrown when a command is killed because a specified
+    timeout (via _timeout or .wait(timeout)) was hit"""
 
     def __init__(self, exit_code, full_cmd):
         self.exit_code = exit_code
@@ -413,7 +359,7 @@ class TimeoutException(Exception):
         super(Exception, self).__init__()
 
 
-SIGNALS_THAT_SHOULD_THROW_EXCEPTION = set((
+SIGNALS_THAT_SHOULD_THROW_EXCEPTION = {
     signal.SIGABRT,
     signal.SIGBUS,
     signal.SIGFPE,
@@ -425,7 +371,7 @@ SIGNALS_THAT_SHOULD_THROW_EXCEPTION = set((
     signal.SIGSEGV,
     signal.SIGTERM,
     signal.SIGSYS,
-))
+}
 
 
 # we subclass AttributeError because:
@@ -436,13 +382,15 @@ class CommandNotFound(AttributeError):
 
 
 rc_exc_regex = re.compile(r"(ErrorReturnCode|SignalException)_((\d+)|SIG[a-zA-Z]+)")
-rc_exc_cache = {}
+rc_exc_cache: Dict[str, Type[ErrorReturnCode]] = {}
 
-SIGNAL_MAPPING = dict([(v, k) for k, v in signal.__dict__.items() if re.match(r"SIG[a-zA-Z]+", k)])
+SIGNAL_MAPPING = dict(
+    [(v, k) for k, v in signal.__dict__.items() if re.match(r"SIG[a-zA-Z]+", k)]
+)
 
 
 def get_exc_from_name(name):
-    """ takes an exception name, like:
+    """takes an exception name, like:
 
         ErrorReturnCode_1
         SignalException_9
@@ -450,7 +398,7 @@ def get_exc_from_name(name):
 
     and returns the corresponding exception.  this is primarily used for
     importing exceptions from sh into user code, for instance, to capture those
-    exceptions """
+    exceptions"""
 
     exc = None
     try:
@@ -474,12 +422,12 @@ def get_exc_from_name(name):
 
 
 def get_rc_exc(rc):
-    """ takes a exit code or negative signal number and produces an exception
+    """takes a exit code or negative signal number and produces an exception
     that corresponds to that return code.  positive return codes yield
     ErrorReturnCode exception, negative return codes yield SignalException
 
     we also cache the generated exception so that only one signal of that type
-    exists, preserving identity """
+    exists, preserving identity"""
 
     try:
         return rc_exc_cache[rc]
@@ -522,8 +470,8 @@ class GlobResults(list):
         list.__init__(self, results)
 
 
-def glob(path, *args, **kwargs):
-    expanded = GlobResults(path, _old_glob(path, *args, **kwargs))
+def glob(path, recursive=False):
+    expanded = GlobResults(path, _old_glob(path, recursive=recursive))
     return expanded
 
 
@@ -534,16 +482,18 @@ def canonicalize(path):
     return os.path.abspath(os.path.expanduser(path))
 
 
-def which(program, paths=None):
-    """ takes a program name or full path, plus an optional collection of search
+def _which(program, paths=None):
+    """takes a program name or full path, plus an optional collection of search
     paths, and returns the full path of the requested executable.  if paths is
     specified, it is the entire list of search paths, and the PATH env is not
-    used at all.  otherwise, PATH env is used to look for the program """
+    used at all.  otherwise, PATH env is used to look for the program"""
 
     def is_exe(file_path):
-        return (os.path.exists(file_path) and
-                os.access(file_path, os.X_OK) and
-                os.path.isfile(os.path.realpath(file_path)))
+        return (
+            os.path.exists(file_path)
+            and os.access(file_path, os.X_OK)
+            and os.path.isfile(os.path.realpath(file_path))
+        )
 
     found_path = None
     fpath, fname = os.path.split(program)
@@ -576,31 +526,31 @@ def which(program, paths=None):
 
 
 def resolve_command_path(program):
-    path = which(program)
+    path = _which(program)
     if not path:
         # our actual command might have a dash in it, but we can't call
         # that from python (we have to use underscores), so we'll check
         # if a dash version of our underscore command exists and use that
         # if it does
         if "_" in program:
-            path = which(program.replace("_", "-"))
+            path = _which(program.replace("_", "-"))
         if not path:
             return None
     return path
 
 
-def resolve_command(name, baked_args=None):
+def resolve_command(name, command_cls, baked_args=None):
     path = resolve_command_path(name)
     cmd = None
     if path:
-        cmd = Command(path)
+        cmd = command_cls(path)
         if baked_args:
             cmd = cmd.bake(**baked_args)
     return cmd
 
 
 class Logger(object):
-    """ provides a memory-inexpensive logger.  a gotcha about python's builtin
+    """provides a memory-inexpensive logger.  a gotcha about python's builtin
     logger is that logger objects are never garbage collected.  if you create a
     thousand loggers with unique names, they'll sit there in memory until your
     script is done.  with sh, it's easy to create loggers with unique names if
@@ -660,7 +610,7 @@ def default_logger_str(cmd, call_args, pid=None):
 
 
 class RunningCommand(object):
-    """ this represents an executing Command object.  it is returned as the
+    """this represents an executing Command object.  it is returned as the
     result of __call__() being executed on a Command instance.  this creates a
     reference to a OProc instance, which is a low-level wrapper around the
     process that was exec'd
@@ -671,10 +621,10 @@ class RunningCommand(object):
     backgrounded-process's stdout/err, the RunningCommand object is smart enough
     to know to wait() on the process to finish first.  and when the process
     finishes, RunningCommand is smart enough to translate exit codes to
-    exceptions. """
+    exceptions."""
 
     # these are attributes that we allow to pass through to OProc
-    _OProc_attr_whitelist = set((
+    _OProc_attr_allowlist = {
         "signal",
         "terminate",
         "kill",
@@ -684,27 +634,16 @@ class RunningCommand(object):
         "sid",
         "pgid",
         "ctty",
-
         "input_thread_exc",
         "output_thread_exc",
         "bg_thread_exc",
-    ))
+    }
 
     def __init__(self, cmd, call_args, stdin, stdout, stderr):
-        """
-            cmd is a list, where each element is encoded as bytes (PY3) or str (PY2)
-        """
-
         # self.ran is used for auditing what actually ran.  for example, in
         # exceptions, or if you just want to know what was ran after the
         # command ran
-        #
-        # here we're making a consistent unicode string out if our cmd.
-        # we're also assuming (correctly, i think) that the command and its
-        # arguments are the encoding we pass into _encoding, which falls back to
-        # the system's encoding
-        enc = call_args["encoding"]
-        self.ran = " ".join([shlex_quote(arg.decode(enc, "ignore")) for arg in cmd])
+        self.ran = " ".join([shlex_quote(str(arg)) for arg in cmd])
 
         self.call_args = call_args
         self.cmd = cmd
@@ -714,6 +653,21 @@ class RunningCommand(object):
         should_wait = True
         spawn_process = True
 
+        # if we're using an async for loop on this object, we need to put the underlying
+        # iterable in no-block mode. however, we will only know if we're using an async
+        # for loop after this object is constructed. so we'll set it to False now, but
+        # then later set it to True if we need it
+        self._force_noblock_iter = False
+
+        # this event is used when we want to `await` a RunningCommand. see how it gets
+        # used in self.__await__
+        try:
+            asyncio.get_event_loop()
+        except RuntimeError:
+            self.aio_output_complete = None
+        else:
+            self.aio_output_complete = asyncio.Event()
+
         # this is used to track if we've already raised StopIteration, and if we
         # have, raise it immediately again if the user tries to call next() on
         # us.  https://github.com/amoffat/sh/issues/273
@@ -728,6 +682,9 @@ class RunningCommand(object):
         if call_args["piped"] or call_args["iter"] or call_args["iter_noblock"]:
             should_wait = False
 
+        if call_args["async"]:
+            should_wait = False
+
         # we're running in the background, return self and let us lazily
         # evaluate
         if call_args["bg"]:
@@ -773,8 +730,17 @@ class RunningCommand(object):
             # self.process, but it has not been assigned yet
             process_assign_lock = threading.Lock()
             with process_assign_lock:
-                self.process = OProc(self, self.log, cmd, stdin, stdout, stderr,
-                                     self.call_args, pipe, process_assign_lock)
+                self.process = OProc(
+                    self,
+                    self.log,
+                    cmd,
+                    stdin,
+                    stdout,
+                    stderr,
+                    self.call_args,
+                    pipe,
+                    process_assign_lock,
+                )
 
             logger_str = log_str_factory(self.ran, call_args, self.process.pid)
             self.log.context = self.log.sanitize_context(logger_str)
@@ -784,16 +750,17 @@ class RunningCommand(object):
                 self.wait()
 
     def wait(self, timeout=None):
-        """ waits for the running command to finish.  this is called on all
+        """waits for the running command to finish.  this is called on all
         running commands, eventually, except for ones that run in the background
 
-        if timeout is a number, it is the number of seconds to wait for the process to resolve. otherwise block on wait.
+        if timeout is a number, it is the number of seconds to wait for the process to
+        resolve. otherwise block on wait.
 
-        this function can raise a TimeoutException, either because of a `_timeout` on the command itself as it was
+        this function can raise a TimeoutException, either because of a `_timeout` on
+        the command itself as it was
         launched, or because of a timeout passed into this method.
         """
         if not self._waited_until_completion:
-
             # if we've been given a timeout, we need to poll is_alive()
             if timeout is not None:
                 waited_for = 0
@@ -804,12 +771,14 @@ class RunningCommand(object):
                     raise RuntimeError("timeout cannot be negative")
 
                 # while we still have time to wait, run this loop
-                # notice that alive and exit_code are only defined in this loop, but the loop is also guaranteed to run,
-                # defining them, given the constraints that timeout is non-negative
+                # notice that alive and exit_code are only defined in this loop, but
+                # the loop is also guaranteed to run, defining them, given the
+                # constraints that timeout is non-negative
                 while waited_for <= timeout:
                     alive, exit_code = self.process.is_alive()
 
-                    # if we're alive, we need to wait some more, but let's sleep before we poll again
+                    # if we're alive, we need to wait some more, but let's sleep
+                    # before we poll again
                     if alive:
                         time.sleep(sleep_amt)
                         waited_for += sleep_amt
@@ -818,13 +787,15 @@ class RunningCommand(object):
                     else:
                         break
 
-                # if we've made it this far, and we're still alive, then it means we timed out waiting
+                # if we've made it this far, and we're still alive, then it means we
+                # timed out waiting
                 if alive:
                     raise TimeoutException(None, self.ran)
 
-                # if we didn't time out, we fall through and let the rest of the code handle exit_code.
-                # notice that we set _waited_until_completion here, only if we didn't time out. this allows us to
-                # re-wait again on timeout, if we catch the TimeoutException in the parent frame
+                # if we didn't time out, we fall through and let the rest of the code
+                # handle exit_code. notice that we set _waited_until_completion here,
+                # only if we didn't time out. this allows us to re-wait again on
+                # timeout, if we catch the TimeoutException in the parent frame
                 self._waited_until_completion = True
 
             else:
@@ -851,17 +822,20 @@ class RunningCommand(object):
         return self
 
     def is_alive(self):
-        """ returns whether or not we're still alive. this call has side-effects on OProc """
+        """returns whether or not we're still alive. this call has side-effects on
+        OProc"""
         return self.process.is_alive()[0]
 
     def handle_command_exit_code(self, code):
-        """ here we determine if we had an exception, or an error code that we
+        """here we determine if we had an exception, or an error code that we
         weren't expecting to see.  if we did, we create and raise an exception
         """
         ca = self.call_args
         exc_class = get_exc_exit_code_would_raise(code, ca["ok_code"], ca["piped"])
         if exc_class:
-            exc = exc_class(self.ran, self.process.stdout, self.process.stderr, ca["truncate_exc"])
+            exc = exc_class(
+                self.ran, self.process.stdout, self.process.stderr, ca["truncate_exc"]
+            )
             raise exc
 
     @property
@@ -883,28 +857,38 @@ class RunningCommand(object):
         return len(str(self))
 
     def __enter__(self):
-        """ we don't actually do anything here because anything that should have
+        """we don't actually do anything here because anything that should have
         been done would have been done in the Command.__call__ call.
         essentially all that has to happen is the command be pushed on the
-        prepend stack. """
+        prepend stack."""
         pass
 
     def __iter__(self):
         return self
 
-    def next(self):
-        """ allow us to iterate over the output of our command """
+    def __next__(self):
+        """allow us to iterate over the output of our command"""
 
         if self._stopped_iteration:
             raise StopIteration()
 
+        pq = self.process._pipe_queue
+
+        # the idea with this is, if we're using regular `_iter` (non-asyncio), then we
+        # want to have blocking be True when we read from the pipe queue, so our cpu
+        # doesn't spin too fast. however, if we *are* using asyncio (an async for loop),
+        # then we want non-blocking pipe queue reads, because we'll do an asyncio.sleep,
+        # in the coroutine that is doing the iteration, this way coroutines have better
+        # yielding (see queue_connector in __aiter__).
+        block_pq_read = not self._force_noblock_iter
+
         # we do this because if get blocks, we can't catch a KeyboardInterrupt
         # so the slight timeout allows for that.
         while True:
             try:
-                chunk = self.process._pipe_queue.get(True, self.call_args["iter_poll_time"])
+                chunk = pq.get(block_pq_read, self.call_args["iter_poll_time"])
             except Empty:
-                if self.call_args["iter_noblock"]:
+                if self.call_args["iter_noblock"] or self._force_noblock_iter:
                     return errno.EWOULDBLOCK
             else:
                 if chunk is None:
@@ -912,65 +896,94 @@ class RunningCommand(object):
                     self._stopped_iteration = True
                     raise StopIteration()
                 try:
-                    return chunk.decode(self.call_args["encoding"], self.call_args["decode_errors"])
+                    return chunk.decode(
+                        self.call_args["encoding"], self.call_args["decode_errors"]
+                    )
                 except UnicodeDecodeError:
                     return chunk
 
-    # python 3
-    __next__ = next
+    def __await__(self):
+        async def wait_for_completion():
+            await self.aio_output_complete.wait()
+            return str(self)
+
+        return wait_for_completion().__await__()
+
+    def __aiter__(self):
+        # maxsize is critical to making sure our queue_connector function below yields
+        # when it awaits _aio_queue.put(chunk). if we didn't have a maxsize, our loop
+        # would happily iterate through `chunk in self` and put onto the queue without
+        # any blocking, and therefore no yielding, which would prevent other coroutines
+        # from running.
+        self._aio_queue = AQueue(maxsize=1)
+        self._force_noblock_iter = True
+
+        # the sole purpose of this coroutine is to connect our pipe_queue (which is
+        # being populated by a thread) to an asyncio-friendly queue. then, in __anext__,
+        # we can iterate over that asyncio queue.
+        async def queue_connector():
+            try:
+                # this will spin as fast as possible if there's no data to read,
+                # thanks to self._force_noblock_iter. so we sleep below.
+                for chunk in self:
+                    if chunk == errno.EWOULDBLOCK:
+                        # let us have better coroutine yielding.
+                        await asyncio.sleep(0.01)
+                    else:
+                        await self._aio_queue.put(chunk)
+            finally:
+                await self._aio_queue.put(None)
+
+        if sys.version_info < (3, 7, 0):
+            task = asyncio.ensure_future(queue_connector())
+        else:
+            task = asyncio.create_task(queue_connector())
+
+        self._aio_task = task
+        return self
+
+    async def __anext__(self):
+        chunk = await self._aio_queue.get()
+        if chunk is not None:
+            return chunk
+        else:
+            exc = self._aio_task.exception()
+            if exc is not None:
+                raise exc
+            raise StopAsyncIteration
 
     def __exit__(self, exc_type, exc_val, exc_tb):
         if self.call_args["with"] and get_prepend_stack():
             get_prepend_stack().pop()
 
     def __str__(self):
-        """ in python3, should return unicode.  in python2, should return a
-        string of bytes """
-        if IS_PY3:
-            return self.__unicode__()
-        else:
-            return unicode(self).encode(self.call_args["encoding"])
-
-    def __unicode__(self):
-        """ a magic method defined for python2.  calling unicode() on a
-        RunningCommand object will call this """
         if self.process and self.stdout:
-            return self.stdout.decode(self.call_args["encoding"], self.call_args["decode_errors"])
-        elif IS_PY3:
-            return ""
-        else:
-            return unicode("")
+            return self.stdout.decode(
+                self.call_args["encoding"], self.call_args["decode_errors"]
+            )
+        return ""
 
     def __eq__(self, other):
-        return unicode(self) == unicode(other)
-
-    __hash__ = None  # Avoid DeprecationWarning in Python < 3
+        return id(self) == id(other)
 
     def __contains__(self, item):
         return item in str(self)
 
     def __getattr__(self, p):
         # let these three attributes pass through to the OProc object
-        if p in self._OProc_attr_whitelist:
+        if p in self._OProc_attr_allowlist:
             if self.process:
                 return getattr(self.process, p)
             else:
                 raise AttributeError
 
-        # see if strings have what we're looking for.  we're looking at the
-        # method names explicitly because we don't want to evaluate self unless
-        # we absolutely have to, the reason being, in python2, hasattr swallows
-        # exceptions, and if we try to run hasattr on a command that failed and
-        # is being run with _iter=True, the command will be evaluated, throw an
-        # exception, but hasattr will discard it
+        # see if strings have what we're looking for
         if p in _unicode_methods:
-            return getattr(unicode(self), p)
+            return getattr(str(self), p)
 
         raise AttributeError
 
     def __repr__(self):
-        """ in python3, should return unicode.  in python2, should return a
-        string of bytes """
         try:
             return str(self)
         except UnicodeDecodeError:
@@ -980,7 +993,7 @@ class RunningCommand(object):
             return repr("")
 
     def __long__(self):
-        return long(str(self).strip())
+        return int(str(self).strip())
 
     def __float__(self):
         return float(str(self).strip())
@@ -990,7 +1003,7 @@ class RunningCommand(object):
 
 
 def output_redirect_is_filename(out):
-    return isinstance(out, basestring)
+    return isinstance(out, str) or hasattr(out, "__fspath__")
 
 
 def get_prepend_stack():
@@ -1005,7 +1018,6 @@ def special_kwarg_validator(passed_kwargs, merged_kwargs, invalid_list):
     invalid_args = []
 
     for elem in invalid_list:
-
         if callable(elem):
             fn = elem
             ret = fn(passed_kwargs, merged_kwargs)
@@ -1033,7 +1045,7 @@ def get_fileno(ob):
             fileno = fileno_meth()
         except UnsupportedOperation:
             pass
-    elif isinstance(ob, (int, long)) and ob >= 0:
+    elif isinstance(ob, (int,)) and ob >= 0:
         fileno = ob
 
     return fileno
@@ -1044,7 +1056,7 @@ def ob_is_fd_based(ob):
 
 
 def ob_is_tty(ob):
-    """ checks if an object (like a file-like object) is a tty.  """
+    """checks if an object (like a file-like object) is a tty."""
     fileno = get_fileno(ob)
     is_tty = False
     if fileno is not None:
@@ -1061,36 +1073,51 @@ def ob_is_pipe(ob):
     return is_pipe
 
 
+def output_iterator_validator(passed_kwargs, merged_kwargs):
+    invalid = []
+    if passed_kwargs.get("no_out") and passed_kwargs.get("iter") in (True, "out"):
+        error = "You cannot iterate over output if there is no output"
+        invalid.append((("no_out", "iter"), error))
+    return invalid
+
+
 def tty_in_validator(passed_kwargs, merged_kwargs):
-    # here we'll validate that people aren't randomly shotgun-debugging different tty options and hoping that they'll
-    # work, without understanding what they do
+    # here we'll validate that people aren't randomly shotgun-debugging different tty
+    # options and hoping that they'll work, without understanding what they do
     pairs = (("tty_in", "in"), ("tty_out", "out"))
     invalid = []
     for tty_type, std in pairs:
         if tty_type in passed_kwargs and ob_is_tty(passed_kwargs.get(std, None)):
-            error = "`_%s` is a TTY already, so so it doesn't make sense to set up a TTY with `_%s`" % (std, tty_type)
+            error = (
+                "`_%s` is a TTY already, so so it doesn't make sense to set up a"
+                " TTY with `_%s`" % (std, tty_type)
+            )
             invalid.append(((tty_type, std), error))
 
     # if unify_ttys is set, then both tty_in and tty_out must both be True
-    if merged_kwargs["unify_ttys"] and not (merged_kwargs["tty_in"] and merged_kwargs["tty_out"]):
-        invalid.append((
-            ("unify_ttys", "tty_in", "tty_out"),
-            "`_tty_in` and `_tty_out` must both be True if `_unify_ttys` is True"
-        ))
+    if merged_kwargs["unify_ttys"] and not (
+        merged_kwargs["tty_in"] and merged_kwargs["tty_out"]
+    ):
+        invalid.append(
+            (
+                ("unify_ttys", "tty_in", "tty_out"),
+                "`_tty_in` and `_tty_out` must both be True if `_unify_ttys` is True",
+            )
+        )
 
     return invalid
 
 
 def fg_validator(passed_kwargs, merged_kwargs):
-    """ fg is not valid with basically every other option """
+    """fg is not valid with basically every other option"""
 
     invalid = []
     msg = """\
 _fg is invalid with nearly every other option, see warning and workaround here:
 
     https://amoffat.github.io/sh/sections/special_arguments.html#fg"""
-    whitelist = set(("env", "fg", "cwd"))
-    offending = set(passed_kwargs.keys()) - whitelist
+    allowlist = {"env", "fg", "cwd", "ok_code"}
+    offending = set(passed_kwargs.keys()) - allowlist
 
     if "fg" in passed_kwargs and passed_kwargs["fg"] and offending:
         invalid.append(("fg", msg))
@@ -1098,11 +1125,11 @@ _fg is invalid with nearly every other option, see warning and workaround here:
 
 
 def bufsize_validator(passed_kwargs, merged_kwargs):
-    """ a validator to prevent a user from saying that they want custom
+    """a validator to prevent a user from saying that they want custom
     buffering when they're using an in/out object that will be os.dup'ed to the
     process, and has its own buffering.  an example is a pipe or a tty.  it
     doesn't make sense to tell them to have a custom buffering, since the os
-    controls this. """
+    controls this."""
     invalid = []
 
     in_ob = passed_kwargs.get("in", None)
@@ -1126,8 +1153,9 @@ def bufsize_validator(passed_kwargs, merged_kwargs):
 
 
 def env_validator(passed_kwargs, merged_kwargs):
-    """ a validator to check that env is a dictionary and that all environment variable
-    keys and values are strings. Otherwise, we would exit with a confusing exit code 255. """
+    """a validator to check that env is a dictionary and that all environment variable
+    keys and values are strings. Otherwise, we would exit with a confusing exit code
+    255."""
     invalid = []
 
     env = passed_kwargs.get("env", None)
@@ -1142,46 +1170,45 @@ def env_validator(passed_kwargs, merged_kwargs):
         if not isinstance(k, str):
             invalid.append(("env", "env key {!r} must be a str".format(k)))
         if not isinstance(v, str):
-            invalid.append(("env", "value {!r} of env key {!r} must be a str".format(v, k)))
+            invalid.append(
+                ("env", "value {!r} of env key {!r} must be a str".format(v, k))
+            )
 
     return invalid
 
 
 class Command(object):
-    """ represents an un-run system program, like "ls" or "cd".  because it
+    """represents an un-run system program, like "ls" or "cd".  because it
     represents the program itself (and not a running instance of it), it should
     hold very little state.  in fact, the only state it does hold is baked
     arguments.
 
     when a Command object is called, the result that is returned is a
     RunningCommand object, which represents the Command put into an execution
-    state. """
+    state."""
+
     thread_local = threading.local()
+    RunningCommandCls = RunningCommand
 
-    _call_args = {
+    _call_args: Dict[str, Any] = {
         "fg": False,  # run command in foreground
-
         # run a command in the background.  commands run in the background
         # ignore SIGHUP and do not automatically exit when the parent process
         # ends
         "bg": False,
-
         # automatically report exceptions for background commands
         "bg_exc": True,
-
         "with": False,  # prepend the command to every command after it
         "in": None,
         "out": None,  # redirect STDOUT
         "err": None,  # redirect STDERR
         "err_to_out": None,  # redirect STDERR to STDOUT
-
         # stdin buffer size
         # 1 for line, 0 for unbuffered, any other number for that amount
         "in_bufsize": 0,
         # stdout buffer size, same values as above
         "out_bufsize": 1,
         "err_bufsize": 1,
-
         # this is how big the output buffers will be for stdout and stderr.
         # this is essentially how much output they will store from the process.
         # we use a deque, so if it overflows past this amount, the first items
@@ -1191,8 +1218,7 @@ class Command(object):
         # this is not a *BYTE* size, this is a *CHUNK* size...meaning, that if
         # you're buffering out/err at 1024 bytes, the internal buffer size will
         # be "internal_bufsize" CHUNKS of 1024 bytes
-        "internal_bufsize": 3 * 1024 ** 2,
-
+        "internal_bufsize": 3 * 1024**2,
         "env": None,
         "piped": None,
         "iter": None,
@@ -1201,29 +1227,23 @@ class Command(object):
         "iter_poll_time": 0.1,
         "ok_code": 0,
         "cwd": None,
-
         # the separator delimiting between a long-argument's name and its value
         # setting this to None will cause name and value to be two separate
         # arguments, like for short options
         # for example, --arg=derp, '=' is the long_sep
         "long_sep": "=",
-
         # the prefix used for long arguments
         "long_prefix": "--",
-
         # this is for programs that expect their input to be from a terminal.
         # ssh is one of those programs
         "tty_in": False,
         "tty_out": True,
         "unify_ttys": False,
-
         "encoding": DEFAULT_ENCODING,
         "decode_errors": "strict",
-
         # how long the process should run before it is auto-killed
         "timeout": None,
         "timeout_signal": signal.SIGKILL,
-
         # TODO write some docs on "long-running processes"
         # these control whether or not stdout/err will get aggregated together
         # as the process runs.  this has memory usage implications, so sometimes
@@ -1232,47 +1252,43 @@ class Command(object):
         "no_out": False,
         "no_err": False,
         "no_pipe": False,
-
         # if any redirection is used for stdout or stderr, internal buffering
         # of that data is not stored.  this forces it to be stored, as if
         # the output is being T'd to both the redirected destination and our
         # internal buffers
         "tee": None,
-
         # will be called when a process terminates regardless of exception
         "done": None,
-
         # a tuple (rows, columns) of the desired size of both the stdout and
         # stdin ttys, if ttys are being used
-        "tty_size": (20, 80),
-
+        "tty_size": (24, 80),
         # whether or not our exceptions should be truncated
         "truncate_exc": True,
-
         # a function to call after the child forks but before the process execs
         "preexec_fn": None,
-
         # UID to set after forking. Requires root privileges. Not supported on
         # Windows.
         "uid": None,
-
         # put the forked process in its own process session?
-        "new_session": True,
-
+        "new_session": False,
+        # put the forked process in its own process group?
+        "new_group": False,
         # pre-process args passed into __call__.  only really useful when used
         # in .bake()
         "arg_preprocess": None,
-
         # a callable that produces a log message from an argument tuple of the
         # command and the args
         "log_msg": None,
-
-        # whether or not to close all inherited fds. typically, this should be True, as inheriting fds can be a security
-        # vulnerability
+        # whether or not to close all inherited fds. typically, this should be True,
+        # as inheriting fds can be a security vulnerability
         "close_fds": True,
-
-        # a whitelist of the integer fds to pass through to the child process. setting this forces close_fds to be True
+        # a allowlist of the integer fds to pass through to the child process. setting
+        # this forces close_fds to be True
         "pass_fds": set(),
+        # return an instance of RunningCommand always. if this isn't True, then
+        # sometimes we may return just a plain unicode string
+        "return_cmd": False,
+        "async": False,
     }
 
     # this is a collection of validators to make sure the special kwargs make
@@ -1280,8 +1296,11 @@ class Command(object):
     _kwarg_validators = (
         (("err", "err_to_out"), "Stderr is already being redirected"),
         (("piped", "iter"), "You cannot iterate when this command is being piped"),
-        (("piped", "no_pipe"), "Using a pipe doesn't make sense if you've disabled the pipe"),
-        (("no_out", "iter"), "You cannot iterate over output if there is no output"),
+        (
+            ("piped", "no_pipe"),
+            "Using a pipe doesn't make sense if you've disabled the pipe",
+        ),
+        output_iterator_validator,
         (("close_fds", "pass_fds"), "Passing `pass_fds` forces `close_fds` to be True"),
         tty_in_validator,
         bufsize_validator,
@@ -1290,9 +1309,9 @@ class Command(object):
     )
 
     def __init__(self, path, search_paths=None):
-        found = which(path, search_paths)
+        found = _which(path, search_paths)
 
-        self._path = encode_to_py3bytes_or_py2str("")
+        self._path = ""
 
         # is the command baked (aka, partially applied)?
         self._partial = False
@@ -1310,7 +1329,7 @@ class Command(object):
         # exception.  if CommandNotFound is raised, we need self._path and the
         # other attributes to be set correctly, so repr() works when they're
         # inspecting the stack.  issue #304
-        self._path = encode_to_py3bytes_or_py2str(found)
+        self._path = found
         self.__name__ = str(self)
 
     def __getattribute__(self, name):
@@ -1335,24 +1354,26 @@ class Command(object):
 
         return val
 
-    @staticmethod
-    def _extract_call_args(kwargs):
-        """ takes kwargs that were passed to a command's __call__ and extracts
+    @classmethod
+    def _extract_call_args(cls, kwargs):
+        """takes kwargs that were passed to a command's __call__ and extracts
         out the special keyword arguments, we return a tuple of special keyword
-        args, and kwargs that will go to the exec'ed command """
+        args, and kwargs that will go to the exec'ed command"""
 
         kwargs = kwargs.copy()
         call_args = {}
-        for parg, default in Command._call_args.items():
+        for parg, default in cls._call_args.items():
             key = "_" + parg
 
             if key in kwargs:
                 call_args[parg] = kwargs[key]
                 del kwargs[key]
 
-        merged_args = Command._call_args.copy()
+        merged_args = cls._call_args.copy()
         merged_args.update(call_args)
-        invalid_kwargs = special_kwarg_validator(call_args, merged_args, Command._kwarg_validators)
+        invalid_kwargs = special_kwarg_validator(
+            call_args, merged_args, cls._kwarg_validators
+        )
 
         if invalid_kwargs:
             exc_msg = []
@@ -1363,55 +1384,40 @@ class Command(object):
 
         return call_args, kwargs
 
-    # TODO needs documentation
     def bake(self, *args, **kwargs):
+        """returns a new Command object after baking(freezing) the given
+        command arguments which are used automatically when its exec'ed
+
+        special keyword arguments can be temporary baked and additionally be
+        overridden in __call__ or in subsequent bakes (basically setting
+        defaults)"""
+
+        # construct the base Command
         fn = type(self)(self._path)
         fn._partial = True
 
         call_args, kwargs = self._extract_call_args(kwargs)
 
-        pruned_call_args = call_args
-        for k, v in Command._call_args.items():
-            try:
-                if pruned_call_args[k] == v:
-                    del pruned_call_args[k]
-            except KeyError:
-                continue
-
         fn._partial_call_args.update(self._partial_call_args)
-        fn._partial_call_args.update(pruned_call_args)
+        fn._partial_call_args.update(call_args)
         fn._partial_baked_args.extend(self._partial_baked_args)
-        sep = pruned_call_args.get("long_sep", self._call_args["long_sep"])
-        prefix = pruned_call_args.get("long_prefix", self._call_args["long_prefix"])
+        sep = call_args.get("long_sep", self._call_args["long_sep"])
+        prefix = call_args.get("long_prefix", self._call_args["long_prefix"])
         fn._partial_baked_args.extend(compile_args(args, kwargs, sep, prefix))
         return fn
 
     def __str__(self):
-        """ in python3, should return unicode.  in python2, should return a
-        string of bytes """
-        if IS_PY3:
-            return self.__unicode__()
-        else:
-            return self.__unicode__().encode(DEFAULT_ENCODING)
+        baked_args = " ".join(self._partial_baked_args)
+        if baked_args:
+            baked_args = " " + baked_args
+        return self._path + baked_args
 
     def __eq__(self, other):
         return str(self) == str(other)
 
-    __hash__ = None  # Avoid DeprecationWarning in Python < 3
-
     def __repr__(self):
-        """ in python3, should return unicode.  in python2, should return a
-        string of bytes """
         return "<Command %r>" % str(self)
 
-    def __unicode__(self):
-        """ a magic method defined for python2.  calling unicode() on a
-        self will call this """
-        baked_args = " ".join(item.decode(DEFAULT_ENCODING) for item in self._partial_baked_args)
-        if baked_args:
-            baked_args = " " + baked_args
-        return self._path.decode(DEFAULT_ENCODING) + baked_args
-
     def __enter__(self):
         self(_with=True)
 
@@ -1428,7 +1434,7 @@ class Command(object):
 
         # this will hold a complete mapping of all our special keyword arguments
         # and their values
-        call_args = Command._call_args.copy()
+        call_args = self.__class__._call_args.copy()
 
         # aggregate any 'with' contexts
         for prepend in get_prepend_stack():
@@ -1464,20 +1470,20 @@ class Command(object):
         if not getattr(call_args["ok_code"], "__iter__", None):
             call_args["ok_code"] = [call_args["ok_code"]]
 
-        # check if we're piping via composition
+        # determine what our real STDIN is. is it something explicitly passed into
+        # _in?
         stdin = call_args["in"]
-        if args:
-            first_arg = args.pop(0)
-            if isinstance(first_arg, RunningCommand):
-                if first_arg.call_args["piped"]:
-                    stdin = first_arg.process
-                else:
-                    stdin = first_arg.process._pipe_queue
 
+        # now that we have our stdin, let's figure out how we should handle it
+        if isinstance(stdin, RunningCommand):
+            if stdin.call_args["piped"]:
+                stdin = stdin.process
             else:
-                args.insert(0, first_arg)
+                stdin = stdin.process._pipe_queue
 
-        processed_args = compile_args(args, kwargs, call_args["long_sep"], call_args["long_prefix"])
+        processed_args = compile_args(
+            args, kwargs, call_args["long_sep"], call_args["long_prefix"]
+        )
 
         # makes sure our arguments are broken up correctly
         split_args = self._partial_baked_args + processed_args
@@ -1489,7 +1495,6 @@ class Command(object):
         # if we're running in foreground mode, we need to completely bypass
         # launching a RunningCommand and OProc and just do a spawn
         if call_args["fg"]:
-
             cwd = call_args["cwd"] or os.getcwd()
             with pushd(cwd):
                 if call_args["env"] is None:
@@ -1497,12 +1502,11 @@ class Command(object):
                 else:
                     exit_code = os.spawnve(os.P_WAIT, cmd[0], cmd, call_args["env"])
 
-            exc_class = get_exc_exit_code_would_raise(exit_code, call_args["ok_code"], call_args["piped"])
+            exc_class = get_exc_exit_code_would_raise(
+                exit_code, call_args["ok_code"], call_args["piped"]
+            )
             if exc_class:
-                if IS_PY3:
-                    ran = " ".join([arg.decode(DEFAULT_ENCODING, "ignore") for arg in cmd])
-                else:
-                    ran = " ".join(cmd)
+                ran = " ".join(cmd)
                 exc = exc_class(ran, b"", b"", call_args["truncate_exc"])
                 raise exc
             return None
@@ -1517,11 +1521,15 @@ class Command(object):
         if output_redirect_is_filename(stderr):
             stderr = open(str(stderr), "wb")
 
-        return RunningCommand(cmd, call_args, stdin, stdout, stderr)
+        rc = self.__class__.RunningCommandCls(cmd, call_args, stdin, stdout, stderr)
+        if rc._spawned_and_waited and not call_args["return_cmd"]:
+            return str(rc)
+        else:
+            return rc
 
 
 def compile_args(a, kwargs, sep, prefix):
-    """ takes args and kwargs, as they were passed into the command instance
+    """takes args and kwargs, as they were passed into the command instance
     being executed with __call__, and compose them into a flat list that
     will eventually be fed into exec.  example:
 
@@ -1536,11 +1544,10 @@ def compile_args(a, kwargs, sep, prefix):
 
     and produces
 
-        ['-l', '/tmp', '--color=never']
+        ['-l', '/tmp', '--color=never']
 
     """
     processed_args = []
-    encode = encode_to_py3bytes_or_py2str
 
     # aggregate positional args
     for arg in a:
@@ -1549,24 +1556,24 @@ def compile_args(a, kwargs, sep, prefix):
                 arg = [arg.path]
 
             for sub_arg in arg:
-                processed_args.append(encode(sub_arg))
+                processed_args.append(sub_arg)
         elif isinstance(arg, dict):
-            processed_args += aggregate_keywords(arg, sep, prefix, raw=True)
+            processed_args += _aggregate_keywords(arg, sep, prefix, raw=True)
 
         # see https://github.com/amoffat/sh/issues/522
         elif arg is None or arg is False:
             pass
         else:
-            processed_args.append(encode(arg))
+            processed_args.append(str(arg))
 
     # aggregate the keyword arguments
-    processed_args += aggregate_keywords(kwargs, sep, prefix)
+    processed_args += _aggregate_keywords(kwargs, sep, prefix)
 
     return processed_args
 
 
-def aggregate_keywords(keywords, sep, prefix, raw=False):
-    """ take our keyword arguments, and a separator, and compose the list of
+def _aggregate_keywords(keywords, sep, prefix, raw=False):
+    """take our keyword arguments, and a separator, and compose the list of
     flat long (and short) arguments.  example
 
         {'color': 'never', 't': True, 'something': True} with sep '='
@@ -1601,32 +1608,43 @@ def aggregate_keywords(keywords, sep, prefix, raw=False):
     """
 
     processed = []
-    encode = encode_to_py3bytes_or_py2str
-
-    for k, v in keywords.items():
-        # we're passing a short arg as a kwarg, example:
-        # cut(d="\t")
-        if len(k) == 1:
-            if v is not False:
-                processed.append(encode("-" + k))
-                if v is not True:
-                    processed.append(encode(v))
-
-        # we're doing a long arg
-        else:
-            if not raw:
-                k = k.replace("_", "-")
 
-            if v is True:
-                processed.append(encode(prefix + k))
-            elif v is False:
-                pass
-            elif sep is None or sep == " ":
-                processed.append(encode(prefix + k))
-                processed.append(encode(v))
+    for k, maybe_list_of_v in keywords.items():
+        # turn our value(s) into a list of values so that we can process them
+        # all individually under the same key
+        list_of_v = [maybe_list_of_v]
+        if isinstance(maybe_list_of_v, (list, tuple)):
+            list_of_v = maybe_list_of_v
+
+        for v in list_of_v:
+            # we're passing a short arg as a kwarg, example:
+            # cut(d="\t")
+            if len(k) == 1:
+                if v is not False:
+                    processed.append("-" + k)
+                    if v is not True:
+                        processed.append(str(v))
+
+            # we're doing a long arg
             else:
-                arg = encode("%s%s%s%s" % (prefix, k, sep, v))
-                processed.append(arg)
+                if not raw:
+                    k = k.replace("_", "-")
+
+                # if it's true, it has no value, just pass the name
+                if v is True:
+                    processed.append(prefix + k)
+                # if it's false, skip passing it
+                elif v is False:
+                    pass
+
+                # we may need to break the argument up into multiple arguments
+                elif sep is None or sep == " ":
+                    processed.append(prefix + k)
+                    processed.append(str(v))
+                # otherwise just join it together into a single argument
+                else:
+                    arg = f"{prefix}{k}{sep}{v}"
+                    processed.append(arg)
 
     return processed
 
@@ -1646,17 +1664,17 @@ def _start_daemon_thread(fn, name, exc_queue, *a):
 
 
 def setwinsize(fd, rows_cols):
-    """ set the terminal size of a tty file descriptor.  borrowed logic
-    from pexpect.py """
+    """set the terminal size of a tty file descriptor.  borrowed logic
+    from pexpect.py"""
     rows, cols = rows_cols
-    winsize = getattr(termios, 'TIOCSWINSZ', -2146929561)
+    winsize = getattr(termios, "TIOCSWINSZ", -2146929561)
 
-    s = struct.pack('HHHH', rows, cols, 0, 0)
+    s = struct.pack("HHHH", rows, cols, 0, 0)
     fcntl.ioctl(fd, winsize, s)
 
 
 def construct_streamreader_callback(process, handler):
-    """ here we're constructing a closure for our streamreader callback.  this
+    """here we're constructing a closure for our streamreader callback.  this
     is used in the case that we pass a callback into _out or _err, meaning we
     want to our callback to handle each bit of output
 
@@ -1665,7 +1683,7 @@ def construct_streamreader_callback(process, handler):
     limiting them.  a new user will assume the callback takes 1 argument (the
     data).  as they get more advanced, they may want to terminate the process,
     or pass some stdin back, and will realize that they can pass a callback of
-    more args """
+    more args"""
 
     # implied arg refers to the "self" that methods will pass in.  we need to
     # account for this implied arg when figuring out what function the user
@@ -1740,7 +1758,7 @@ def get_exc_exit_code_would_raise(exit_code, ok_codes, sigpipe_ok):
 
 
 def handle_process_exit_code(exit_code):
-    """ this should only ever be called once for each child process """
+    """this should only ever be called once for each child process"""
     # if we exited from a signal, let our exit code reflect that
     if os.WIFSIGNALED(exit_code):
         exit_code = -os.WTERMSIG(exit_code)
@@ -1754,7 +1772,7 @@ def handle_process_exit_code(exit_code):
 
 
 def no_interrupt(syscall, *args, **kwargs):
-    """ a helper for making system calls immune to EINTR """
+    """a helper for making system calls immune to EINTR"""
     ret = None
 
     while True:
@@ -1772,10 +1790,10 @@ def no_interrupt(syscall, *args, **kwargs):
 
 
 class OProc(object):
-    """ this class is instantiated by RunningCommand for a command to be exec'd.
+    """this class is instantiated by RunningCommand for a command to be exec'd.
     it handles all the nasty business involved with correctly setting up the
     input/output to the child process.  it gets its name for subprocess.Popen
-    (process open) but we're calling ours OProc (open process) """
+    (process open) but we're calling ours OProc (open process)"""
 
     _default_window_size = (24, 80)
 
@@ -1783,13 +1801,26 @@ class OProc(object):
     STDOUT = -1
     STDERR = -2
 
-    def __init__(self, command, parent_log, cmd, stdin, stdout, stderr, call_args, pipe, process_assign_lock):
+    def __init__(
+        self,
+        command,
+        parent_log,
+        cmd,
+        stdin,
+        stdout,
+        stderr,
+        call_args,
+        pipe,
+        process_assign_lock,
+    ):
         """
-            cmd is the full list of arguments that will be exec'd.  it includes the program name and all its arguments.
+        cmd is the full list of arguments that will be exec'd.  it includes the program
+        name and all its arguments.
 
-            stdin, stdout, stderr are what the child will use for standard input/output/err.
+        stdin, stdout, stderr are what the child will use for standard input/output/err.
 
-            call_args is a mapping of all the special keyword arguments to apply to the child process.
+        call_args is a mapping of all the special keyword arguments to apply to the
+        child process.
         """
         self.command = command
         self.call_args = call_args
@@ -1823,8 +1854,12 @@ class OProc(object):
         stdout_is_fd_based = ob_is_fd_based(stdout)
         stderr_is_fd_based = ob_is_fd_based(stderr)
 
-        tee_out = ca["tee"] in (True, "out")
-        tee_err = ca["tee"] == "err"
+        if isinstance(ca["tee"], (str, bool, int)) or ca["tee"] is None:
+            tee = {ca["tee"]}
+        else:
+            tee = set(ca["tee"])
+        tee_out = TEE_STDOUT.intersection(tee)
+        tee_err = TEE_STDERR.intersection(tee)
 
         single_tty = ca["tty_in"] and ca["tty_out"] and ca["unify_ttys"]
 
@@ -1835,7 +1870,8 @@ class OProc(object):
         if single_tty:
             # master_fd, slave_fd = pty.openpty()
             #
-            # Anything that is written on the master end is provided to the process on the slave end as though it was
+            # Anything that is written on the master end is provided to the process on
+            # the slave end as though it was
             # input typed on a terminal. -"man 7 pty"
             #
             # later, in the child process, we're going to do this, so keep it in mind:
@@ -1845,12 +1881,14 @@ class OProc(object):
             #    os.dup2(self._stderr_child_fd, 2)
             self._stdin_parent_fd, self._stdin_child_fd = pty.openpty()
 
-            # this makes our parent fds behave like a terminal. it says that the very same fd that we "type" to (for
-            # stdin) is the same one that we see output printed to (for stdout)
+            # this makes our parent fds behave like a terminal. it says that the very
+            # same fd that we "type" to (for stdin) is the same one that we see output
+            # printed to (for stdout)
             self._stdout_parent_fd = os.dup(self._stdin_parent_fd)
 
-            # this line is what makes stdout and stdin attached to the same pty. in other words the process will write
-            # to the same underlying fd as stdout as it uses to read from for stdin. this makes programs like ssh happy
+            # this line is what makes stdout and stdin attached to the same pty. in
+            # other words the process will write to the same underlying fd as stdout
+            # as it uses to read from for stdin. this makes programs like ssh happy
             self._stdout_child_fd = os.dup(self._stdin_child_fd)
 
             self._stderr_parent_fd = os.dup(self._stdin_parent_fd)
@@ -1921,7 +1959,14 @@ class OProc(object):
             self._pipe_fd = os.dup(fd_to_use)
 
         new_session = ca["new_session"]
-        needs_ctty = ca["tty_in"] and new_session
+        new_group = ca["new_group"]
+        needs_ctty = ca["tty_in"]
+
+        # if we need a controlling terminal, we have to be in a new session where we
+        # are the session leader, otherwise we would need to take over the existing
+        # process session, and we can't do that(?)
+        if needs_ctty:
+            new_session = True
 
         self.ctty = None
         if needs_ctty:
@@ -1958,21 +2003,24 @@ class OProc(object):
                 os.close(close_pipe_write)
 
             # this is critical
-            # our exc_pipe_write must have CLOEXEC enabled. the reason for this is tricky:
-            # if our child (the block we're in now), has an exception, we need to be able to write to exc_pipe_write, so
-            # that when the parent does os.read(exc_pipe_read), it gets our traceback.  however, os.read(exc_pipe_read)
-            # in the parent blocks, so if our child *doesn't* have an exception, and doesn't close the writing end, it
-            # hangs forever.  not good!  but obviously the child can't close the writing end until it knows it's not
-            # going to have an exception, which is impossible to know because but what if os.execv has an exception?  so
-            # the answer is CLOEXEC, so that the writing end of the pipe gets closed upon successful exec, and the
-            # parent reading the read end won't block (close breaks the block).
+            # our exc_pipe_write must have CLOEXEC enabled. the reason for this is
+            # tricky: if our child (the block we're in now), has an exception, we need
+            # to be able to write to exc_pipe_write, so that when the parent does
+            # os.read(exc_pipe_read), it gets our traceback.  however,
+            # os.read(exc_pipe_read) in the parent blocks, so if our child *doesn't*
+            # have an exception, and doesn't close the writing end, it hangs forever.
+            # not good!  but obviously the child can't close the writing end until it
+            # knows it's not going to have an exception, which is impossible to know
+            # because what if os.execv has an exception?  so the answer is CLOEXEC,
+            # so that the writing end of the pipe gets closed upon successful exec,
+            # and the parent reading the read end won't block (close breaks the block).
             flags = fcntl.fcntl(exc_pipe_write, fcntl.F_GETFD)
             flags |= fcntl.FD_CLOEXEC
             fcntl.fcntl(exc_pipe_write, fcntl.F_SETFD, flags)
 
             try:
-                # ignoring SIGHUP lets us persist even after the parent process
-                # exits.  only ignore if we're backgrounded
+                # ignoring SIGHUP lets us persist even after the controlling terminal
+                # is closed
                 if ca["bg"] is True:
                     signal.signal(signal.SIGHUP, signal.SIG_IGN)
 
@@ -1988,15 +2036,8 @@ class OProc(object):
                 # process init
                 if new_session:
                     os.setsid()
-                # if we're not going in a new session, we should go in a new
-                # process group.  this way, our process, and any children it
-                # spawns, are alone, contained entirely in one group.  if we
-                # didn't do this, and didn't use a new session, then our exec'd
-                # process *could* exist in the same group as our python process,
-                # depending on how we launch the process (from a shell, or some
-                # other way)
-                else:
-                    os.setpgrp()
+                elif new_group:
+                    os.setpgid(0, 0)
 
                 sid = os.getsid(0)
                 pgid = os.getpgid(0)
@@ -2057,11 +2098,17 @@ class OProc(object):
                     close_fds = True
 
                 if close_fds:
-                    pass_fds = set((0, 1, 2, exc_pipe_write))
+                    pass_fds = {0, 1, 2, exc_pipe_write}
                     pass_fds.update(ca["pass_fds"])
 
                     # don't inherit file descriptors
-                    inherited_fds = os.listdir("/dev/fd")
+                    try:
+                        inherited_fds = os.listdir("/dev/fd")
+                    except (IOError, OSError):
+                        # Some systems don't have /dev/fd. Raises OSError in
+                        # Python2, FileNotFoundError on Python3. The latter doesn't
+                        # exist on Python2, but inherits from IOError, which does.
+                        inherited_fds = os.listdir("/proc/self/fd")
                     inherited_fds = set(int(fd) for fd in inherited_fds) - pass_fds
                     for fd in inherited_fds:
                         try:
@@ -2069,11 +2116,16 @@ class OProc(object):
                         except OSError:
                             pass
 
+                # python=3.6, locale=c will fail test_unicode_arg if we don't
+                # explicitly encode to bytes via our desired encoding. this does
+                # not seem to be the case in other python versions, even if locale=c
+                bytes_cmd = [c.encode(ca["encoding"]) for c in cmd]
+
                 # actually execute the process
                 if ca["env"] is None:
-                    os.execv(cmd[0], cmd)
+                    os.execv(bytes_cmd[0], bytes_cmd)
                 else:
-                    os.execve(cmd[0], cmd, ca["env"])
+                    os.execve(bytes_cmd[0], bytes_cmd, ca["env"])
 
             # we must ensure that we carefully exit the child process on
             # exception, otherwise the parent process code will be executed
@@ -2082,7 +2134,7 @@ class OProc(object):
             # if your parent process experiences an exit code 255, it is most
             # likely that an exception occurred between the fork of the child
             # and the exec.  this should be reported.
-            except:  # noqa: E722
+            except Exception:  # noqa: E722
                 # some helpful debugging
                 tb = traceback.format_exc().encode("utf8", "ignore")
 
@@ -2114,14 +2166,16 @@ class OProc(object):
                 os.close(close_pipe_write)
 
             os.close(exc_pipe_write)
-            fork_exc = os.read(exc_pipe_read, 1024 ** 2)
+            fork_exc = os.read(exc_pipe_read, 1024**2)
             os.close(exc_pipe_read)
             if fork_exc:
                 fork_exc = fork_exc.decode(DEFAULT_ENCODING)
                 raise ForkException(fork_exc)
 
             os.close(session_pipe_write)
-            sid, pgid = os.read(session_pipe_read, 1024).decode(DEFAULT_ENCODING).split(",")
+            sid, pgid = (
+                os.read(session_pipe_read, 1024).decode(DEFAULT_ENCODING).split(",")
+            )
             os.close(session_pipe_read)
             self.sid = int(sid)
             self.pgid = int(pgid)
@@ -2140,8 +2194,9 @@ class OProc(object):
 
             self.stdin = stdin
 
-            # this accounts for when _out is a callable that is passed stdin.  in that case, if stdin is unspecified, we
-            # must set it to a queue, so callbacks can put things on it
+            # this accounts for when _out is a callable that is passed stdin.  in that
+            # case, if stdin is unspecified, we must set it to a queue, so callbacks can
+            # put things on it
             if callable(ca["out"]) and self.stdin is None:
                 self.stdin = Queue()
 
@@ -2179,8 +2234,14 @@ class OProc(object):
             self._stdin_stream = None
             if self._stdin_parent_fd:
                 log = self.log.get_child("streamwriter", "stdin")
-                self._stdin_stream = StreamWriter(log, self._stdin_parent_fd, self.stdin,
-                                                  ca["in_bufsize"], ca["encoding"], ca["tty_in"])
+                self._stdin_stream = StreamWriter(
+                    log,
+                    self._stdin_parent_fd,
+                    self.stdin,
+                    ca["in_bufsize"],
+                    ca["encoding"],
+                    ca["tty_in"],
+                )
 
             stdout_pipe = None
             if pipe is OProc.STDOUT and not ca["no_pipe"]:
@@ -2205,10 +2266,14 @@ class OProc(object):
                     stdout = construct_streamreader_callback(self, stdout)
                 self._stdout_stream = StreamReader(
                     self.log.get_child("streamreader", "stdout"),
-                    self._stdout_parent_fd, stdout, self._stdout,
-                    ca["out_bufsize"], ca["encoding"],
-                    ca["decode_errors"], stdout_pipe,
-                    save_data=save_stdout
+                    self._stdout_parent_fd,
+                    stdout,
+                    self._stdout,
+                    ca["out_bufsize"],
+                    ca["encoding"],
+                    ca["decode_errors"],
+                    stdout_pipe,
+                    save_data=save_stdout,
                 )
 
             elif self._stdout_parent_fd:
@@ -2219,22 +2284,31 @@ class OProc(object):
             # stream reader for stderr, because we've already set one up for
             # stdout above
             self._stderr_stream = None
-            if stderr is not OProc.STDOUT and not single_tty and not pipe_err and self._stderr_parent_fd:
-
+            if (
+                stderr is not OProc.STDOUT
+                and not single_tty
+                and not pipe_err
+                and self._stderr_parent_fd
+            ):
                 stderr_pipe = None
                 if pipe is OProc.STDERR and not ca["no_pipe"]:
                     stderr_pipe = self._pipe_queue
 
-                save_stderr = not ca["no_err"] and (ca["tee"] in ("err",) or stderr is None)
+                save_stderr = not ca["no_err"] and (tee_err or stderr is None)
 
                 if callable(stderr):
                     stderr = construct_streamreader_callback(self, stderr)
 
                 self._stderr_stream = StreamReader(
                     Logger("streamreader"),
-                    self._stderr_parent_fd, stderr, self._stderr,
-                    ca["err_bufsize"], ca["encoding"], ca["decode_errors"],
-                    stderr_pipe, save_data=save_stderr
+                    self._stderr_parent_fd,
+                    stderr,
+                    self._stderr,
+                    ca["err_bufsize"],
+                    ca["encoding"],
+                    ca["decode_errors"],
+                    stderr_pipe,
+                    save_data=save_stderr,
                 )
 
             elif self._stderr_parent_fd:
@@ -2248,7 +2322,9 @@ class OProc(object):
             self._timeout_timer = None
             if ca["timeout"]:
                 self._timeout_event = threading.Event()
-                self._timeout_timer = threading.Timer(ca["timeout"], self._timeout_event.set)
+                self._timeout_timer = threading.Timer(
+                    ca["timeout"], self._timeout_event.set
+                )
                 self._timeout_timer.start()
 
             # this is for cases where we know that the RunningCommand that was
@@ -2259,7 +2335,14 @@ class OProc(object):
             # RunningCommand.wait() does), because we want the exception to be
             # re-raised in the future, if we DO call .wait()
             handle_exit_code = None
-            if not self.command._spawned_and_waited and ca["bg_exc"]:
+            if (
+                not self.command._spawned_and_waited
+                and ca["bg_exc"]
+                # we don't want background exceptions if we're doing async stuff,
+                # because we want those to bubble up.
+                and not ca["async"]
+            ):
+
                 def fn(exit_code):
                     with process_assign_lock:
                         return self.command.handle_command_exit_code(exit_code)
@@ -2272,9 +2355,13 @@ class OProc(object):
             self._bg_thread_exc_queue = Queue(1)
             self._background_thread = _start_daemon_thread(
                 background_thread,
-                thread_name, self._bg_thread_exc_queue, timeout_fn,
-                self._timeout_event, handle_exit_code, self.is_alive,
-                self._quit_threads
+                thread_name,
+                self._bg_thread_exc_queue,
+                timeout_fn,
+                self._timeout_event,
+                handle_exit_code,
+                self.is_alive,
+                self._quit_threads,
             )
 
             # start the main io threads. stdin thread is not needed if we are
@@ -2286,9 +2373,13 @@ class OProc(object):
                 thread_name = "STDIN thread for pid %d" % self.pid
                 self._input_thread = _start_daemon_thread(
                     input_thread,
-                    thread_name, self._input_thread_exc_queue, self.log,
-                    self._stdin_stream, self.is_alive, self._quit_threads,
-                    close_before_term
+                    thread_name,
+                    self._input_thread_exc_queue,
+                    self.log,
+                    self._stdin_stream,
+                    self.is_alive,
+                    self._quit_threads,
+                    close_before_term,
                 )
 
             # this event is for cases where the subprocess that we launch
@@ -2298,47 +2389,42 @@ class OProc(object):
             # prevents that hanging
             self._stop_output_event = threading.Event()
 
+            # we need to set up a callback to fire when our `output_thread` is about
+            # to exit. this callback will set an asyncio Event, so that coroutines can
+            # be notified that our output is finished.
+            # if the `sh` command was launched from within a thread (so we're not in
+            # the main thread), then we won't have an event loop.
+            try:
+                loop = asyncio.get_event_loop()
+            except RuntimeError:
+
+                def output_complete():
+                    pass
+
+            else:
+
+                def output_complete():
+                    loop.call_soon_threadsafe(self.command.aio_output_complete.set)
+
             self._output_thread_exc_queue = Queue(1)
             thread_name = "STDOUT/ERR thread for pid %d" % self.pid
             self._output_thread = _start_daemon_thread(
                 output_thread,
-                thread_name, self._output_thread_exc_queue, self.log,
-                self._stdout_stream, self._stderr_stream,
-                self._timeout_event, self.is_alive, self._quit_threads,
-                self._stop_output_event
+                thread_name,
+                self._output_thread_exc_queue,
+                self.log,
+                self._stdout_stream,
+                self._stderr_stream,
+                self._timeout_event,
+                self.is_alive,
+                self._quit_threads,
+                self._stop_output_event,
+                output_complete,
             )
 
     def __repr__(self):
         return "<Process %d %r>" % (self.pid, self.cmd[:500])
 
-    # these next 3 properties are primary for tests
-    @property
-    def output_thread_exc(self):
-        exc = None
-        try:
-            exc = self._output_thread_exc_queue.get(False)
-        except Empty:
-            pass
-        return exc
-
-    @property
-    def input_thread_exc(self):
-        exc = None
-        try:
-            exc = self._input_thread_exc_queue.get(False)
-        except Empty:
-            pass
-        return exc
-
-    @property
-    def bg_thread_exc(self):
-        exc = None
-        try:
-            exc = self._bg_thread_exc_queue.get(False)
-        except Empty:
-            pass
-        return exc
-
     def change_in_bufsize(self, buf):
         self._stdin_stream.stream_bufferer.change_buffering(buf)
 
@@ -2357,15 +2443,15 @@ class OProc(object):
         return "".encode(self.call_args["encoding"]).join(self._stderr)
 
     def get_pgid(self):
-        """ return the CURRENT group id of the process. this differs from
+        """return the CURRENT group id of the process. this differs from
         self.pgid in that this reflects the current state of the process, where
-        self.pgid is the group id at launch """
+        self.pgid is the group id at launch"""
         return os.getpgid(self.pid)
 
     def get_sid(self):
-        """ return the CURRENT session id of the process. this differs from
+        """return the CURRENT session id of the process. this differs from
         self.sid in that this reflects the current state of the process, where
-        self.sid is the session id at launch """
+        self.sid is the session id at launch"""
         return os.getsid(self.pid)
 
     def signal_group(self, sig):
@@ -2389,9 +2475,9 @@ class OProc(object):
         self.signal(signal.SIGTERM)
 
     def is_alive(self):
-        """ polls if our child process has completed, without blocking.  this
+        """polls if our child process has completed, without blocking.  this
         method has side-effects, such as setting our exit_code, if we happen to
-        see our child exit while this is running """
+        see our child exit while this is running"""
 
         if self.exit_code is not None:
             return False, self.exit_code
@@ -2406,12 +2492,17 @@ class OProc(object):
         # so essentially what we're doing is, using this lock, checking if
         # we're calling .wait(), and if we are, let .wait() get the exit code
         # and handle the status, otherwise let us do it.
-        acquired = self._wait_lock.acquire(False)
+        #
+        # Using a small timeout provides backpressure against code that spams
+        # calls to .is_alive() which may block the main thread from acquiring
+        # the lock otherwise.
+        acquired = self._wait_lock.acquire(timeout=0.00001)
         if not acquired:
             if self.exit_code is not None:
                 return False, self.exit_code
             return True, self.exit_code
 
+        witnessed_end = False
         try:
             # WNOHANG is just that...we're calling waitpid without hanging...
             # essentially polling the process.  the return result is (0, 0) if
@@ -2420,7 +2511,7 @@ class OProc(object):
             pid, exit_code = no_interrupt(os.waitpid, self.pid, os.WNOHANG)
             if pid == self.pid:
                 self.exit_code = handle_process_exit_code(exit_code)
-                self._process_just_ended()
+                witnessed_end = True
 
                 return False, self.exit_code
 
@@ -2431,6 +2522,8 @@ class OProc(object):
             return True, self.exit_code
         finally:
             self._wait_lock.release()
+            if witnessed_end:
+                self._process_just_ended()
 
     def _process_just_ended(self):
         if self._timeout_timer:
@@ -2449,7 +2542,7 @@ class OProc(object):
             os.close(self._stdin_parent_fd)
 
     def wait(self):
-        """ waits for the process to complete, handles the exit code """
+        """waits for the process to complete, handles the exit code"""
 
         self.log.debug("acquiring wait lock to wait for completion")
         # using the lock in a with-context blocks, which is what we want if
@@ -2465,38 +2558,41 @@ class OProc(object):
                 witnessed_end = True
 
             else:
-                self.log.debug("exit code already set (%d), no need to wait", self.exit_code)
+                self.log.debug(
+                    "exit code already set (%d), no need to wait", self.exit_code
+                )
+        self._process_exit_cleanup(witnessed_end=witnessed_end)
+        return self.exit_code
 
-            self._quit_threads.set()
+    def _process_exit_cleanup(self, witnessed_end):
+        self._quit_threads.set()
 
-            # we may not have a thread for stdin, if the pipe has been connected
-            # via _piped="direct"
-            if self._input_thread:
-                self._input_thread.join()
+        # we may not have a thread for stdin, if the pipe has been connected
+        # via _piped="direct"
+        if self._input_thread:
+            self._input_thread.join()
 
-            # wait, then signal to our output thread that the child process is
-            # done, and we should have finished reading all the stdout/stderr
-            # data that we can by now
-            timer = threading.Timer(2.0, self._stop_output_event.set)
-            timer.start()
+        # wait, then signal to our output thread that the child process is
+        # done, and we should have finished reading all the stdout/stderr
+        # data that we can by now
+        timer = threading.Timer(2.0, self._stop_output_event.set)
+        timer.start()
 
-            # wait for our stdout and stderr streamreaders to finish reading and
-            # aggregating the process output
-            self._output_thread.join()
-            timer.cancel()
+        # wait for our stdout and stderr streamreaders to finish reading and
+        # aggregating the process output
+        self._output_thread.join()
+        timer.cancel()
 
-            self._background_thread.join()
+        self._background_thread.join()
 
-            if witnessed_end:
-                self._process_just_ended()
-
-            return self.exit_code
+        if witnessed_end:
+            self._process_just_ended()
 
 
 def input_thread(log, stdin, is_alive, quit_thread, close_before_term):
-    """ this is run in a separate thread.  it writes into our process's
+    """this is run in a separate thread.  it writes into our process's
     stdin (a streamwriter) and waits the process to end AND everything that
-    can be written to be written """
+    can be written to be written"""
 
     closed = False
     alive = True
@@ -2528,13 +2624,13 @@ def input_thread(log, stdin, is_alive, quit_thread, close_before_term):
 
 def event_wait(ev, timeout=None):
     triggered = ev.wait(timeout)
-    if IS_PY26:
-        triggered = ev.is_set()
     return triggered
 
 
-def background_thread(timeout_fn, timeout_event, handle_exit_code, is_alive, quit_thread):
-    """ handles the timeout logic """
+def background_thread(
+    timeout_fn, timeout_event, handle_exit_code, is_alive, quit_thread
+):
+    """handles the timeout logic"""
 
     # if there's a timeout event, loop
     if timeout_event:
@@ -2561,10 +2657,19 @@ def background_thread(timeout_fn, timeout_event, handle_exit_code, is_alive, qui
         handle_exit_code(exit_code)
 
 
-def output_thread(log, stdout, stderr, timeout_event, is_alive, quit_thread, stop_output_event):
-    """ this function is run in a separate thread.  it reads from the
+def output_thread(
+    log,
+    stdout,
+    stderr,
+    timeout_event,
+    is_alive,
+    quit_thread,
+    stop_output_event,
+    output_complete,
+):
+    """this function is run in a separate thread.  it reads from the
     process's stdout stream (a streamreader), and waits for it to claim that
-    its done """
+    its done"""
 
     poller = Poller()
     if stdout is not None:
@@ -2610,6 +2715,8 @@ def output_thread(log, stdout, stderr, timeout_event, is_alive, quit_thread, sto
     if stderr:
         stderr.close()
 
+    output_complete()
+
 
 class DoneReadingForever(Exception):
     pass
@@ -2620,7 +2727,7 @@ class NotYetReadyToRead(Exception):
 
 
 def determine_how_to_read_input(input_obj):
-    """ given some kind of input object, return a function that knows how to
+    """given some kind of input object, return a function that knows how to
     read chunks of that input object.
 
     each reader function should return a chunk and raise a DoneReadingForever
@@ -2629,7 +2736,7 @@ def determine_how_to_read_input(input_obj):
     NOTE: the function returned does not need to care much about the requested
     buffering type (eg, unbuffered vs newline-buffered).  the StreamBufferer
     will take care of that.  these functions just need to return a
-    reasonably-sized chunk of data. """
+    reasonably-sized chunk of data."""
 
     if isinstance(input_obj, Queue):
         log_msg = "queue"
@@ -2644,7 +2751,7 @@ def determine_how_to_read_input(input_obj):
         log_msg = "file descriptor"
         get_chunk = get_file_chunk_reader(input_obj)
 
-    elif isinstance(input_obj, basestring):
+    elif isinstance(input_obj, str):
         log_msg = "string"
         get_chunk = get_iter_string_reader(input_obj)
 
@@ -2705,22 +2812,19 @@ def get_callable_chunk_reader(stdin):
 
 
 def get_iter_string_reader(stdin):
-    """ return an iterator that returns a chunk of a string every time it is
+    """return an iterator that returns a chunk of a string every time it is
     called.  notice that even though bufsize_type might be line buffered, we're
     not doing any line buffering here.  that's because our StreamBufferer
-    handles all buffering.  we just need to return a reasonable-sized chunk. """
+    handles all buffering.  we just need to return a reasonable-sized chunk."""
     bufsize = 1024
-    iter_str = (stdin[i:i + bufsize] for i in range(0, len(stdin), bufsize))
+    iter_str = (stdin[i : i + bufsize] for i in range(0, len(stdin), bufsize))
     return get_iter_chunk_reader(iter_str)
 
 
 def get_iter_chunk_reader(stdin):
     def fn():
         try:
-            if IS_PY3:
-                chunk = stdin.__next__()
-            else:
-                chunk = stdin.next()
+            chunk = stdin.__next__()
             return chunk
         except StopIteration:
             raise DoneReadingForever
@@ -2736,11 +2840,10 @@ def get_file_chunk_reader(stdin):
         # exception.  that exception is how we'll know we can't do a poll on
         # stdin
         is_real_file = True
-        if IS_PY3:
-            try:
-                stdin.fileno()
-            except UnsupportedOperation:
-                is_real_file = False
+        try:
+            stdin.fileno()
+        except UnsupportedOperation:
+            is_real_file = False
 
         # this poll is for files that may not yet be ready to read.  we test
         # for fileno because StringIO/BytesIO cannot be used in a poll
@@ -2765,10 +2868,10 @@ def get_file_chunk_reader(stdin):
 
 
 def bufsize_type_to_bufsize(bf_type):
-    """ for a given bufsize type, return the actual bufsize we will read.
+    """for a given bufsize type, return the actual bufsize we will read.
     notice that although 1 means "newline-buffered", we're reading a chunk size
     of 1024.  this is because we have to read something.  we let a
-    StreamBufferer instance handle splitting our chunk on newlines """
+    StreamBufferer instance handle splitting our chunk on newlines"""
 
     # newlines
     if bf_type == 1:
@@ -2784,12 +2887,11 @@ def bufsize_type_to_bufsize(bf_type):
 
 
 class StreamWriter(object):
-    """ StreamWriter reads from some input (the stdin param) and writes to a fd
+    """StreamWriter reads from some input (the stdin param) and writes to a fd
     (the stream param).  the stdin may be a Queue, a callable, something with
-    the "read" method, a string, or an iterable """
+    the "read" method, a string, or an iterable"""
 
     def __init__(self, log, stream, stdin, bufsize_type, encoding, tty_in):
-
         self.stream = stream
         self.stdin = stdin
 
@@ -2802,12 +2904,12 @@ class StreamWriter(object):
         self.log.debug("parsed stdin as a %s", log_msg)
 
     def fileno(self):
-        """ defining this allows us to do poll on an instance of this
-        class """
+        """defining this allows us to do poll on an instance of this
+        class"""
         return self.stream
 
     def write(self):
-        """ attempt to get a chunk of data to write to our child process's
+        """attempt to get a chunk of data to write to our child process's
         stdin, then write it.  the return value answers the questions "are we
         done writing forever?" """
 
@@ -2854,7 +2956,7 @@ class StreamWriter(object):
             return False
 
         # if we're not bytes, make us bytes
-        if IS_PY3 and not isinstance(chunk, bytes):
+        if not isinstance(chunk, bytes):
             chunk = chunk.encode(self.encoding)
 
         for proc_chunk in self.stream_bufferer.process(chunk):
@@ -2886,43 +2988,55 @@ def determine_how_to_feed_output(handler, encoding, decode_errors):
         process, finish = get_callback_chunk_consumer(handler, encoding, decode_errors)
 
     # in py3, this is used for bytes
-    elif isinstance(handler, (cStringIO, iocStringIO)):
+    elif isinstance(handler, BytesIO):
         process, finish = get_cstringio_chunk_consumer(handler)
 
     # in py3, this is used for unicode
-    elif isinstance(handler, (StringIO, ioStringIO)):
+    elif isinstance(handler, StringIO):
         process, finish = get_stringio_chunk_consumer(handler, encoding, decode_errors)
 
     elif hasattr(handler, "write"):
-        process, finish = get_file_chunk_consumer(handler)
+        process, finish = get_file_chunk_consumer(handler, decode_errors)
 
     else:
         try:
             handler = int(handler)
         except (ValueError, TypeError):
-            def process(chunk): return False  # noqa: E731
-            def finish(): return None  # noqa: E731
+
+            def process(chunk):
+                return False  # noqa: E731
+
+            def finish():
+                return None  # noqa: E731
+
         else:
-            process, finish = get_fd_chunk_consumer(handler)
+            process, finish = get_fd_chunk_consumer(handler, decode_errors)
 
     return process, finish
 
 
-def get_fd_chunk_consumer(handler):
+def get_fd_chunk_consumer(handler, decode_errors):
     handler = fdopen(handler, "w", closefd=False)
-    return get_file_chunk_consumer(handler)
+    return get_file_chunk_consumer(handler, decode_errors)
 
 
-def get_file_chunk_consumer(handler):
+def get_file_chunk_consumer(handler, decode_errors):
     if getattr(handler, "encoding", None):
-        def encode(chunk): return chunk.decode(handler.encoding)  # noqa: E731
+
+        def encode(chunk):
+            return chunk.decode(handler.encoding, decode_errors)  # noqa: E731
+
     else:
-        def encode(chunk): return chunk  # noqa: E731
+
+        def encode(chunk):
+            return chunk  # noqa: E731
 
     if hasattr(handler, "flush"):
         flush = handler.flush
     else:
-        def flush(): return None  # noqa: E731
+
+        def flush():
+            return None  # noqa: E731
 
     def process(chunk):
         handler.write(encode(chunk))
@@ -2976,11 +3090,21 @@ def get_stringio_chunk_consumer(handler, encoding, decode_errors):
 
 
 class StreamReader(object):
-    """ reads from some output (the stream) and sends what it just read to the
-    handler.  """
-
-    def __init__(self, log, stream, handler, buffer, bufsize_type, encoding, decode_errors, pipe_queue=None,
-                 save_data=True):
+    """reads from some output (the stream) and sends what it just read to the
+    handler."""
+
+    def __init__(
+        self,
+        log,
+        stream,
+        handler,
+        buffer,
+        bufsize_type,
+        encoding,
+        decode_errors,
+        pipe_queue=None,
+        save_data=True,
+    ):
         self.stream = stream
         self.buffer = buffer
         self.save_data = save_data
@@ -2993,17 +3117,20 @@ class StreamReader(object):
 
         self.log = log
 
-        self.stream_bufferer = StreamBufferer(bufsize_type, self.encoding, self.decode_errors)
+        self.stream_bufferer = StreamBufferer(
+            bufsize_type, self.encoding, self.decode_errors
+        )
         self.bufsize = bufsize_type_to_bufsize(bufsize_type)
 
-        self.process_chunk, self.finish_chunk_processor = \
-            determine_how_to_feed_output(handler, encoding, decode_errors)
+        self.process_chunk, self.finish_chunk_processor = determine_how_to_feed_output(
+            handler, encoding, decode_errors
+        )
 
         self.should_quit = False
 
     def fileno(self):
-        """ defining this allows us to do poll on an instance of this
-        class """
+        """defining this allows us to do poll on an instance of this
+        class"""
         return self.stream
 
     def close(self):
@@ -3050,12 +3177,12 @@ class StreamReader(object):
 
 
 class StreamBufferer(object):
-    """ this is used for feeding in chunks of stdout/stderr, and breaking it up
+    """this is used for feeding in chunks of stdout/stderr, and breaking it up
     into chunks that will actually be put into the internal buffers.  for
     example, if you have two processes, one being piped to the other, and you
     want that, first process to feed lines of data (instead of the chunks
     however they come in), OProc will use an instance of this class to chop up
-    the data and feed it as lines to be sent down the pipe """
+    the data and feed it as lines to be sent down the pipe"""
 
     def __init__(self, buffer_type, encoding=DEFAULT_ENCODING, decode_errors="strict"):
         # 0 for unbuffered, 1 for line, everything else for that amount
@@ -3097,7 +3224,9 @@ class StreamBufferer(object):
         # THE OUTPUT IS ALWAYS PY3 BYTES
 
         # TODO, when we stop supporting 2.6, make this a with context
-        self.log.debug("acquiring buffering lock to process chunk (buffering: %d)", self.type)
+        self.log.debug(
+            "acquiring buffering lock to process chunk (buffering: %d)", self.type
+        )
         self._buffering_lock.acquire()
         self.log.debug("got buffering lock to process chunk (buffering: %d)", self.type)
         try:
@@ -3121,14 +3250,14 @@ class StreamBufferer(object):
                     if newline == -1:
                         break
 
-                    chunk_to_write = chunk[:newline + 1]
+                    chunk_to_write = chunk[: newline + 1]
                     if self.buffer:
                         chunk_to_write = b"".join(self.buffer) + chunk_to_write
 
                         self.buffer = []
                         self.n_buffer_count = 0
 
-                    chunk = chunk[newline + 1:]
+                    chunk = chunk[newline + 1 :]
                     total_to_write.append(chunk_to_write)
 
                 if chunk:
@@ -3143,8 +3272,8 @@ class StreamBufferer(object):
                     overage = self.n_buffer_count + len(chunk) - self.type
                     if overage >= 0:
                         ret = "".encode(self.encoding).join(self.buffer) + chunk
-                        chunk_to_write = ret[:self.type]
-                        chunk = ret[self.type:]
+                        chunk_to_write = ret[: self.type]
+                        chunk = ret[self.type :]
                         total_to_write.append(chunk_to_write)
                         self.buffer = []
                         self.n_buffer_count = 0
@@ -3155,7 +3284,10 @@ class StreamBufferer(object):
                 return total_to_write
         finally:
             self._buffering_lock.release()
-            self.log.debug("released buffering lock for processing chunk (buffering: %d)", self.type)
+            self.log.debug(
+                "released buffering lock for processing chunk (buffering: %d)",
+                self.type,
+            )
 
     def flush(self):
         self.log.debug("acquiring buffering lock for flushing buffer")
@@ -3187,9 +3319,9 @@ def with_lock(lock):
 
 @with_lock(PUSHD_LOCK)
 def pushd(path):
-    """ pushd changes the actual working directory for the duration of the
+    """pushd changes the actual working directory for the duration of the
     context, unlike the _cwd arg this will work with other built-ins such as
-    sh.glob correctly """
+    sh.glob correctly"""
     orig_path = os.getcwd()
     os.chdir(path)
     try:
@@ -3200,12 +3332,13 @@ def pushd(path):
 
 @contextmanager
 def _args(**kwargs):
-    """ allows us to temporarily override all the special keyword parameters in
-    a with context """
+    """allows us to temporarily override all the special keyword parameters in
+    a with context"""
 
     kwargs_str = ",".join(["%s=%r" % (k, v) for k, v in kwargs.items()])
 
-    raise DeprecationWarning("""
+    raise DeprecationWarning(
+        """
 
 sh.args() has been deprecated because it was never thread safe.  use the
 following instead:
@@ -3219,23 +3352,26 @@ or
     from sh2 import your_command
     your_command()
 
-""".format(kwargs=kwargs_str))
+""".format(
+            kwargs=kwargs_str
+        )
+    )
 
 
 class Environment(dict):
-    """ this allows lookups to names that aren't found in the global scope to be
+    """this allows lookups to names that aren't found in the global scope to be
     searched for as a program name.  for example, if "ls" isn't found in this
     module's scope, we consider it a system program and try to find it.
 
     we use a dict instead of just a regular object as the base class because the
     exec() statement used in the run_repl requires the "globals" argument to be a
-    dictionary """
+    dictionary"""
 
     # this is a list of all of the names that the sh module exports that will
     # not resolve to functions.  we don't want to accidentally shadow real
     # commands with functions/imports that we define in sh.py.  for example,
     # "import time" may override the time system program
-    whitelist = set((
+    allowlist = {
         "Command",
         "RunningCommand",
         "CommandNotFound",
@@ -3247,6 +3383,7 @@ class Environment(dict):
         "ForkException",
         "TimeoutException",
         "StreamBufferer",
+        "_aggregate_keywords",
         "__project_url__",
         "__version__",
         "__file__",
@@ -3254,32 +3391,35 @@ class Environment(dict):
         "pushd",
         "glob",
         "contrib",
-    ))
+    }
 
     def __init__(self, globs, baked_args=None):
-        """ baked_args are defaults for the 'sh' execution context.  for
+        """baked_args are defaults for the 'sh' execution context.  for
         example:
 
             tmp = sh(_out=StringIO())
 
-        'out' would end up in here as an entry in the baked_args dict """
+        'out' would end up in here as an entry in the baked_args dict"""
         super(dict, self).__init__()
         self.globs = globs
         self.baked_args = baked_args or {}
 
     def __getitem__(self, k):
-        if k == 'args':
+        if k == "args":
             # Let the deprecated '_args' context manager be imported as 'args'
-            k = '_args'
+            k = "_args"
 
         # if we're trying to import something real, see if it's in our global scope.
-        # what defines "real" is that it's in our whitelist
-        if k in self.whitelist:
+        # what defines "real" is that it's in our allowlist
+        if k in self.allowlist:
             return self.globs[k]
 
         # somebody tried to be funny and do "from sh import *"
         if k == "__all__":
-            warnings.warn("Cannot import * from sh. Please import sh or import programs individually.")
+            warnings.warn(
+                "Cannot import * from sh. Please import sh or import programs "
+                "individually."
+            )
             return []
 
         # check if we're naming a dynamically generated ReturnCode exception
@@ -3292,16 +3432,16 @@ class Environment(dict):
         if k.startswith("__") and k.endswith("__"):
             raise AttributeError
 
+        # is it a command?
+        cmd = resolve_command(k, self.globs[Command.__name__], self.baked_args)
+        if cmd:
+            return cmd
+
         # is it a custom builtin?
         builtin = getattr(self, "b_" + k, None)
         if builtin:
             return builtin
 
-        # is it a command?
-        cmd = resolve_command(k, self.baked_args)
-        if cmd:
-            return cmd
-
         # how about an environment variable?
         # this check must come after testing if its a command, because on some
         # systems, there are an environment variables that can conflict with
@@ -3315,20 +3455,11 @@ class Environment(dict):
         # nothing found, raise an exception
         raise CommandNotFound(k)
 
-    # methods that begin with "b_" are custom builtins and will override any
-    # program that exists in our path.  this is useful for things like
-    # common shell builtins that people are used to, but which aren't actually
-    # full-fledged system binaries
-    @staticmethod
-    def b_cd(path=None):
-        if path:
-            os.chdir(path)
-        else:
-            os.chdir(os.path.expanduser('~'))
-
+    # Methods that begin with "b_" are implementations of shell built-ins that
+    # people are used to, but which may not have an executable equivalent.
     @staticmethod
     def b_which(program, paths=None):
-        return which(program, paths)
+        return _which(program, paths)
 
 
 class Contrib(ModuleType):  # pragma: no cover
@@ -3337,7 +3468,7 @@ class Contrib(ModuleType):  # pragma: no cover
         def wrapper1(fn):
             @property
             def cmd_getter(self):
-                cmd = resolve_command(name)
+                cmd = resolve_command(name, Command)
 
                 if not cmd:
                     raise CommandNotFound(name)
@@ -3358,15 +3489,15 @@ sys.modules[mod_name] = contrib
 
 @contrib("git")
 def git(orig):  # pragma: no cover
-    """ most git commands play nicer without a TTY """
+    """most git commands play nicer without a TTY"""
     cmd = orig.bake(_tty_out=False)
     return cmd
 
 
 @contrib("sudo")
 def sudo(orig):  # pragma: no cover
-    """ a nicer version of sudo that uses getpass to ask for a password, or
-    allows the first argument to be a string password """
+    """a nicer version of sudo that uses getpass to ask for a password, or
+    allows the first argument to be a string password"""
 
     prompt = "[sudo] password for %s: " % getpass.getuser()
 
@@ -3391,7 +3522,7 @@ def sudo(orig):  # pragma: no cover
 
 @contrib("ssh")
 def ssh(orig):  # pragma: no cover
-    """ An ssh command for automatic password login """
+    """An ssh command for automatic password login"""
 
     class SessionContent(object):
         def __init__(self):
@@ -3452,20 +3583,33 @@ def ssh(orig):  # pragma: no cover
         prompt = "Please enter SSH password: "
 
         if prompt_match is None:
-            def prompt_match(content): return content.cur_line.endswith("password: ")  # noqa: E731
+
+            def prompt_match(content):
+                return content.cur_line.endswith("password: ")  # noqa: E731
 
         if password is None:
-            def pass_getter(): return getpass.getpass(prompt=prompt)  # noqa: E731
+
+            def pass_getter():
+                return getpass.getpass(prompt=prompt)  # noqa: E731
+
         else:
-            def pass_getter(): return password.rstrip("\n")  # noqa: E731
+
+            def pass_getter():
+                return password.rstrip("\n")  # noqa: E731
 
         if login_success is None:
-            def login_success(content): return True  # noqa: E731
 
-        kwargs["_out"] = SSHInteract(prompt_match, pass_getter, real_out_handler, login_success)
+            def login_success(content):
+                return True  # noqa: E731
+
+        kwargs["_out"] = SSHInteract(
+            prompt_match, pass_getter, real_out_handler, login_success
+        )
         return a, kwargs
 
-    cmd = orig.bake(_out_bufsize=0, _tty_in=True, _unify_ttys=True, _arg_preprocess=process)
+    cmd = orig.bake(
+        _out_bufsize=0, _tty_in=True, _unify_ttys=True, _arg_preprocess=process
+    )
     return cmd
 
 
@@ -3475,7 +3619,7 @@ def run_repl(env):  # pragma: no cover
     print(banner.format(version=__version__))
     while True:
         try:
-            line = raw_input("sh> ")
+            line = input("sh> ")
         except (ValueError, EOFError):
             break
 
@@ -3502,8 +3646,8 @@ class SelfWrapper(ModuleType):
         # nicely.  if i make these attributes dynamic lookups in
         # __getattr__, reload sometimes chokes in weird ways...
         super(SelfWrapper, self).__init__(
-            name=getattr(self_module, '__name__', None),
-            doc=getattr(self_module, '__doc__', None)
+            name=getattr(self_module, "__name__", None),
+            doc=getattr(self_module, "__doc__", None),
         )
         for attr in ["__builtins__", "__file__", "__package__"]:
             setattr(self, attr, getattr(self_module, attr, None))
@@ -3512,279 +3656,36 @@ class SelfWrapper(ModuleType):
         # if we set this to None.  and 3.3 needs a value for __path__
         self.__path__ = []
         self.__self_module = self_module
-        self.__env = Environment(globals(), baked_args=baked_args)
+
+        # Copy the Command class and add any baked call kwargs to it
+        command_cls = Command
+        cls_attrs = command_cls.__dict__.copy()
+        cls_attrs.pop("__dict__", None)
+        if baked_args:
+            call_args, _ = command_cls._extract_call_args(baked_args)
+            cls_attrs["_call_args"] = cls_attrs["_call_args"].copy()
+            cls_attrs["_call_args"].update(call_args)
+        globs = globals().copy()
+        globs[command_cls.__name__] = type(
+            command_cls.__name__, command_cls.__bases__, cls_attrs
+        )
+
+        self.__env = Environment(globs, baked_args=baked_args)
 
     def __getattr__(self, name):
         return self.__env[name]
 
-    def __call__(self, **kwargs):
-        """ returns a new SelfWrapper object, where all commands spawned from it
-        have the baked_args kwargs set on them by default """
+    def bake(self, **kwargs):
         baked_args = self.__env.baked_args.copy()
         baked_args.update(kwargs)
-        new_mod = self.__class__(self.__self_module, baked_args)
-
-        # inspect the line in the parent frame that calls and assigns the new sh
-        # variable, and get the name of the new variable we're assigning to.
-        # this is very brittle and pretty much a sin.  but it works in 99% of
-        # the time and the tests pass
-        #
-        # the reason we need to do this is because we need to remove the old
-        # cached module from sys.modules.  if we don't, it gets re-used, and any
-        # old baked params get used, which is not what we want
-        parent = inspect.stack()[1]
-        try:
-            code = parent[4][0].strip()
-        except TypeError:
-            # On the REPL or from the commandline, we don't get the source code in the
-            # top stack frame
-            # Older versions of pypy don't set parent[1] the same way as CPython or newer versions
-            # of Pypy so we have to special case that too.
-            if parent[1] in ('<stdin>', '<string>') or (
-                    parent[1] == '<module>' and platform.python_implementation().lower() == 'pypy'):
-                # This depends on things like Python's calling convention and the layout of stack
-                # frames but it's a fix for a bug in a very cornery cornercase so....
-                module_name = parent[0].f_code.co_names[-1]
-            else:
-                raise
-        else:
-            parsed = ast.parse(code)
-            try:
-                module_name = parsed.body[0].targets[0].id
-            except Exception:
-                # Diagnose what went wrong
-                if not isinstance(parsed.body[0], ast.Assign):
-                    raise RuntimeError("A new execution context must be assigned to a variable")
-                raise
-
-        if module_name == __name__:
-            raise RuntimeError("Cannot use the name '%s' as an execution context" % __name__)
-
-        sys.modules.pop(module_name, None)
-
-        return new_mod
-
-
-def in_importlib(frame):
-    """ helper for checking if a filename is in importlib guts """
-    return frame.f_code.co_filename == "<frozen importlib._bootstrap>"
-
-
-def register_importer():
-    """ registers our fancy importer that can let us import from a module name,
-    like:
-
-        import sh
-        tmp = sh()
-        from tmp import ls
-    """
-
-    def test(importer_cls):
-        try:
-            return importer_cls.__class__.__name__ == ModuleImporterFromVariables.__name__
-        except AttributeError:
-            # ran into importer which is not a class instance
-            return False
-
-    already_registered = any([True for i in sys.meta_path if test(i)])
-
-    if not already_registered:
-        importer = ModuleImporterFromVariables(restrict_to=[SelfWrapper.__name__], )
-        sys.meta_path.insert(0, importer)
-
-    return not already_registered
-
-
-def fetch_module_from_frame(name, frame):
-    mod = frame.f_locals.get(name, frame.f_globals.get(name, None))
-    return mod
-
-
-class ModuleImporterFromVariables(object):
-    """ a fancy importer that allows us to import from a variable that was
-    recently set in either the local or global scope, like this:
-
-        sh2 = sh(_timeout=3)
-        from sh2 import ls
-
-    """
-
-    def __init__(self, restrict_to=None):
-        self.restrict_to = set(restrict_to or set())
-
-    def find_module(self, mod_fullname, path=None):
-        """ mod_fullname doubles as the name of the VARIABLE holding our new sh
-        context.  for example:
-
-            derp = sh()
-            from derp import ls
-
-        here, mod_fullname will be "derp".  keep that in mind as we go through
-        the rest of this function """
-
-        parent_frame = inspect.currentframe().f_back
-
-        if parent_frame and parent_frame.f_code.co_name == "find_spec":
-            parent_frame = parent_frame.f_back
-
-        while parent_frame and in_importlib(parent_frame):
-            parent_frame = parent_frame.f_back
-
-        # Calling PyImport_ImportModule("some_module"); via the C API may not
-        # have a parent frame. Early-out to avoid in_importlib() trying to
-        # get f_code from None when looking for 'some_module'.
-        # This also happens when using gevent apparently.
-        if not parent_frame:
-            return None
-
-        # this line is saying "hey, does mod_fullname exist as a name we've
-        # defined previously?"  the purpose of this is to ensure that
-        # mod_fullname is really a thing we've defined.  if we haven't defined
-        # it before, then we "can't" import from it
-        module = fetch_module_from_frame(mod_fullname, parent_frame)
-        if not module:
-            return None
-
-        # make sure it's a class we're allowed to import from
-        if module.__class__.__name__ not in self.restrict_to:
-            return None
-
-        return self
-
-    def find_spec(self, fullname, path=None, target=None):
-        """ find_module() is deprecated since Python 3.4 in favor of find_spec() """
-
-        from importlib.machinery import ModuleSpec
-        found = self.find_module(fullname, path)
-        return ModuleSpec(fullname, found) if found is not None else None
-
-    def load_module(self, mod_fullname):
-        parent_frame = inspect.currentframe().f_back
-
-        while in_importlib(parent_frame):
-            parent_frame = parent_frame.f_back
-
-        module = fetch_module_from_frame(mod_fullname, parent_frame)
-
-        # we HAVE to include the module in sys.modules, per the import PEP.
-        # older versions of python were more lenient about this being set, but
-        # not in >= python3.3, unfortunately.  this requirement necessitates the
-        # ugly code in SelfWrapper.__call__
-        sys.modules[mod_fullname] = module
-        module.__loader__ = self
-
-        return module
-
-
-def run_tests(env, locale, a, version, force_select, **extra_env):  # pragma: no cover
-    py_version = "python"
-    py_version += str(version)
-
-    py_bin = which(py_version)
-    return_code = None
-
-    poller = "poll"
-    if force_select:
-        poller = "select"
-
-    if py_bin:
-        print("Testing %s, locale %r, poller: %s" % (py_version.capitalize(), locale, poller))
-
-        env["SH_TESTS_USE_SELECT"] = str(int(force_select))
-        env["LANG"] = locale
-
-        for k, v in extra_env.items():
-            env[k] = str(v)
-
-        cmd = [py_bin, "-W", "ignore", os.path.join(THIS_DIR, "test.py")] + a[1:]
-        print("Running %r" % cmd)
-        return_code = os.spawnve(os.P_WAIT, cmd[0], cmd, env)
-
-    return return_code
-
-
-def main():  # pragma: no cover
-    from optparse import OptionParser
-
-    parser = OptionParser()
-    parser.add_option("-e", "--envs", dest="envs", default=None, action="append")
-    parser.add_option("-l", "--locales", dest="constrain_locales", default=None, action="append")
-    options, parsed_args = parser.parse_args()
-
-    # these are essentially restrictions on what envs/constrain_locales to restrict to for
-    # the tests.  if they're empty lists, it means use all available
-    action = None
-    if parsed_args:
-        action = parsed_args[0]
-
-    if action in ("test", "travis", "tox"):
-        import test
-        coverage = None
-        if test.HAS_UNICODE_LITERAL:
-            try:
-                import coverage
-            except ImportError:
-                pass
-
-        env = os.environ.copy()
-        env["SH_TESTS_RUNNING"] = "1"
-        if coverage:
-            test.append_module_path(env, coverage)
-
-        # if we're testing locally, run all versions of python on the system
-        if action == "test":
-            all_versions = ("2.6", "2.7", "3.1", "3.2", "3.3", "3.4", "3.5", "3.6", "3.7", "3.8")
-
-        # if we're testing on travis or tox, just use the system's default python, since travis will spawn a vm per
-        # python version in our .travis.yml file, and tox will run its matrix via tox.ini
-        else:
-            v = sys.version_info
-            sys_ver = "%d.%d" % (v[0], v[1])
-            all_versions = (sys_ver,)
-
-        all_force_select = [True]
-        if HAS_POLL:
-            all_force_select.append(False)
-
-        all_locales = ("en_US.UTF-8", "C")
-        i = 0
-        ran_versions = set()
-        for locale in all_locales:
-            # make sure this locale is allowed
-            if options.constrain_locales and locale not in options.constrain_locales:
-                continue
-
-            for version in all_versions:
-                # make sure this version is allowed
-                if options.envs and version not in options.envs:
-                    continue
-
-                for force_select in all_force_select:
-                    env_copy = env.copy()
-
-                    ran_versions.add(version)
-                    exit_code = run_tests(env_copy, locale, parsed_args, version, force_select, SH_TEST_RUN_IDX=i)
-
-                    if exit_code is None:
-                        print("Couldn't find %s, skipping" % version)
-
-                    elif exit_code != 0:
-                        print("Failed for %s, %s" % (version, locale))
-                        exit(1)
-
-                    i += 1
-
-        print("Tested Python versions: %s" % ",".join(sorted(list(ran_versions))))
-
-    else:
-        env = Environment(globals())
-        run_repl(env)
+        new_sh = self.__class__(self.__self_module, baked_args)
+        return new_sh
 
 
 if __name__ == "__main__":  # pragma: no cover
     # we're being run as a stand-alone script
-    main()
+    env = Environment(globals())
+    run_repl(env)
 else:
     # we're being imported from somewhere
     sys.modules[__name__] = SelfWrapper(sys.modules[__name__])
-    register_importer()
diff --git a/tests/Dockerfile b/tests/Dockerfile
new file mode 100644
index 0000000..a429352
--- /dev/null
+++ b/tests/Dockerfile
@@ -0,0 +1,47 @@
+FROM ubuntu:bionic
+
+ARG cache_bust
+RUN apt update &&\
+    apt -y install locales
+
+RUN locale-gen en_US.UTF-8
+ENV LANG en_US.UTF-8
+ENV LANGUAGE en_US:en
+ENV LC_ALL en_US.UTF-8
+ENV TZ Etc/UTC
+ENV DEBIAN_FRONTEND noninteractive
+
+RUN apt-get -y install\
+    software-properties-common\
+    curl\
+    sudo\
+    lsof
+
+RUN add-apt-repository ppa:deadsnakes/ppa
+RUN apt-get update
+RUN apt-get -y install\
+    python3.8\
+    python3.9\
+    python3.10\
+    python3.11
+
+RUN apt-get -y install\
+    python3.8-distutils\
+    python3.9-distutils\
+    && curl https://bootstrap.pypa.io/get-pip.py | python3.9 -
+
+ARG uid=1000
+RUN groupadd -g $uid shtest\
+    && useradd -m -u $uid -g $uid shtest\
+    && gpasswd -a shtest sudo\
+    && echo "shtest:shtest" | chpasswd
+
+
+ENV TOX_PARALLEL_NO_SPINNER=1
+USER shtest
+WORKDIR /home/shtest/
+
+ENV PATH="/home/shtest/.local/bin:$PATH"
+RUN pip install tox flake8 black rstcheck mypy
+
+COPY README.rst sh.py .flake8 tox.ini tests/test.py /home/shtest/
diff --git a/tests/__init__.py b/tests/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/test.py b/tests/test.py
similarity index 72%
rename from test.py
rename to tests/test.py
index d9cc1da..d18e675 100644
--- a/test.py
+++ b/tests/test.py
@@ -1,7 +1,5 @@
 # -*- coding: utf8 -*-
-from contextlib import contextmanager
-from functools import wraps
-from os.path import exists, join, realpath, dirname, split
+import asyncio
 import errno
 import fcntl
 import inspect
@@ -10,62 +8,56 @@ import os
 import platform
 import pty
 import resource
-import sh
 import signal
 import stat
 import sys
 import tempfile
 import time
 import unittest
+import unittest.mock
 import warnings
+from asyncio.queues import Queue as AQueue
+from contextlib import contextmanager
+from functools import partial, wraps
+from hashlib import md5
+from io import BytesIO, StringIO
+from os.path import dirname, exists, join, realpath, split
+from pathlib import Path
 
-IS_PY3 = sys.version_info[0] == 3
-IS_PY2 = not IS_PY3
-MINOR_VER = sys.version_info[1]
+import sh
 
-# coverage doesn't work in python 3.1, 3.2 due to it just being a shit
-# python
-HAS_UNICODE_LITERAL = not (IS_PY3 and MINOR_VER in (1, 2))
+THIS_DIR = Path(__file__).resolve().parent
+RAND_BYTES = os.urandom(10)
 
-cov = None
-if HAS_UNICODE_LITERAL:
-    run_idx = int(os.environ.pop("SH_TEST_RUN_IDX", "0"))
-    first_run = run_idx == 0
+# we have to use the real path because on osx, /tmp is a symlink to
+# /private/tmp, and so assertions that gettempdir() == sh.pwd() will fail
+tempdir = Path(tempfile.gettempdir()).resolve()
+IS_MACOS = platform.system() in ("AIX", "Darwin")
 
-    try:
-        import coverage
-    except ImportError:
-        pass
-    else:
-        # for some reason, we can't run auto_data on the first run, or the coverage
-        # numbers get really screwed up
-        auto_data = True
-        if first_run:
-            auto_data = False
 
-        cov = coverage.Coverage(auto_data=auto_data)
+def hash(a: str):
+    h = md5(a.encode("utf8") + RAND_BYTES)
+    return h.hexdigest()
 
-        if first_run:
-            cov.erase()
 
-        cov.start()
+def randomize_order(a, b):
+    h1 = hash(a)
+    h2 = hash(b)
+    if h1 == h2:
+        return 0
+    elif h1 < h2:
+        return -1
+    else:
+        return 1
 
-try:
-    import unittest.mock
-except ImportError:
-    HAS_MOCK = False
-else:
-    HAS_MOCK = True
 
-# we have to use the real path because on osx, /tmp is a symlink to
-# /private/tmp, and so assertions that gettempdir() == sh.pwd() will fail
-tempdir = realpath(tempfile.gettempdir())
-IS_MACOS = platform.system() in ("AIX", "Darwin")
+unittest.TestLoader.sortTestMethodsUsing = staticmethod(randomize_order)
 
 
 # these 3 functions are helpers for modifying PYTHONPATH with a module's main
 # directory
 
+
 def append_pythonpath(env, path):
     key = "PYTHONPATH"
     pypath = [p for p in env.get(key, "").split(":") if p]
@@ -88,61 +80,14 @@ def append_module_path(env, m):
     append_pythonpath(env, get_module_import_dir(m))
 
 
-if IS_PY3:
-    xrange = range
-    unicode = str
-    long = int
-    from io import StringIO
-
-    ioStringIO = StringIO
-    from io import BytesIO as cStringIO
-
-    iocStringIO = cStringIO
-else:
-    from StringIO import StringIO
-    from cStringIO import StringIO as cStringIO
-    from io import StringIO as ioStringIO
-    from io import BytesIO as iocStringIO
-
-THIS_DIR = dirname(os.path.abspath(__file__))
-
 system_python = sh.Command(sys.executable)
 
 # this is to ensure that our `python` helper here is able to import our local sh
 # module, and not the system one
 baked_env = os.environ.copy()
 append_module_path(baked_env, sh)
-python = system_python.bake(_env=baked_env)
-
-if hasattr(logging, 'NullHandler'):
-    NullHandler = logging.NullHandler
-else:
-    class NullHandler(logging.Handler):
-        def handle(self, record):
-            pass
-
-        def emit(self, record):
-            pass
-
-        def createLock(self):
-            self.lock = None
-
-skipUnless = getattr(unittest, "skipUnless", None)
-if not skipUnless:
-    # our stupid skipUnless wrapper for python2.6
-    def skipUnless(condition, reason):
-        def wrapper(test):
-            if condition:
-                return test
-            else:
-                @wraps(test)
-                def skip(*args, **kwargs):
-                    return
-
-                return skip
-
-        return wrapper
-skip_unless = skipUnless
+python = system_python.bake(_env=baked_env, _return_cmd=True)
+pythons = python.bake(_return_cmd=False)
 
 
 def requires_progs(*progs):
@@ -154,21 +99,24 @@ def requires_progs(*progs):
             missing.append(prog)
 
     friendly_missing = ", ".join(missing)
-    return skipUnless(len(missing) == 0, "Missing required system programs: %s"
-                      % friendly_missing)
+    return unittest.skipUnless(
+        len(missing) == 0, "Missing required system programs: %s" % friendly_missing
+    )
 
 
-requires_posix = skipUnless(os.name == "posix", "Requires POSIX")
-requires_utf8 = skipUnless(sh.DEFAULT_ENCODING == "UTF-8", "System encoding must be UTF-8")
-not_macos = skipUnless(not IS_MACOS, "Doesn't work on MacOS")
-requires_py3 = skipUnless(IS_PY3, "Test only works on Python 3")
-requires_py35 = skipUnless(IS_PY3 and MINOR_VER >= 5, "Test only works on Python 3.5 or higher")
+requires_posix = unittest.skipUnless(os.name == "posix", "Requires POSIX")
+requires_utf8 = unittest.skipUnless(
+    sh.DEFAULT_ENCODING == "UTF-8", "System encoding must be UTF-8"
+)
+not_macos = unittest.skipUnless(not IS_MACOS, "Doesn't work on MacOS")
 
 
 def requires_poller(poller):
     use_select = bool(int(os.environ.get("SH_TESTS_USE_SELECT", "0")))
     cur_poller = "select" if use_select else "poll"
-    return skipUnless(cur_poller == poller, "Only enabled for select.%s" % cur_poller)
+    return unittest.skipUnless(
+        cur_poller == poller, "Only enabled for select.%s" % cur_poller
+    )
 
 
 @contextmanager
@@ -182,14 +130,13 @@ def ulimit(key, new_soft):
 
 
 def create_tmp_test(code, prefix="tmp", delete=True, **kwargs):
-    """ creates a temporary test file that lives on disk, on which we can run
-    python with sh """
+    """creates a temporary test file that lives on disk, on which we can run
+    python with sh"""
 
     py = tempfile.NamedTemporaryFile(prefix=prefix, delete=delete)
 
     code = code.format(**kwargs)
-    if IS_PY3:
-        code = code.encode("UTF-8")
+    code = code.encode("UTF-8")
 
     py.write(code)
     py.flush()
@@ -205,6 +152,12 @@ def create_tmp_test(code, prefix="tmp", delete=True, **kwargs):
 
 
 class BaseTests(unittest.TestCase):
+    def setUp(self):
+        warnings.simplefilter("ignore", ResourceWarning)
+
+    def tearDown(self):
+        warnings.simplefilter("default", ResourceWarning)
+
     def assert_oserror(self, num, fn, *args, **kwargs):
         try:
             fn(*args, **kwargs)
@@ -218,59 +171,35 @@ class BaseTests(unittest.TestCase):
             self.assertEqual(len(w), 1)
             self.assertTrue(issubclass(w[-1].category, DeprecationWarning))
 
-    # python2.6 lacks this
-    def assertIn(self, needle, haystack):
-        s = super(BaseTests, self)
-        if hasattr(s, "assertIn"):
-            s.assertIn(needle, haystack)
-        else:
-            self.assertTrue(needle in haystack)
-
-    # python2.6 lacks this
-    def assertNotIn(self, needle, haystack):
-        s = super(BaseTests, self)
-        if hasattr(s, "assertNotIn"):
-            s.assertNotIn(needle, haystack)
-        else:
-            self.assertTrue(needle not in haystack)
 
-    # python2.6 lacks this
-    def assertLess(self, a, b):
-        s = super(BaseTests, self)
-        if hasattr(s, "assertLess"):
-            s.assertLess(a, b)
-        else:
-            self.assertTrue(a < b)
+class ArgTests(BaseTests):
+    def test_list_args(self):
+        processed = sh._aggregate_keywords({"arg": [1, 2, 3]}, "=", "--")
+        self.assertListEqual(processed, ["--arg=1", "--arg=2", "--arg=3"])
 
-    # python2.6 lacks this
-    def assertGreater(self, a, b):
-        s = super(BaseTests, self)
-        if hasattr(s, "assertGreater"):
-            s.assertGreater(a, b)
-        else:
-            self.assertTrue(a > b)
+    def test_bool_values(self):
+        processed = sh._aggregate_keywords({"truthy": True, "falsey": False}, "=", "--")
+        self.assertListEqual(processed, ["--truthy"])
 
-    # python2.6 lacks this
-    def skipTest(self, msg):
-        s = super(BaseTests, self)
-        if hasattr(s, "skipTest"):
-            s.skipTest(msg)
-        else:
-            return
+    def test_space_sep(self):
+        processed = sh._aggregate_keywords({"arg": "123"}, " ", "--")
+        self.assertListEqual(processed, ["--arg", "123"])
 
 
 @requires_posix
 class FunctionalTests(BaseTests):
-
     def setUp(self):
         self._environ = os.environ.copy()
+        super().setUp()
 
     def tearDown(self):
         os.environ = self._environ
+        super().tearDown()
 
     def test_print_command(self):
         from sh import ls, which
-        actual_location = which("ls")
+
+        actual_location = which("ls").strip()
         out = str(ls)
         self.assertEqual(out, actual_location)
 
@@ -278,21 +207,17 @@ class FunctionalTests(BaseTests):
         from sh import echo
 
         test = "漢字"
-        if not IS_PY3:
-            test = test.decode("utf8")
-
         p = echo(test, _encoding="utf8")
         output = p.strip()
         self.assertEqual(test, output)
 
     def test_unicode_exception(self):
         from sh import ErrorReturnCode
+
         py = create_tmp_test("exit(1)")
 
         arg = "漢字"
         native_arg = arg
-        if not IS_PY3:
-            arg = arg.decode("utf8")
 
         try:
             python(py.name, arg, _encoding="utf8")
@@ -309,77 +234,94 @@ class FunctionalTests(BaseTests):
         self.assertEqual(out, b"hi world\n")
 
     def test_trunc_exc(self):
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 import sys
 sys.stdout.write("a" * 1000)
 sys.stderr.write("b" * 1000)
 exit(1)
-""")
+"""
+        )
         self.assertRaises(sh.ErrorReturnCode_1, python, py.name)
 
     def test_number_arg(self):
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 from optparse import OptionParser
 parser = OptionParser()
 options, args = parser.parse_args()
 print(args[0])
-""")
+"""
+        )
 
         out = python(py.name, 3).strip()
         self.assertEqual(out, "3")
 
+    def test_arg_string_coercion(self):
+        py = create_tmp_test(
+            """
+from argparse import ArgumentParser
+parser = ArgumentParser()
+parser.add_argument("-n", type=int)
+parser.add_argument("--number", type=int)
+ns = parser.parse_args()
+print(ns.n + ns.number)
+"""
+        )
+
+        out = python(py.name, n=3, number=4, _long_sep=None).strip()
+        self.assertEqual(out, "7")
+
     def test_empty_stdin_no_hang(self):
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 import sys
 data = sys.stdin.read()
 sys.stdout.write("no hang")
-""")
-        out = python(py.name, _in="", _timeout=2)
+"""
+        )
+        out = pythons(py.name, _in="", _timeout=2)
         self.assertEqual(out, "no hang")
 
-        out = python(py.name, _in=None, _timeout=2)
+        out = pythons(py.name, _in=None, _timeout=2)
         self.assertEqual(out, "no hang")
 
     def test_exit_code(self):
         from sh import ErrorReturnCode_3
-        py = create_tmp_test("""
+
+        py = create_tmp_test(
+            """
 exit(3)
-""")
+"""
+        )
         self.assertRaises(ErrorReturnCode_3, python, py.name)
 
     def test_patched_glob(self):
         from glob import glob
 
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 import sys
 print(sys.argv[1:])
-""")
+"""
+        )
         files = glob("*.faowjefoajweofj")
         out = python(py.name, files).strip()
         self.assertEqual(out, "['*.faowjefoajweofj']")
 
-    @requires_py35
-    def test_patched_glob_with_recursive_argument(self):
-        from glob import glob
-
-        py = create_tmp_test("""
-import sys
-print(sys.argv[1:])
-""")
-        files = glob("*.faowjefoajweofj", recursive=True)
-        out = python(py.name, files).strip()
-        self.assertEqual(out, "['*.faowjefoajweofj']")
-
     def test_exit_code_with_hasattr(self):
         from sh import ErrorReturnCode_3
-        py = create_tmp_test("""
+
+        py = create_tmp_test(
+            """
 exit(3)
-""")
+"""
+        )
 
         try:
             out = python(py.name, _iter=True)
             # hasattr can swallow exceptions
-            hasattr(out, 'something_not_there')
+            hasattr(out, "something_not_there")
             list(out)
             self.assertEqual(out.exit_code, 3)
             self.fail("Command exited with error, but no exception thrown")
@@ -388,9 +330,12 @@ exit(3)
 
     def test_exit_code_from_exception(self):
         from sh import ErrorReturnCode_3
-        py = create_tmp_test("""
+
+        py = create_tmp_test(
+            """
 exit(3)
-""")
+"""
+        )
 
         self.assertRaises(ErrorReturnCode_3, python, py.name)
 
@@ -401,11 +346,13 @@ exit(3)
 
     def test_stdin_from_string(self):
         from sh import sed
-        self.assertEqual(sed(_in="one test three", e="s/test/two/").strip(),
-                         "one two three")
+
+        self.assertEqual(
+            sed(_in="one test three", e="s/test/two/").strip(), "one two three"
+        )
 
     def test_ok_code(self):
-        from sh import ls, ErrorReturnCode_1, ErrorReturnCode_2
+        from sh import ErrorReturnCode_1, ErrorReturnCode_2, ls
 
         exc_to_test = ErrorReturnCode_2
         code_to_pass = 2
@@ -424,14 +371,17 @@ exit(3)
 
     def test_ok_code_exception(self):
         from sh import ErrorReturnCode_0
+
         py = create_tmp_test("exit(0)")
         self.assertRaises(ErrorReturnCode_0, python, py.name, _ok_code=2)
 
     def test_none_arg(self):
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 import sys
 print(sys.argv[1:])
-""")
+"""
+        )
         maybe_arg = "some"
         out = python(py.name, maybe_arg).strip()
         self.assertEqual(out, "['some']")
@@ -441,34 +391,37 @@ print(sys.argv[1:])
         self.assertEqual(out, "[]")
 
     def test_quote_escaping(self):
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 from optparse import OptionParser
 parser = OptionParser()
 options, args = parser.parse_args()
 print(args)
-""")
+"""
+        )
         out = python(py.name, "one two three").strip()
         self.assertEqual(out, "['one two three']")
 
-        out = python(py.name, "one \"two three").strip()
+        out = python(py.name, 'one "two three').strip()
         self.assertEqual(out, "['one \"two three']")
 
         out = python(py.name, "one", "two three").strip()
         self.assertEqual(out, "['one', 'two three']")
 
-        out = python(py.name, "one", "two \"haha\" three").strip()
+        out = python(py.name, "one", 'two "haha" three').strip()
         self.assertEqual(out, "['one', 'two \"haha\" three']")
 
         out = python(py.name, "one two's three").strip()
-        self.assertEqual(out, "[\"one two's three\"]")
+        self.assertEqual(out, '["one two\'s three"]')
 
-        out = python(py.name, 'one two\'s three').strip()
-        self.assertEqual(out, "[\"one two's three\"]")
+        out = python(py.name, "one two's three").strip()
+        self.assertEqual(out, '["one two\'s three"]')
 
     def test_multiple_pipes(self):
         import time
 
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 import sys
 import os
 import time
@@ -476,19 +429,22 @@ import time
 for l in "andrew":
     sys.stdout.write(l)
     time.sleep(.2)
-""")
+"""
+        )
 
-        inc_py = create_tmp_test("""
+        inc_py = create_tmp_test(
+            """
 import sys
 while True:
     letter = sys.stdin.read(1)
     if not letter:
         break
     sys.stdout.write(chr(ord(letter)+1))
-""")
+"""
+        )
 
-        def inc(proc, *args, **kwargs):
-            return python(proc, "-u", inc_py.name, *args, **kwargs)
+        def inc(*args, **kwargs):
+            return python("-u", inc_py.name, *args, **kwargs)
 
         class Derp(object):
             def __init__(self):
@@ -506,16 +462,16 @@ while True:
         derp = Derp()
 
         p = inc(
-            inc(
-                inc(
-                    python("-u", py.name, _piped=True),
-                    _piped=True),
-                _piped=True),
-            _out=derp.agg)
+            _in=inc(
+                _in=inc(_in=python("-u", py.name, _piped=True), _piped=True),
+                _piped=True,
+            ),
+            _out=derp.agg,
+        )
 
         p.wait()
         self.assertEqual("".join(derp.stdout), "dqguhz")
-        self.assertTrue(all([t > .15 for t in derp.times]))
+        self.assertTrue(all([t > 0.15 for t in derp.times]))
 
     def test_manual_stdin_string(self):
         from sh import tr
@@ -533,9 +489,10 @@ while True:
         self.assertEqual(out, match)
 
     def test_manual_stdin_file(self):
-        from sh import tr
         import tempfile
 
+        from sh import tr
+
         test_string = "testing\nherp\nderp\n"
 
         stdin = tempfile.NamedTemporaryFile()
@@ -549,6 +506,7 @@ while True:
 
     def test_manual_stdin_queue(self):
         from sh import tr
+
         try:
             from Queue import Queue
         except ImportError:
@@ -567,8 +525,8 @@ while True:
         self.assertEqual(out, match)
 
     def test_environment(self):
-        """ tests that environments variables that we pass into sh commands
-        exist in the environment, and on the sh module """
+        """tests that environments variables that we pass into sh commands
+        exist in the environment, and on the sh module"""
         import os
 
         # this is the environment we'll pass into our commands
@@ -576,18 +534,21 @@ while True:
 
         # first we test that the environment exists in our child process as
         # we've set it
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 import os
 
 for key in list(os.environ.keys()):
     if key != "HERP":
         del os.environ[key]
 print(dict(os.environ))
-""")
+"""
+        )
         out = python(py.name, _env=env).strip()
         self.assertEqual(out, "{'HERP': 'DERP'}")
 
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 import os, sys
 sys.path.insert(0, os.getcwd())
 import sh
@@ -595,7 +556,8 @@ for key in list(os.environ.keys()):
     if key != "HERP":
         del os.environ[key]
 print(dict(HERP=sh.HERP))
-""")
+"""
+        )
         out = python(py.name, _env=env, _cwd=THIS_DIR).strip()
         self.assertEqual(out, "{'HERP': 'DERP'}")
 
@@ -605,15 +567,21 @@ print(dict(HERP=sh.HERP))
         self.assertEqual(out, "{'HERP': 'DERP'}")
 
     def test_which(self):
-        from sh import which, ls
+        # Test 'which' as built-in function
+        from sh import ls
+
+        which = sh._SelfWrapper__env.b_which
         self.assertEqual(which("fjoawjefojawe"), None)
         self.assertEqual(which("ls"), str(ls))
 
     def test_which_paths(self):
-        from sh import which
-        py = create_tmp_test("""
+        # Test 'which' as built-in function
+        which = sh._SelfWrapper__env.b_which
+        py = create_tmp_test(
+            """
 print("hi")
-""")
+"""
+        )
         test_path = dirname(py.name)
         _, test_name = os.path.split(py.name)
 
@@ -624,9 +592,11 @@ print("hi")
         self.assertEqual(found_path, py.name)
 
     def test_no_close_fds(self):
-        # guarantee some extra fds in our parent process that don't close on exec.  we have to explicitly do this
-        # because at some point (I believe python 3.4), python started being more stringent with closing fds to prevent
-        # security vulnerabilities.  python 2.7, for example, doesn't set CLOEXEC on tempfile.TemporaryFile()s
+        # guarantee some extra fds in our parent process that don't close on exec. we
+        # have to explicitly do this because at some point (I believe python 3.4),
+        # python started being more stringent with closing fds to prevent security
+        # vulnerabilities.  python 2.7, for example, doesn't set CLOEXEC on
+        # tempfile.TemporaryFile()s
         #
         # https://www.python.org/dev/peps/pep-0446/
         tmp = [tempfile.TemporaryFile() for i in range(10)]
@@ -635,22 +605,26 @@ print("hi")
             flags &= ~fcntl.FD_CLOEXEC
             fcntl.fcntl(t.fileno(), fcntl.F_SETFD, flags)
 
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 import os
 print(len(os.listdir("/dev/fd")))
-""")
+"""
+        )
         out = python(py.name, _close_fds=False).strip()
-        # pick some number greater than 4, since it's hard to know exactly how many fds will be open/inherted in the
-        # child
+        # pick some number greater than 4, since it's hard to know exactly how many fds
+        # will be open/inherted in the child
         self.assertGreater(int(out), 7)
 
         for t in tmp:
             t.close()
 
     def test_close_fds(self):
-        # guarantee some extra fds in our parent process that don't close on exec.  we have to explicitly do this
-        # because at some point (I believe python 3.4), python started being more stringent with closing fds to prevent
-        # security vulnerabilities.  python 2.7, for example, doesn't set CLOEXEC on tempfile.TemporaryFile()s
+        # guarantee some extra fds in our parent process that don't close on exec.
+        # we have to explicitly do this because at some point (I believe python 3.4),
+        # python started being more stringent with closing fds to prevent security
+        # vulnerabilities.  python 2.7, for example, doesn't set CLOEXEC on
+        # tempfile.TemporaryFile()s
         #
         # https://www.python.org/dev/peps/pep-0446/
         tmp = [tempfile.TemporaryFile() for i in range(10)]
@@ -659,10 +633,12 @@ print(len(os.listdir("/dev/fd")))
             flags &= ~fcntl.FD_CLOEXEC
             fcntl.fcntl(t.fileno(), fcntl.F_SETFD, flags)
 
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 import os
 print(os.listdir("/dev/fd"))
-""")
+"""
+        )
         out = python(py.name).strip()
         self.assertEqual(out, "['0', '1', '2', '3']")
 
@@ -670,9 +646,11 @@ print(os.listdir("/dev/fd"))
             t.close()
 
     def test_pass_fds(self):
-        # guarantee some extra fds in our parent process that don't close on exec.  we have to explicitly do this
-        # because at some point (I believe python 3.4), python started being more stringent with closing fds to prevent
-        # security vulnerabilities.  python 2.7, for example, doesn't set CLOEXEC on tempfile.TemporaryFile()s
+        # guarantee some extra fds in our parent process that don't close on exec.
+        # we have to explicitly do this because at some point (I believe python 3.4),
+        # python started being more stringent with closing fds to prevent security
+        # vulnerabilities.  python 2.7, for example, doesn't set CLOEXEC on
+        # tempfile.TemporaryFile()s
         #
         # https://www.python.org/dev/peps/pep-0446/
         tmp = [tempfile.TemporaryFile() for i in range(10)]
@@ -682,10 +660,12 @@ print(os.listdir("/dev/fd"))
             fcntl.fcntl(t.fileno(), fcntl.F_SETFD, flags)
         last_fd = tmp[-1].fileno()
 
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 import os
 print(os.listdir("/dev/fd"))
-""")
+"""
+        )
         out = python(py.name, _pass_fds=[last_fd]).strip()
         inherited = [0, 1, 2, 3, last_fd]
         inherited_str = [str(i) for i in inherited]
@@ -696,22 +676,22 @@ print(os.listdir("/dev/fd"))
 
     def test_no_arg(self):
         import pwd
+
         from sh import whoami
+
         u1 = whoami().strip()
         u2 = pwd.getpwuid(os.geteuid())[0]
         self.assertEqual(u1, u2)
 
     def test_incompatible_special_args(self):
         from sh import ls
+
         self.assertRaises(TypeError, ls, _iter=True, _piped=True)
 
     def test_invalid_env(self):
         from sh import ls
 
         exc = TypeError
-        if IS_PY2 and MINOR_VER == 6:
-            exc = ValueError
-
         self.assertRaises(exc, ls, _env="XXX")
         self.assertRaises(exc, ls, _env={"foo": 123})
         self.assertRaises(exc, ls, _env={123: "bar"})
@@ -719,20 +699,25 @@ print(os.listdir("/dev/fd"))
     def test_exception(self):
         from sh import ErrorReturnCode_2
 
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 exit(2)
-""")
+"""
+        )
         self.assertRaises(ErrorReturnCode_2, python, py.name)
 
     def test_piped_exception1(self):
         from sh import ErrorReturnCode_2
 
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 import sys
 sys.stdout.write("line1\\n")
 sys.stdout.write("line2\\n")
+sys.stdout.flush()
 exit(2)
-""")
+"""
+        )
 
         py2 = create_tmp_test("")
 
@@ -744,12 +729,15 @@ exit(2)
     def test_piped_exception2(self):
         from sh import ErrorReturnCode_2
 
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 import sys
 sys.stdout.write("line1\\n")
 sys.stdout.write("line2\\n")
+sys.stdout.flush()
 exit(2)
-""")
+"""
+        )
 
         py2 = create_tmp_test("")
 
@@ -768,12 +756,14 @@ exit(2)
 
         def do_import():
             import sh
+
             sh.awoefaowejfw
 
         self.assertRaises(CommandNotFound, do_import)
 
         def do_import():
             import sh
+
             sh.Command("ofajweofjawoe")
 
         self.assertRaises(CommandNotFound, do_import)
@@ -781,37 +771,35 @@ exit(2)
     def test_command_wrapper_equivalence(self):
         from sh import Command, ls, which
 
-        self.assertEqual(Command(which("ls")), ls)
+        self.assertEqual(Command(str(which("ls")).strip()), ls)
 
     def test_doesnt_execute_directories(self):
-        save_path = os.environ['PATH']
+        save_path = os.environ["PATH"]
         bin_dir1 = tempfile.mkdtemp()
         bin_dir2 = tempfile.mkdtemp()
-        gcc_dir1 = os.path.join(bin_dir1, 'gcc')
-        gcc_file2 = os.path.join(bin_dir2, 'gcc')
+        gcc_dir1 = os.path.join(bin_dir1, "gcc")
+        gcc_file2 = os.path.join(bin_dir2, "gcc")
         try:
-            os.environ['PATH'] = os.pathsep.join((bin_dir1, bin_dir2))
+            os.environ["PATH"] = os.pathsep.join((bin_dir1, bin_dir2))
             # a folder named 'gcc', its executable, but should not be
             # discovered by internal which(1)-clone
             os.makedirs(gcc_dir1)
             # an executable named gcc -- only this should be executed
-            bunk_header = '#!/bin/sh\necho $*'
+            bunk_header = "#!/bin/sh\necho $*"
             with open(gcc_file2, "w") as h:
                 h.write(bunk_header)
             os.chmod(gcc_file2, int(0o755))
 
-            import sh
             from sh import gcc
-            if IS_PY3:
-                self.assertEqual(gcc._path,
-                                 gcc_file2.encode(sh.DEFAULT_ENCODING))
-            else:
-                self.assertEqual(gcc._path, gcc_file2)
-            self.assertEqual(gcc('no-error').stdout.strip(),
-                             'no-error'.encode("ascii"))
+
+            self.assertEqual(gcc._path, gcc_file2)
+            self.assertEqual(
+                gcc("no-error", _return_cmd=True).stdout.strip(),
+                "no-error".encode("ascii"),
+            )
 
         finally:
-            os.environ['PATH'] = save_path
+            os.environ["PATH"] = save_path
             if exists(gcc_file2):
                 os.unlink(gcc_file2)
             if exists(gcc_dir1):
@@ -822,13 +810,15 @@ exit(2)
                 os.rmdir(bin_dir2)
 
     def test_multiple_args_short_option(self):
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 from optparse import OptionParser
 parser = OptionParser()
 parser.add_option("-l", dest="long_option")
 options, args = parser.parse_args()
 print(len(options.long_option.split()))
-""")
+"""
+        )
         num_args = int(python(py.name, l="one two three"))  # noqa: E741
         self.assertEqual(num_args, 3)
 
@@ -836,84 +826,120 @@ print(len(options.long_option.split()))
         self.assertEqual(num_args, 3)
 
     def test_multiple_args_long_option(self):
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 from optparse import OptionParser
 parser = OptionParser()
 parser.add_option("-l", "--long-option", dest="long_option")
 options, args = parser.parse_args()
 print(len(options.long_option.split()))
-""")
-        num_args = int(python(py.name, long_option="one two three",
-                              nothing=False))
+"""
+        )
+        num_args = int(python(py.name, long_option="one two three", nothing=False))
         self.assertEqual(num_args, 3)
 
         num_args = int(python(py.name, "--long-option", "one's two's three's"))
         self.assertEqual(num_args, 3)
 
     def test_short_bool_option(self):
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 from optparse import OptionParser
 parser = OptionParser()
 parser.add_option("-s", action="store_true", default=False, dest="short_option")
 options, args = parser.parse_args()
 print(options.short_option)
-""")
+"""
+        )
         self.assertTrue(python(py.name, s=True).strip() == "True")
         self.assertTrue(python(py.name, s=False).strip() == "False")
         self.assertTrue(python(py.name).strip() == "False")
 
     def test_long_bool_option(self):
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 from optparse import OptionParser
 parser = OptionParser()
-parser.add_option("-l", "--long-option", action="store_true", default=False, dest="long_option")
+parser.add_option("-l", "--long-option", action="store_true", default=False, \
+    dest="long_option")
 options, args = parser.parse_args()
 print(options.long_option)
-""")
+"""
+        )
         self.assertTrue(python(py.name, long_option=True).strip() == "True")
         self.assertTrue(python(py.name).strip() == "False")
 
     def test_false_bool_ignore(self):
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 import sys
 print(sys.argv[1:])
-""")
+"""
+        )
         test = True
         self.assertEqual(python(py.name, test and "-n").strip(), "['-n']")
         test = False
         self.assertEqual(python(py.name, test and "-n").strip(), "[]")
 
     def test_composition(self):
-        from sh import ls, wc
-        c1 = int(wc(ls("-A1"), l=True))  # noqa: E741
-        c2 = len(os.listdir("."))
-        self.assertEqual(c1, c2)
+        py1 = create_tmp_test(
+            """
+import sys
+print(int(sys.argv[1]) * 2)
+        """
+        )
+
+        py2 = create_tmp_test(
+            """
+import sys
+print(int(sys.argv[1]) + 1)
+        """
+        )
+
+        res = python(py2.name, python(py1.name, 8)).strip()
+        self.assertEqual("17", res)
 
     def test_incremental_composition(self):
-        from sh import ls, wc
-        c1 = int(wc(ls("-A1", _piped=True), l=True).strip())  # noqa: E741
-        c2 = len(os.listdir("."))
-        self.assertEqual(c1, c2)
+        py1 = create_tmp_test(
+            """
+import sys
+print(int(sys.argv[1]) * 2)
+        """
+        )
+
+        py2 = create_tmp_test(
+            """
+import sys
+print(int(sys.stdin.read()) + 1)
+        """
+        )
+
+        res = python(py2.name, _in=python(py1.name, 8, _piped=True)).strip()
+        self.assertEqual("17", res)
 
     def test_short_option(self):
         from sh import sh
+
         s1 = sh(c="echo test").strip()
         s2 = "test"
         self.assertEqual(s1, s2)
 
     def test_long_option(self):
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 from optparse import OptionParser
 parser = OptionParser()
 parser.add_option("-l", "--long-option", action="store", default="", dest="long_option")
 options, args = parser.parse_args()
 print(options.long_option.upper())
-""")
+"""
+        )
         self.assertTrue(python(py.name, long_option="testing").strip() == "TESTING")
         self.assertTrue(python(py.name).strip() == "")
 
     def test_raw_args(self):
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 from optparse import OptionParser
 parser = OptionParser()
 parser.add_option("--long_option", action="store", default=None,
@@ -926,17 +952,21 @@ if options.long_option1:
     print(options.long_option1.upper())
 else:
     print(options.long_option2.upper())
-""")
-        self.assertEqual(python(py.name,
-                                {"long_option": "underscore"}).strip(), "UNDERSCORE")
+"""
+        )
+        self.assertEqual(
+            python(py.name, {"long_option": "underscore"}).strip(), "UNDERSCORE"
+        )
 
         self.assertEqual(python(py.name, long_option="hyphen").strip(), "HYPHEN")
 
     def test_custom_separator(self):
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 import sys
 print(sys.argv[1])
-""")
+"""
+        )
 
         opt = {"long-option": "underscore"}
         correct = "--long-option=custom=underscore"
@@ -951,55 +981,62 @@ print(sys.argv[1])
         self.assertEqual(out, correct)
 
     def test_custom_separator_space(self):
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 import sys
 print(str(sys.argv[1:]))
-""")
+"""
+        )
         opt = {"long-option": "space"}
         correct = ["--long-option", "space"]
         out = python(py.name, opt, _long_sep=" ").strip()
         self.assertEqual(out, str(correct))
 
     def test_custom_long_prefix(self):
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 import sys
 print(sys.argv[1])
-""")
+"""
+        )
 
-        out = python(py.name, {"long-option": "underscore"},
-                     _long_prefix="-custom-").strip()
+        out = python(
+            py.name, {"long-option": "underscore"}, _long_prefix="-custom-"
+        ).strip()
         self.assertEqual(out, "-custom-long-option=underscore")
 
-        out = python(py.name, {"long-option": True},
-                     _long_prefix="-custom-").strip()
+        out = python(py.name, {"long-option": True}, _long_prefix="-custom-").strip()
         self.assertEqual(out, "-custom-long-option")
 
         # test baking too
-        out = python.bake(py.name, {"long-option": "underscore"},
-                          _long_prefix="-baked-")().strip()
+        out = python.bake(
+            py.name, {"long-option": "underscore"}, _long_prefix="-baked-"
+        )().strip()
         self.assertEqual(out, "-baked-long-option=underscore")
 
-        out = python.bake(py.name, {"long-option": True},
-                          _long_prefix="-baked-")().strip()
+        out = python.bake(
+            py.name, {"long-option": True}, _long_prefix="-baked-"
+        )().strip()
         self.assertEqual(out, "-baked-long-option")
 
     def test_command_wrapper(self):
         from sh import Command, which
 
-        ls = Command(which("ls"))
-        wc = Command(which("wc"))
+        ls = Command(str(which("ls")).strip())
+        wc = Command(str(which("wc")).strip())
 
-        c1 = int(wc(ls("-A1"), l=True))  # noqa: E741
-        c2 = len(os.listdir("."))
+        c1 = int(wc(l=True, _in=ls("-A1", THIS_DIR, _return_cmd=True)))  # noqa: E741
+        c2 = len(os.listdir(THIS_DIR))
 
         self.assertEqual(c1, c2)
 
     def test_background(self):
-        from sh import sleep
         import time
 
+        from sh import sleep
+
         start = time.time()
-        sleep_time = .5
+        sleep_time = 0.5
         p = sleep(sleep_time, _bg=True)
 
         now = time.time()
@@ -1010,26 +1047,25 @@ print(sys.argv[1])
         self.assertGreater(now - start, sleep_time)
 
     def test_background_exception(self):
-        from sh import ls, ErrorReturnCode_1, ErrorReturnCode_2
-        p = ls("/ofawjeofj", _bg=True, _bg_exc=False)  # should not raise
-
-        exc_to_test = ErrorReturnCode_2
-        if IS_MACOS:
-            exc_to_test = ErrorReturnCode_1
-        self.assertRaises(exc_to_test, p.wait)  # should raise
+        py = create_tmp_test("exit(1)")
+        p = python(py.name, _bg=True, _bg_exc=False)  # should not raise
+        self.assertRaises(sh.ErrorReturnCode_1, p.wait)  # should raise
 
     def test_with_context(self):
-        from sh import whoami
         import getpass
 
-        py = create_tmp_test("""
+        from sh import whoami
+
+        py = create_tmp_test(
+            """
 import sys
 import os
 import subprocess
 
 print("with_context")
 subprocess.Popen(sys.argv[1:], shell=False).wait()
-""")
+"""
+        )
 
         cmd1 = python.bake(py.name, _with=True)
         with cmd1:
@@ -1038,10 +1074,12 @@ subprocess.Popen(sys.argv[1:], shell=False).wait()
         self.assertIn(getpass.getuser(), out)
 
     def test_with_context_args(self):
-        from sh import whoami
         import getpass
 
-        py = create_tmp_test("""
+        from sh import whoami
+
+        py = create_tmp_test(
+            """
 import sys
 import os
 import subprocess
@@ -1053,27 +1091,31 @@ options, args = parser.parse_args()
 
 if options.opt:
     subprocess.Popen(args[0], shell=False).wait()
-""")
+"""
+        )
         with python(py.name, opt=True, _with=True):
             out = whoami()
-        self.assertTrue(getpass.getuser() == out.strip())
+        self.assertEqual(getpass.getuser(), out.strip())
 
         with python(py.name, _with=True):
             out = whoami()
-        self.assertTrue(out == "")
+        self.assertEqual(out.strip(), "")
 
     def test_binary_input(self):
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 import sys
 data = sys.stdin.read()
 sys.stdout.write(data)
-""")
-        data = b'1234'
-        out = python(py.name, _in=data)
+"""
+        )
+        data = b"1234"
+        out = pythons(py.name, _in=data)
         self.assertEqual(out, "1234")
 
     def test_err_to_out(self):
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 import sys
 import os
 
@@ -1081,12 +1123,14 @@ sys.stdout.write("stdout")
 sys.stdout.flush()
 sys.stderr.write("stderr")
 sys.stderr.flush()
-""")
-        stdout = python(py.name, _err_to_out=True)
+"""
+        )
+        stdout = pythons(py.name, _err_to_out=True)
         self.assertEqual(stdout, "stdoutstderr")
 
     def test_err_to_out_and_sys_stdout(self):
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 import sys
 import os
 
@@ -1094,40 +1138,47 @@ sys.stdout.write("stdout")
 sys.stdout.flush()
 sys.stderr.write("stderr")
 sys.stderr.flush()
-""")
+"""
+        )
         master, slave = os.pipe()
-        stdout = python(py.name, _err_to_out=True, _out=slave)
+        stdout = pythons(py.name, _err_to_out=True, _out=slave)
         self.assertEqual(stdout, "")
         self.assertEqual(os.read(master, 12), b"stdoutstderr")
 
     def test_err_piped(self):
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 import sys
 sys.stderr.write("stderr")
-""")
+"""
+        )
 
-        py2 = create_tmp_test("""
+        py2 = create_tmp_test(
+            """
 import sys
 while True:
     line = sys.stdin.read()
     if not line:
         break
     sys.stdout.write(line)
-""")
+"""
+        )
 
-        out = python(python("-u", py.name, _piped="err"), "-u", py2.name)
+        out = pythons("-u", py2.name, _in=python("-u", py.name, _piped="err"))
         self.assertEqual(out, "stderr")
 
     def test_out_redirection(self):
         import tempfile
 
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 import sys
 import os
 
 sys.stdout.write("stdout")
 sys.stderr.write("stderr")
-""")
+"""
+        )
 
         file_obj = tempfile.NamedTemporaryFile()
         out = python(py.name, _out=file_obj)
@@ -1155,13 +1206,15 @@ sys.stderr.write("stderr")
     def test_err_redirection(self):
         import tempfile
 
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 import sys
 import os
 
 sys.stdout.write("stdout")
 sys.stderr.write("stderr")
-""")
+"""
+        )
         file_obj = tempfile.NamedTemporaryFile()
         p = python("-u", py.name, _err=file_obj)
 
@@ -1185,11 +1238,42 @@ sys.stderr.write("stderr")
         self.assertEqual(stderr, "stderr")
         self.assertGreater(len(p.stderr), 0)
 
+    def test_out_and_err_redirection(self):
+        import tempfile
+
+        py = create_tmp_test(
+            """
+import sys
+import os
+
+sys.stdout.write("stdout")
+sys.stderr.write("stderr")
+"""
+        )
+        err_file_obj = tempfile.NamedTemporaryFile()
+        out_file_obj = tempfile.NamedTemporaryFile()
+        p = python(py.name, _out=out_file_obj, _err=err_file_obj, _tee=("err", "out"))
+
+        out_file_obj.seek(0)
+        stdout = out_file_obj.read().decode()
+        out_file_obj.close()
+
+        err_file_obj.seek(0)
+        stderr = err_file_obj.read().decode()
+        err_file_obj.close()
+
+        self.assertEqual(stdout, "stdout")
+        self.assertEqual(p.stdout, b"stdout")
+        self.assertEqual(stderr, "stderr")
+        self.assertEqual(p.stderr, b"stderr")
+
     def test_tty_tee(self):
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 import sys
 sys.stdout.write("stdout")
-""")
+"""
+        )
         read, write = pty.openpty()
         out = python("-u", py.name, _out=write).stdout
         tee = os.read(read, 6)
@@ -1210,32 +1294,37 @@ sys.stdout.write("stdout")
 
     def test_err_redirection_actual_file(self):
         import tempfile
+
         file_obj = tempfile.NamedTemporaryFile()
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 import sys
 import os
 
 sys.stdout.write("stdout")
 sys.stderr.write("stderr")
-""")
-        stdout = python("-u", py.name, _err=file_obj.name).wait()
+"""
+        )
+        stdout = pythons("-u", py.name, _err=file_obj.name)
         file_obj.seek(0)
         stderr = file_obj.read().decode()
         file_obj.close()
-        self.assertTrue(stdout == "stdout")
-        self.assertTrue(stderr == "stderr")
+        self.assertEqual(stdout, "stdout")
+        self.assertEqual(stderr, "stderr")
 
     def test_subcommand_and_bake(self):
         import getpass
 
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 import sys
 import os
 import subprocess
 
 print("subcommand")
 subprocess.Popen(sys.argv[1:], shell=False).wait()
-""")
+"""
+        )
 
         cmd1 = python.bake(py.name)
         out = cmd1.whoami()
@@ -1243,34 +1332,39 @@ subprocess.Popen(sys.argv[1:], shell=False).wait()
         self.assertIn(getpass.getuser(), out)
 
     def test_multiple_bakes(self):
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 import sys
 sys.stdout.write(str(sys.argv[1:]))
-""")
+"""
+        )
 
         out = python.bake(py.name).bake("bake1").bake("bake2")()
-        self.assertEqual("['bake1', 'bake2']", out)
+        self.assertEqual("['bake1', 'bake2']", str(out))
 
     def test_arg_preprocessor(self):
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 import sys
 sys.stdout.write(str(sys.argv[1:]))
-""")
+"""
+        )
 
         def arg_preprocess(args, kwargs):
             args.insert(0, "preprocessed")
             kwargs["a-kwarg"] = 123
             return args, kwargs
 
-        cmd = python.bake(py.name, _arg_preprocess=arg_preprocess)
+        cmd = pythons.bake(py.name, _arg_preprocess=arg_preprocess)
         out = cmd("arg")
         self.assertEqual("['preprocessed', 'arg', '--a-kwarg=123']", out)
 
     def test_bake_args_come_first(self):
         from sh import ls
+
         ls = ls.bake(h=True)
 
-        ran = ls("-la").ran
+        ran = ls("-la", _return_cmd=True).ran
         ft = ran.index("-h")
         self.assertIn("-la", ran[ft:])
 
@@ -1284,11 +1378,13 @@ sys.stdout.write(str(sys.argv[1:]))
 
     # https://github.com/amoffat/sh/pull/252
     def test_stdout_pipe(self):
-        py = create_tmp_test(r"""
+        py = create_tmp_test(
+            r"""
 import sys
 
 sys.stdout.write("foobar\n")
-""")
+"""
+        )
 
         read_fd, write_fd = os.pipe()
         python(py.name, _out=write_fd, u=True)
@@ -1297,6 +1393,7 @@ sys.stdout.write("foobar\n")
             self.fail("Timeout while reading from pipe")
 
         import signal
+
         signal.signal(signal.SIGALRM, alarm)
         signal.alarm(3)
 
@@ -1306,12 +1403,14 @@ sys.stdout.write("foobar\n")
         signal.signal(signal.SIGALRM, signal.SIG_DFL)
 
     def test_stdout_callback(self):
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 import sys
 import os
 
 for i in range(5): print(i)
-""")
+"""
+        )
         stdout = []
 
         def agg(line):
@@ -1325,7 +1424,8 @@ for i in range(5): print(i)
     def test_stdout_callback_no_wait(self):
         import time
 
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 import sys
 import os
 import time
@@ -1333,31 +1433,36 @@ import time
 for i in range(5):
     print(i)
     time.sleep(.5)
-""")
+"""
+        )
 
         stdout = []
 
-        def agg(line): stdout.append(line)
+        def agg(line):
+            stdout.append(line)
 
         python("-u", py.name, _out=agg, _bg=True)
 
         # we give a little pause to make sure that the NamedTemporaryFile
         # exists when the python process actually starts
-        time.sleep(.5)
+        time.sleep(0.5)
 
         self.assertNotEqual(len(stdout), 5)
 
     def test_stdout_callback_line_buffered(self):
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 import sys
 import os
 
 for i in range(5): print("herpderp")
-""")
+"""
+        )
 
         stdout = []
 
-        def agg(line): stdout.append(line)
+        def agg(line):
+            stdout.append(line)
 
         p = python("-u", py.name, _out=agg, _out_bufsize=1)
         p.wait()
@@ -1365,16 +1470,19 @@ for i in range(5): print("herpderp")
         self.assertEqual(len(stdout), 5)
 
     def test_stdout_callback_line_unbuffered(self):
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 import sys
 import os
 
 for i in range(5): print("herpderp")
-""")
+"""
+        )
 
         stdout = []
 
-        def agg(char): stdout.append(char)
+        def agg(char):
+            stdout.append(char)
 
         p = python("-u", py.name, _out=agg, _out_bufsize=0)
         p.wait()
@@ -1383,16 +1491,19 @@ for i in range(5): print("herpderp")
         self.assertEqual(len(stdout), len("herpderp") * 5 + 5)
 
     def test_stdout_callback_buffered(self):
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 import sys
 import os
 
 for i in range(5): sys.stdout.write("herpderp")
-""")
+"""
+        )
 
         stdout = []
 
-        def agg(chunk): stdout.append(chunk)
+        def agg(chunk):
+            stdout.append(chunk)
 
         p = python("-u", py.name, _out=agg, _out_bufsize=4)
         p.wait()
@@ -1400,16 +1511,16 @@ for i in range(5): sys.stdout.write("herpderp")
         self.assertEqual(len(stdout), len("herp") / 2 * 5)
 
     def test_stdout_callback_with_input(self):
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 import sys
 import os
-IS_PY3 = sys.version_info[0] == 3
-if IS_PY3: raw_input = input
 
 for i in range(5): print(str(i))
-derp = raw_input("herp? ")
+derp = input("herp? ")
 print(derp)
-""")
+"""
+        )
 
         def agg(line, stdin):
             if line.strip() == "4":
@@ -1421,12 +1532,14 @@ print(derp)
         self.assertIn("derp", p)
 
     def test_stdout_callback_exit(self):
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 import sys
 import os
 
 for i in range(5): print(i)
-""")
+"""
+        )
 
         stdout = []
 
@@ -1444,7 +1557,9 @@ for i in range(5): print(i)
 
     def test_stdout_callback_terminate(self):
         import signal
-        py = create_tmp_test("""
+
+        py = create_tmp_test(
+            """
 import sys
 import os
 import time
@@ -1452,7 +1567,8 @@ import time
 for i in range(5):
     print(i)
     time.sleep(.5)
-""")
+"""
+        )
 
         stdout = []
 
@@ -1464,6 +1580,7 @@ for i in range(5):
                 return True
 
         import sh
+
         caught_signal = False
         try:
             p = python("-u", py.name, _out=agg, _bg=True)
@@ -1479,7 +1596,8 @@ for i in range(5):
     def test_stdout_callback_kill(self):
         import signal
 
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 import sys
 import os
 import time
@@ -1487,7 +1605,8 @@ import time
 for i in range(5):
     print(i)
     time.sleep(.5)
-""")
+"""
+        )
 
         stdout = []
 
@@ -1499,6 +1618,7 @@ for i in range(5):
                 return True
 
         import sh
+
         caught_signal = False
         try:
             p = python("-u", py.name, _out=agg, _bg=True)
@@ -1514,23 +1634,27 @@ for i in range(5):
     def test_general_signal(self):
         from signal import SIGINT
 
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 import sys
 import os
 import time
 import signal
 
+i = 0
 def sig_handler(sig, frame):
-    print(10)
-    exit(0)
+    global i
+    i = 42
 
 signal.signal(signal.SIGINT, sig_handler)
 
-for i in range(5):
+for _ in range(6):
     print(i)
+    i += 1
     sys.stdout.flush()
-    time.sleep(0.5)
-""")
+    time.sleep(1)
+"""
+        )
 
         stdout = []
 
@@ -1545,10 +1669,11 @@ for i in range(5):
         p.wait()
 
         self.assertEqual(p.process.exit_code, 0)
-        self.assertEqual(p, "0\n1\n2\n3\n10\n")
+        self.assertEqual(str(p), "0\n1\n2\n3\n42\n43\n")
 
     def test_iter_generator(self):
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 import sys
 import os
 import time
@@ -1556,7 +1681,8 @@ import time
 for i in range(42):
     print(i)
     sys.stdout.flush()
-""")
+"""
+        )
 
         out = []
         for line in python(py.name, _iter=True):
@@ -1564,6 +1690,128 @@ for i in range(42):
         self.assertEqual(len(out), 42)
         self.assertEqual(sum(out), 861)
 
+    def test_async(self):
+        py = create_tmp_test(
+            """
+import os
+import time
+time.sleep(0.5)
+print("hello")
+"""
+        )
+
+        alternating = []
+        q = AQueue()
+
+        async def producer(q):
+            alternating.append(1)
+            msg = await python(py.name, _async=True)
+            alternating.append(1)
+            await q.put(msg.strip())
+
+        async def consumer(q):
+            await asyncio.sleep(0.1)
+            alternating.append(2)
+            msg = await q.get()
+            self.assertEqual(msg, "hello")
+            alternating.append(2)
+
+        loop = asyncio.get_event_loop()
+        fut = asyncio.gather(producer(q), consumer(q))
+        loop.run_until_complete(fut)
+        self.assertListEqual(alternating, [1, 2, 1, 2])
+
+    def test_async_exc(self):
+        py = create_tmp_test("""exit(34)""")
+
+        async def producer():
+            await python(py.name, _async=True)
+
+        loop = asyncio.get_event_loop()
+        self.assertRaises(sh.ErrorReturnCode_34, loop.run_until_complete, producer())
+
+    def test_async_iter(self):
+        py = create_tmp_test(
+            """
+for i in range(5):
+    print(i)
+"""
+        )
+        q = AQueue()
+
+        # this list will prove that our coroutines are yielding to each other as each
+        # line is produced
+        alternating = []
+
+        async def producer(q):
+            async for line in python(py.name, _iter=True):
+                alternating.append(1)
+                await q.put(int(line.strip()))
+
+            await q.put(None)
+
+        async def consumer(q):
+            while True:
+                line = await q.get()
+                if line is None:
+                    return
+                alternating.append(2)
+
+        loop = asyncio.get_event_loop()
+        res = asyncio.gather(producer(q), consumer(q))
+        loop.run_until_complete(res)
+        self.assertListEqual(alternating, [1, 2, 1, 2, 1, 2, 1, 2, 1, 2])
+
+    def test_async_iter_exc(self):
+        py = create_tmp_test(
+            """
+for i in range(5):
+    print(i)
+exit(34)
+"""
+        )
+
+        lines = []
+
+        async def producer():
+            async for line in python(py.name, _async=True):
+                lines.append(int(line.strip()))
+
+        loop = asyncio.get_event_loop()
+        self.assertRaises(sh.ErrorReturnCode_34, loop.run_until_complete, producer())
+
+    def test_handle_both_out_and_err(self):
+        py = create_tmp_test(
+            """
+import sys
+import os
+import time
+
+for i in range(42):
+    sys.stdout.write(str(i) + "\\n")
+    sys.stdout.flush()
+    if i % 2 == 0:
+        sys.stderr.write(str(i) + "\\n")
+        sys.stderr.flush()
+"""
+        )
+
+        out = []
+
+        def handle_out(line):
+            out.append(int(line.strip()))
+
+        err = []
+
+        def handle_err(line):
+            err.append(int(line.strip()))
+
+        p = python(py.name, _err=handle_err, _out=handle_out, _bg=True)
+        p.wait()
+
+        self.assertEqual(sum(out), 861)
+        self.assertEqual(sum(err), 420)
+
     def test_iter_unicode(self):
         # issue https://github.com/amoffat/sh/issues/224
         test_string = "\xe4\xbd\x95\xe4\xbd\x95\n" * 150  # len > buffer_s
@@ -1575,12 +1823,14 @@ for i in range(42):
     def test_nonblocking_iter(self):
         from errno import EWOULDBLOCK
 
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 import time
 import sys
 time.sleep(1)
 sys.stdout.write("stdout")
-""")
+"""
+        )
         count = 0
         value = None
         for line in python(py.name, _iter_noblock=True):
@@ -1591,12 +1841,14 @@ sys.stdout.write("stdout")
         self.assertGreater(count, 0)
         self.assertEqual(value, "stdout")
 
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 import time
 import sys
 time.sleep(1)
 sys.stderr.write("stderr")
-""")
+"""
+        )
 
         count = 0
         value = None
@@ -1609,13 +1861,15 @@ sys.stderr.write("stderr")
         self.assertEqual(value, "stderr")
 
     def test_for_generator_to_err(self):
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 import sys
 import os
 
 for i in range(42):
     sys.stderr.write(str(i)+"\\n")
-""")
+"""
+        )
 
         out = []
         for line in python("-u", py.name, _iter="err"):
@@ -1629,7 +1883,8 @@ for i in range(42):
         self.assertEqual(len(out), 0)
 
     def test_sigpipe(self):
-        py1 = create_tmp_test("""
+        py1 = create_tmp_test(
+            """
 import sys
 import os
 import time
@@ -1643,9 +1898,11 @@ signal.signal(signal.SIGPIPE, signal.SIG_DFL)
 for letter in "andrew":
     time.sleep(0.6)
     print(letter)
-        """)
+        """
+        )
 
-        py2 = create_tmp_test("""
+        py2 = create_tmp_test(
+            """
 import sys
 import os
 import time
@@ -1656,10 +1913,15 @@ while True:
         break
     print(line.strip().upper())
     exit(0)
-        """)
+        """
+        )
 
         p1 = python("-u", py1.name, _piped="out")
-        p2 = python(p1, "-u", py2.name)
+        p2 = python(
+            "-u",
+            py2.name,
+            _in=p1,
+        )
 
         # SIGPIPE should happen, but it shouldn't be an error, since _piped is
         # truthful
@@ -1669,7 +1931,8 @@ while True:
     def test_piped_generator(self):
         import time
 
-        py1 = create_tmp_test("""
+        py1 = create_tmp_test(
+            """
 import sys
 import os
 import time
@@ -1677,9 +1940,11 @@ import time
 for letter in "andrew":
     time.sleep(0.6)
     print(letter)
-        """)
+        """
+        )
 
-        py2 = create_tmp_test("""
+        py2 = create_tmp_test(
+            """
 import sys
 import os
 import time
@@ -1689,14 +1954,16 @@ while True:
     if not line:
         break
     print(line.strip().upper())
-        """)
+        """
+        )
 
         times = []
         last_received = None
 
         letters = ""
-        for line in python(python("-u", py1.name, _piped="out"), "-u",
-                           py2.name, _iter=True):
+        for line in python(
+            "-u", py2.name, _iter=True, _in=python("-u", py1.name, _piped="out")
+        ):
             letters += line.strip()
 
             now = time.time()
@@ -1705,17 +1972,32 @@ while True:
             last_received = now
 
         self.assertEqual("ANDREW", letters)
-        self.assertTrue(all([t > .3 for t in times]))
+        self.assertTrue(all([t > 0.3 for t in times]))
+
+    def test_no_out_iter_err(self):
+        py = create_tmp_test(
+            """
+import sys
+sys.stderr.write("1\\n")
+sys.stderr.write("2\\n")
+sys.stderr.write("3\\n")
+sys.stderr.flush()
+"""
+        )
+        nums = [int(num.strip()) for num in python(py.name, _iter="err", _no_out=True)]
+        assert nums == [1, 2, 3]
 
     def test_generator_and_callback(self):
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 import sys
 import os
 
 for i in range(42):
     sys.stderr.write(str(i * 2)+"\\n")
     print(i)
-""")
+"""
+        )
 
         stderr = []
 
@@ -1730,14 +2012,15 @@ for i in range(42):
         self.assertEqual(sum(stderr), 1722)
 
     def test_cast_bg(self):
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 import sys
 import time
 time.sleep(0.5)
 sys.stdout.write(sys.argv[1])
-""")
+"""
+        )
         self.assertEqual(int(python(py.name, "123", _bg=True)), 123)
-        self.assertEqual(long(python(py.name, "456", _bg=True)), 456)
         self.assertEqual(float(python(py.name, "789", _bg=True)), 789.0)
 
     def test_cmd_eq(self):
@@ -1758,29 +2041,30 @@ sys.stdout.write(sys.argv[1])
         system_python(py.name, _fg=True)
 
     def test_fg_false(self):
-        """ https://github.com/amoffat/sh/issues/520 """
+        """https://github.com/amoffat/sh/issues/520"""
         py = create_tmp_test("print('hello')")
         buf = StringIO()
         python(py.name, _fg=False, _out=buf)
         self.assertEqual(buf.getvalue(), "hello\n")
 
     def test_fg_true(self):
-        """ https://github.com/amoffat/sh/issues/520 """
+        """https://github.com/amoffat/sh/issues/520"""
         py = create_tmp_test("print('hello')")
         buf = StringIO()
         self.assertRaises(TypeError, python, py.name, _fg=True, _out=buf)
 
     def test_fg_env(self):
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 import os
 code = int(os.environ.get("EXIT", "0"))
 exit(code)
-""")
+"""
+        )
 
         env = os.environ.copy()
         env["EXIT"] = "3"
-        self.assertRaises(sh.ErrorReturnCode_3, python, py.name, _fg=True,
-                          _env=env)
+        self.assertRaises(sh.ErrorReturnCode_3, python, py.name, _fg=True, _env=env)
 
     def test_fg_alternative(self):
         py = create_tmp_test("exit(0)")
@@ -1797,24 +2081,38 @@ exit(code)
         outfile.seek(0)
         self.assertEqual(b"output\n", outfile.read())
 
+    def test_out_pathlike(self):
+        from pathlib import Path
+
+        outfile = tempfile.NamedTemporaryFile()
+        py = create_tmp_test("print('output')")
+        python(py.name, _out=Path(outfile.name))
+        outfile.seek(0)
+        self.assertEqual(b"output\n", outfile.read())
+
     def test_bg_exit_code(self):
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 import time
 time.sleep(1)
 exit(49)
-""")
+"""
+        )
         p = python(py.name, _ok_code=49, _bg=True)
         self.assertEqual(49, p.exit_code)
 
     def test_cwd(self):
-        from sh import pwd
         from os.path import realpath
+
+        from sh import pwd
+
         self.assertEqual(str(pwd(_cwd="/tmp")), realpath("/tmp") + "\n")
         self.assertEqual(str(pwd(_cwd="/etc")), realpath("/etc") + "\n")
 
     def test_cwd_fg(self):
         td = realpath(tempfile.mkdtemp())
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 import sh
 import os
 from os.path import realpath
@@ -1822,7 +2120,10 @@ orig = realpath(os.getcwd())
 print(orig)
 sh.pwd(_cwd="{newdir}", _fg=True)
 print(realpath(os.getcwd()))
-""".format(newdir=td))
+""".format(
+                newdir=td
+            )
+        )
 
         orig, newdir, restored = python(py.name).strip().split("\n")
         newdir = realpath(newdir)
@@ -1841,11 +2142,12 @@ print(realpath(os.getcwd()))
         stdin.flush()
         stdin.seek(0)
 
-        out = tr(tr("[:lower:]", "[:upper:]", _in=data), "[:upper:]", "[:lower:]")
+        out = tr("[:upper:]", "[:lower:]", _in=tr("[:lower:]", "[:upper:]", _in=data))
         self.assertTrue(out == data)
 
     def test_tty_input(self):
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 import sys
 import os
 
@@ -1858,7 +2160,8 @@ if os.isatty(sys.stdin.fileno()):
 else:
     sys.stdout.write("no tty attached!\\n")
     sys.stdout.flush()
-""")
+"""
+        )
 
         test_pw = "test123"
         expected_stars = "*" * len(test_pw)
@@ -1881,10 +2184,11 @@ else:
         self.assertEqual(d["stars"], expected_stars)
 
         response = python(py.name)
-        self.assertEqual(response, "no tty attached!\n")
+        self.assertEqual(str(response), "no tty attached!\n")
 
     def test_tty_output(self):
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 import sys
 import os
 
@@ -1894,12 +2198,13 @@ if os.isatty(sys.stdout.fileno()):
 else:
     sys.stdout.write("no tty attached")
     sys.stdout.flush()
-""")
+"""
+        )
 
-        out = python(py.name, _tty_out=True)
+        out = pythons(py.name, _tty_out=True)
         self.assertEqual(out, "tty attached")
 
-        out = python(py.name, _tty_out=False)
+        out = pythons(py.name, _tty_out=False)
         self.assertEqual(out, "no tty attached")
 
     def test_stringio_output(self):
@@ -1909,15 +2214,7 @@ else:
         echo("-n", "testing 123", _out=out)
         self.assertEqual(out.getvalue(), "testing 123")
 
-        out = cStringIO()
-        echo("-n", "testing 123", _out=out)
-        self.assertEqual(out.getvalue().decode(), "testing 123")
-
-        out = ioStringIO()
-        echo("-n", "testing 123", _out=out)
-        self.assertEqual(out.getvalue(), "testing 123")
-
-        out = iocStringIO()
+        out = BytesIO()
         echo("-n", "testing 123", _out=out)
         self.assertEqual(out.getvalue().decode(), "testing 123")
 
@@ -1941,7 +2238,8 @@ else:
         self.assertEqual(len(output), 100)
 
     def test_change_stdout_buffering(self):
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 import sys
 import os
 
@@ -1960,7 +2258,8 @@ sys.stdout.flush()
 # would ruin our test.  we want to make sure we get the string "unbuffered"
 # before the process ends, without writing a newline
 sys.stdin.read(1)
-""")
+"""
+        )
 
         d = {
             "newline_buffer_success": False,
@@ -1990,10 +2289,12 @@ sys.stdin.read(1)
         self.assertTrue(d["unbuffered_success"])
 
     def test_callable_interact(self):
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 import sys
 sys.stdout.write("line1")
-""")
+"""
+        )
 
         class Callable(object):
             def __init__(self):
@@ -2007,19 +2308,23 @@ sys.stdout.write("line1")
         self.assertEqual(cb.line, "line1")
 
     def test_encoding(self):
-        return self.skipTest("what's the best way to test a different '_encoding' special keyword argument?")
+        return self.skipTest(
+            "what's the best way to test a different '_encoding' special keyword"
+            "argument?"
+        )
 
     def test_timeout(self):
-        import sh
         from time import time
 
+        import sh
+
         sleep_for = 3
         timeout = 1
         started = time()
         try:
             sh.sleep(sleep_for, _timeout=timeout).wait()
         except sh.TimeoutException as e:
-            assert 'sleep 3' in e.full_cmd
+            assert "sleep 3" in e.full_cmd
         else:
             self.fail("no timeout exception")
         elapsed = time() - started
@@ -2044,25 +2349,29 @@ sys.stdout.write("line1")
         self.assertRaises(RuntimeError, p.wait, timeout=-3)
 
     def test_binary_pipe(self):
-        binary = b'\xec;\xedr\xdbF'
+        binary = b"\xec;\xedr\xdbF"
 
-        py1 = create_tmp_test("""
+        py1 = create_tmp_test(
+            """
 import sys
 import os
 
 sys.stdout = os.fdopen(sys.stdout.fileno(), "wb", 0)
 sys.stdout.write(b'\\xec;\\xedr\\xdbF')
-""")
+"""
+        )
 
-        py2 = create_tmp_test("""
+        py2 = create_tmp_test(
+            """
 import sys
 import os
 
 sys.stdin = os.fdopen(sys.stdin.fileno(), "rb", 0)
 sys.stdout = os.fdopen(sys.stdout.fileno(), "wb", 0)
 sys.stdout.write(sys.stdin.read())
-""")
-        out = python(python(py1.name), py2.name)
+"""
+        )
+        out = python(py2.name, _in=python(py1.name))
         self.assertEqual(out.stdout, binary)
 
     # designed to trigger the "... (%d more, please see e.stdout)" output
@@ -2070,39 +2379,37 @@ sys.stdout.write(sys.stdin.read())
     def test_failure_with_large_output(self):
         from sh import ErrorReturnCode_1
 
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 print("andrewmoffat" * 1000)
 exit(1)
-""")
+"""
+        )
         self.assertRaises(ErrorReturnCode_1, python, py.name)
 
     # designed to check if the ErrorReturnCode constructor does not raise
     # an UnicodeDecodeError
     def test_non_ascii_error(self):
-        from sh import ls, ErrorReturnCode
+        from sh import ErrorReturnCode, ls
 
         test = "/á"
-
-        # coerce to unicode
-        if IS_PY3:
-            pass
-        else:
-            test = test.decode("utf8")
-
-        self.assertRaises(ErrorReturnCode, ls, test)
+        self.assertRaises(ErrorReturnCode, ls, test, _encoding="utf8")
 
     def test_no_out(self):
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 import sys
 sys.stdout.write("stdout")
 sys.stderr.write("stderr")
-""")
+"""
+        )
         p = python(py.name, _no_out=True)
         self.assertEqual(p.stdout, b"")
         self.assertEqual(p.stderr, b"stderr")
         self.assertTrue(p.process._pipe_queue.empty())
 
-        def callback(line): pass
+        def callback(line):
+            pass
 
         p = python(py.name, _out=callback)
         self.assertEqual(p.stdout, b"")
@@ -2115,26 +2422,31 @@ sys.stderr.write("stderr")
         self.assertFalse(p.process._pipe_queue.empty())
 
     def test_tty_stdin(self):
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 import sys
 sys.stdout.write(sys.stdin.read())
 sys.stdout.flush()
-""")
-        out = python(py.name, _in="test\n", _tty_in=True)
+"""
+        )
+        out = pythons(py.name, _in="test\n", _tty_in=True)
         self.assertEqual("test\n", out)
 
     def test_no_err(self):
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 import sys
 sys.stdout.write("stdout")
 sys.stderr.write("stderr")
-""")
+"""
+        )
         p = python(py.name, _no_err=True)
         self.assertEqual(p.stderr, b"")
         self.assertEqual(p.stdout, b"stdout")
         self.assertFalse(p.process._pipe_queue.empty())
 
-        def callback(line): pass
+        def callback(line):
+            pass
 
         p = python(py.name, _err=callback)
         self.assertEqual(p.stderr, b"")
@@ -2150,50 +2462,57 @@ sys.stderr.write("stderr")
         from sh import ls
 
         # calling a command regular should fill up the pipe_queue
-        p = ls()
+        p = ls(_return_cmd=True)
         self.assertFalse(p.process._pipe_queue.empty())
 
         # calling a command with a callback should not
-        def callback(line): pass
+        def callback(line):
+            pass
 
-        p = ls(_out=callback)
+        p = ls(_out=callback, _return_cmd=True)
         self.assertTrue(p.process._pipe_queue.empty())
 
         # calling a command regular with no_pipe also should not
-        p = ls(_no_pipe=True)
+        p = ls(_no_pipe=True, _return_cmd=True)
         self.assertTrue(p.process._pipe_queue.empty())
 
     def test_decode_error_handling(self):
         from functools import partial
 
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 # -*- coding: utf8 -*-
 import sys
 import os
 sys.stdout = os.fdopen(sys.stdout.fileno(), 'wb')
-IS_PY3 = sys.version_info[0] == 3
-if IS_PY3:
-    sys.stdout.write(bytes("te漢字st", "utf8"))
-else:
-    sys.stdout.write("te漢字st")
-""")
-        fn = partial(python, py.name, _encoding="ascii")
-
-        def s(fn): str(fn())
+sys.stdout.write(bytes("te漢字st", "utf8") + "äåéë".encode("latin_1"))
+"""
+        )
+        fn = partial(pythons, py.name, _encoding="ascii")
+        self.assertRaises(UnicodeDecodeError, fn)
 
-        self.assertRaises(UnicodeDecodeError, s, fn)
+        p = pythons(py.name, _encoding="ascii", _decode_errors="ignore")
+        self.assertEqual(p, "test")
 
-        p = python(py.name, _encoding="ascii", _decode_errors="ignore")
+        p = pythons(
+            py.name,
+            _encoding="ascii",
+            _decode_errors="ignore",
+            _out=sys.stdout,
+            _tee=True,
+        )
         self.assertEqual(p, "test")
 
     def test_signal_exception(self):
         from sh import SignalException_15
 
         def throw_terminate_signal():
-            py = create_tmp_test("""
+            py = create_tmp_test(
+                """
 import time
 while True: time.sleep(1)
-""")
+"""
+            )
             to_kill = python(py.name, _bg=True)
             to_kill.terminate()
             to_kill.wait()
@@ -2201,12 +2520,15 @@ while True: time.sleep(1)
         self.assertRaises(SignalException_15, throw_terminate_signal)
 
     def test_signal_group(self):
-        child = create_tmp_test("""
+        child = create_tmp_test(
+            """
 import time
 time.sleep(3)
-""")
+"""
+        )
 
-        parent = create_tmp_test("""
+        parent = create_tmp_test(
+            """
 import sys
 import sh
 python = sh.Command(sys.executable)
@@ -2214,10 +2536,12 @@ p = python("{child_file}", _bg=True, _new_session=False)
 print(p.pid)
 print(p.process.pgid)
 p.wait()
-""", child_file=child.name)
+""",
+            child_file=child.name,
+        )
 
         def launch():
-            p = python(parent.name, _bg=True, _iter=True)
+            p = python(parent.name, _bg=True, _iter=True, _new_group=True)
             child_pid = int(next(p).strip())
             child_pgid = int(next(p).strip())
             parent_pid = p.pid
@@ -2260,14 +2584,16 @@ p.wait()
         assert_dead(child_pid)
 
     def test_pushd(self):
-        """ test basic pushd functionality """
+        """test basic pushd functionality"""
+        child = realpath(tempfile.mkdtemp())
+
         old_wd1 = sh.pwd().strip()
         old_wd2 = os.getcwd()
 
         self.assertEqual(old_wd1, old_wd2)
-        self.assertNotEqual(old_wd1, tempdir)
+        self.assertNotEqual(old_wd1, child)
 
-        with sh.pushd(tempdir):
+        with sh.pushd(child):
             new_wd1 = sh.pwd().strip()
             new_wd2 = os.getcwd()
 
@@ -2276,33 +2602,20 @@ p.wait()
         self.assertEqual(old_wd3, old_wd4)
         self.assertEqual(old_wd1, old_wd3)
 
-        self.assertEqual(new_wd1, tempdir)
-        self.assertEqual(new_wd2, tempdir)
+        self.assertEqual(new_wd1, child)
+        self.assertEqual(new_wd2, child)
 
     def test_pushd_cd(self):
-        """ test that pushd works like pushd/popd with built-in cd correctly """
-        import sh
-
+        """test that pushd works like pushd/popd"""
         child = realpath(tempfile.mkdtemp())
         try:
             old_wd = os.getcwd()
             with sh.pushd(tempdir):
-                self.assertEqual(tempdir, os.getcwd())
-                sh.cd(child)
-                self.assertEqual(child, os.getcwd())
-
+                self.assertEqual(str(tempdir), os.getcwd())
             self.assertEqual(old_wd, os.getcwd())
         finally:
             os.rmdir(child)
 
-    def test_cd_homedir(self):
-        orig = os.getcwd()
-        my_dir = os.path.realpath(os.path.expanduser("~"))  # Use realpath because homedir may be a symlink
-        sh.cd()
-
-        self.assertNotEqual(orig, os.getcwd())
-        self.assertEqual(my_dir, os.getcwd())
-
     def test_non_existant_cwd(self):
         from sh import ls
 
@@ -2333,11 +2646,13 @@ p.wait()
                 self.exit_code = exit_code
                 self.success = success
 
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 from time import time, sleep
 sleep(1)
 print(time())
-""")
+"""
+        )
 
         callback = Callback()
         p = python(py.name, _done=callback, _bg=True)
@@ -2353,6 +2668,31 @@ print(time())
         self.assertEqual(callback.exit_code, 0)
         self.assertTrue(callback.success)
 
+    # https://github.com/amoffat/sh/issues/564
+    def test_done_callback_no_deadlock(self):
+        import time
+
+        py = create_tmp_test(
+            """
+from sh import sleep
+
+def done(cmd, success, exit_code):
+    print(cmd, success, exit_code)
+
+sleep('1', _done=done)
+"""
+        )
+
+        p = python(py.name, _bg=True, _timeout=2)
+
+        # do a little setup to prove that a command with a _done callback is run
+        # in the background
+        wait_start = time.time()
+        p.wait()
+        wait_elapsed = time.time() - wait_start
+
+        self.assertLess(abs(wait_elapsed - 1.0), 1.0)
+
     def test_fork_exc(self):
         from sh import ForkException
 
@@ -2363,10 +2703,11 @@ print(time())
 
         self.assertRaises(ForkException, python, py.name, _preexec_fn=fail)
 
-    def test_new_session(self):
+    def test_new_session_new_group(self):
         from threading import Event
 
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 import os
 import time
 pid = os.getpid()
@@ -2376,58 +2717,66 @@ stuff = [pid, pgid, sid]
 
 print(",".join([str(el) for el in stuff]))
 time.sleep(0.5)
-""")
+"""
+        )
 
         event = Event()
 
-        def handle(line, stdin, p):
+        def handle(run_asserts, line, stdin, p):
             pid, pgid, sid = line.strip().split(",")
             pid = int(pid)
             pgid = int(pgid)
             sid = int(sid)
+            test_pid = os.getpgid(os.getpid())
 
             self.assertEqual(p.pid, pid)
-            self.assertEqual(pid, pgid)
             self.assertEqual(p.pgid, pgid)
             self.assertEqual(pgid, p.get_pgid())
-            self.assertEqual(pid, sid)
-            self.assertEqual(sid, pgid)
             self.assertEqual(p.sid, sid)
             self.assertEqual(sid, p.get_sid())
 
+            run_asserts(pid, pgid, sid, test_pid)
             event.set()
 
-        # new session
-        p = python(py.name, _out=handle)
+        def session_true_group_false(pid, pgid, sid, test_pid):
+            self.assertEqual(pid, sid)
+            self.assertEqual(pid, pgid)
+
+        p = python(
+            py.name, _out=partial(handle, session_true_group_false), _new_session=True
+        )
         p.wait()
         self.assertTrue(event.is_set())
 
         event.clear()
 
-        def handle(line, stdin, p):
-            pid, pgid, sid = line.strip().split(",")
-            pid = int(pid)
-            pgid = int(pgid)
-            sid = int(sid)
+        def session_false_group_false(pid, pgid, sid, test_pid):
+            self.assertEqual(test_pid, pgid)
+            self.assertNotEqual(pid, sid)
 
-            test_pid = os.getpgid(os.getpid())
+        p = python(
+            py.name, _out=partial(handle, session_false_group_false), _new_session=False
+        )
+        p.wait()
+        self.assertTrue(event.is_set())
 
-            self.assertEqual(p.pid, pid)
-            self.assertNotEqual(test_pid, pgid)
-            self.assertEqual(p.pgid, pgid)
-            self.assertEqual(pgid, p.get_pgid())
-            self.assertNotEqual(pid, sid)
-            self.assertNotEqual(sid, pgid)
-            self.assertEqual(p.sid, sid)
-            self.assertEqual(sid, p.get_sid())
+        event.clear()
 
-            event.set()
+        def session_false_group_true(pid, pgid, sid, test_pid):
+            self.assertEqual(pid, pgid)
+            self.assertNotEqual(pid, sid)
 
-        # no new session
-        p = python(py.name, _out=handle, _new_session=False)
+        p = python(
+            py.name,
+            _out=partial(handle, session_false_group_true),
+            _new_session=False,
+            _new_group=True,
+        )
         p.wait()
         self.assertTrue(event.is_set())
 
+        event.clear()
+
     def test_done_cb_exc(self):
         from sh import ErrorReturnCode
 
@@ -2453,10 +2802,12 @@ time.sleep(0.5)
             self.fail("command should've thrown an exception")
 
     def test_callable_stdin(self):
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 import sys
 sys.stdout.write(sys.stdin.read())
-""")
+"""
+        )
 
         def create_stdin():
             state = {"count": 0}
@@ -2470,7 +2821,7 @@ sys.stdout.write(sys.stdin.read())
 
             return stdin
 
-        out = python(py.name, _in=create_stdin())
+        out = pythons(py.name, _in=create_stdin())
         self.assertEqual("0123", out)
 
     def test_stdin_unbuffered_bufsize(self):
@@ -2479,7 +2830,8 @@ sys.stdout.write(sys.stdin.read())
         # this tries to receive some known data and measures the time it takes
         # to receive it.  since we're flushing by newline, we should only be
         # able to receive the data when a newline is fed in
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 import sys
 from time import time
 
@@ -2496,7 +2848,8 @@ sys.stdout.write(data + "\\n")
 sys.stdout.write(str(waited) + "\\n")
 
 sys.stdout.flush()
-""")
+"""
+        )
 
         def create_stdin():
             yield "test"
@@ -2520,7 +2873,8 @@ sys.stdout.flush()
         # this tries to receive some known data and measures the time it takes
         # to receive it.  since we're flushing by newline, we should only be
         # able to receive the data when a newline is fed in
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 import sys
 from time import time
 
@@ -2537,7 +2891,8 @@ sys.stdout.write(data)
 sys.stdout.write(str(waited) + "\\n")
 
 sys.stdout.flush()
-""")
+"""
+        )
 
         # we'll feed in text incrementally, sleeping strategically before
         # sending a newline.  we then measure the amount that we slept
@@ -2559,26 +2914,31 @@ sys.stdout.flush()
         self.assertLess(abs(1 - time2), 0.5)
 
     def test_custom_timeout_signal(self):
-        from sh import TimeoutException
         import signal
 
-        py = create_tmp_test("""
+        from sh import TimeoutException
+
+        py = create_tmp_test(
+            """
 import time
 time.sleep(3)
-""")
+"""
+        )
         try:
-            python(py.name, _timeout=1, _timeout_signal=signal.SIGQUIT)
+            python(py.name, _timeout=1, _timeout_signal=signal.SIGHUP)
         except TimeoutException as e:
-            self.assertEqual(e.exit_code, signal.SIGQUIT)
+            self.assertEqual(e.exit_code, signal.SIGHUP)
         else:
             self.fail("we should have handled a TimeoutException")
 
     def test_append_stdout(self):
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 import sys
 num = sys.stdin.read()
 sys.stdout.write(num)
-""")
+"""
+        )
         append_file = tempfile.NamedTemporaryFile(mode="a+b")
         python(py.name, _in="1", _out=append_file)
         python(py.name, _in="2", _out=append_file)
@@ -2587,11 +2947,13 @@ sys.stdout.write(num)
         self.assertEqual(b"12", output)
 
     def test_shadowed_subcommand(self):
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 import sys
 sys.stdout.write(sys.argv[1])
-""")
-        out = python.bake(py.name).bake_()
+"""
+        )
+        out = pythons.bake(py.name).bake_()
         self.assertEqual("bake", out)
 
     def test_no_proc_no_attr(self):
@@ -2602,10 +2964,12 @@ sys.stdout.write(sys.argv[1])
     def test_partially_applied_callback(self):
         from functools import partial
 
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 for i in range(10):
     print(i)
-""")
+"""
+        )
 
         output = []
 
@@ -2630,7 +2994,8 @@ for i in range(10):
         import time
 
         # child process that will write to a file if it receives a SIGHUP
-        child = create_tmp_test("""
+        child = create_tmp_test(
+            """
 import signal
 import sys
 import time
@@ -2643,11 +3008,13 @@ with open(output_file, "w") as f:
     signal.signal(signal.SIGHUP, handle_sighup)
     time.sleep(2)
     f.write("made it!\\n")
-""")
+"""
+        )
 
         # the parent that will terminate before the child writes to the output
         # file, potentially causing a SIGHUP
-        parent = create_tmp_test("""
+        parent = create_tmp_test(
+            """
 import os
 import time
 import sys
@@ -2658,7 +3025,8 @@ output_file = sys.argv[2]
 python_name = os.path.basename(sys.executable)
 os.spawnlp(os.P_NOWAIT, python_name, python_name, child_file, output_file)
 time.sleep(1) # give child a chance to set up
-""")
+"""
+        )
 
         output_file = tempfile.NamedTemporaryFile(delete=True)
         python(parent.name, child.name, output_file.name)
@@ -2670,18 +3038,22 @@ time.sleep(1) # give child a chance to set up
     def test_unchecked_producer_failure(self):
         from sh import ErrorReturnCode_2
 
-        producer = create_tmp_test("""
+        producer = create_tmp_test(
+            """
 import sys
 for i in range(10):
     print(i)
 sys.exit(2)
-""")
+"""
+        )
 
-        consumer = create_tmp_test("""
+        consumer = create_tmp_test(
+            """
 import sys
 for line in sys.stdin:
     pass
-""")
+"""
+        )
 
         direct_pipe = python(producer.name, _piped=True)
         self.assertRaises(ErrorReturnCode_2, python, direct_pipe, consumer.name)
@@ -2692,33 +3064,41 @@ for line in sys.stdin:
 
         from sh import ErrorReturnCode_2
 
-        producer = create_tmp_test("""
+        producer = create_tmp_test(
+            """
 import sys
 for i in range(10):
     print(i)
 sys.exit(2)
-""")
+"""
+        )
 
-        middleman = create_tmp_test("""
+        middleman = create_tmp_test(
+            """
 import sys
 for line in sys.stdin:
     print("> " + line)
-""")
+"""
+        )
 
-        consumer = create_tmp_test("""
+        consumer = create_tmp_test(
+            """
 import sys
 for line in sys.stdin:
     pass
-""")
+"""
+        )
 
         producer_normal_pipe = python(producer.name, _piped=True)
-        middleman_normal_pipe = python(producer_normal_pipe, middleman.name, _piped=True)
-        self.assertRaises(ErrorReturnCode_2, python, middleman_normal_pipe, consumer.name)
+        middleman_normal_pipe = python(
+            middleman.name, _piped=True, _in=producer_normal_pipe
+        )
+        self.assertRaises(
+            ErrorReturnCode_2, python, middleman_normal_pipe, consumer.name
+        )
 
 
-@skip_unless(HAS_MOCK, "requires unittest.mock")
 class MockTests(BaseTests):
-
     def test_patch_command_cls(self):
         def fn():
             cmd = sh.Command("afowejfow")
@@ -2749,12 +3129,14 @@ class MiscTests(BaseTests):
     def test_pickling(self):
         import pickle
 
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 import sys
 sys.stdout.write("some output")
 sys.stderr.write("some error")
 exit(1)
-""")
+"""
+        )
 
         try:
             python(py.name)
@@ -2773,7 +3155,7 @@ exit(1)
         with ulimit(resource.RLIMIT_NOFILE, 2048):
             cutoff_fd = 1024
             pipes = []
-            for i in xrange(cutoff_fd):
+            for i in range(cutoff_fd):
                 master, slave = os.pipe()
                 pipes.append((master, slave))
                 if slave >= cutoff_fd:
@@ -2788,79 +3170,17 @@ exit(1)
         self.assertRaises(DeprecationWarning, sh.args, _env={})
 
     def test_percent_doesnt_fail_logging(self):
-        """ test that a command name doesn't interfere with string formatting in
-        the internal loggers """
-        py = create_tmp_test("""
+        """test that a command name doesn't interfere with string formatting in
+        the internal loggers"""
+        py = create_tmp_test(
+            """
 print("cool")
-""")
+"""
+        )
         python(py.name, "%")
         python(py.name, "%%")
         python(py.name, "%%%")
 
-    # TODO
-    # for some reason, i can't get a good stable baseline measured in this test
-    # on osx.  so skip it for now if osx
-    @not_macos
-    @requires_progs("lsof")
-    def test_no_fd_leak(self):
-        import sh
-        import os
-        from itertools import product
-
-        # options whose combinations can possibly cause fd leaks
-        kwargs = {
-            "_tty_out": (True, False),
-            "_tty_in": (True, False),
-            "_err_to_out": (True, False),
-        }
-
-        def get_opts(possible_values):
-            all_opts = []
-            for opt, values in possible_values.items():
-                opt_collection = []
-                all_opts.append(opt_collection)
-
-                for val in values:
-                    pair = (opt, val)
-                    opt_collection.append(pair)
-
-            for combo in product(*all_opts):
-                opt_dict = {}
-                for key, val in combo:
-                    opt_dict[key] = val
-                yield opt_dict
-
-        test_pid = os.getpid()
-
-        def get_num_fds():
-            lines = sh.lsof(p=test_pid).strip().split("\n")
-
-            def test(line):
-                line = line.upper()
-                return "CHR" in line or "PIPE" in line
-
-            lines = [line for line in lines if test(line)]
-            return len(lines) - 1
-
-        py = create_tmp_test("")
-
-        def test_command(**opts):
-            python(py.name, **opts)
-
-        # make sure our baseline is stable.. we can remove this
-        test_command()
-        baseline = get_num_fds()
-        for i in xrange(10):
-            test_command()
-            num_fds = get_num_fds()
-            self.assertEqual(baseline, num_fds)
-
-        for opts in get_opts(kwargs):
-            for i in xrange(2):
-                test_command(**opts)
-                num_fds = get_num_fds()
-                self.assertEqual(baseline, num_fds, (baseline, num_fds, opts))
-
     def test_pushd_thread_safety(self):
         import threading
         import time
@@ -2896,9 +3216,11 @@ print("cool")
             os.rmdir(temp2)
 
     def test_stdin_nohang(self):
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 print("hi")
-""")
+"""
+        )
         read, write = os.pipe()
         stdin = os.fdopen(read, "r")
         python(py.name, _in=stdin)
@@ -2908,10 +3230,16 @@ print("hi")
         from sh import Command
 
         python_name = os.path.basename(sys.executable)
-        py = create_tmp_test("""#!/usr/bin/env {0}
+        py = create_tmp_test(
+            """#!/usr/bin/env {0}
 # -*- coding: utf8 -*-
 print("字")
-""".format(python_name), prefix="字", delete=False)
+""".format(
+                python_name
+            ),
+            prefix="字",
+            delete=False,
+        )
 
         try:
             py.close()
@@ -2921,16 +3249,13 @@ print("字")
             # all of these should behave just fine
             str(cmd)
             repr(cmd)
-            unicode(cmd)
 
-            running = cmd()
+            running = cmd(_return_cmd=True)
             str(running)
             repr(running)
-            unicode(running)
 
             str(running.process)
             repr(running.process)
-            unicode(running.process)
 
         finally:
             os.unlink(py.name)
@@ -2938,12 +3263,13 @@ print("字")
     # https://github.com/amoffat/sh/issues/121
     def test_wraps(self):
         from sh import ls
+
         wraps(ls)(lambda f: True)
 
     def test_signal_exception_aliases(self):
-        """ proves that signal exceptions with numbers and names are equivalent
-        """
+        """proves that signal exceptions with numbers and names are equivalent"""
         import signal
+
         import sh
 
         sig_name = "SignalException_%d" % signal.SIGQUIT
@@ -2953,9 +3279,11 @@ print("字")
         self.assertEqual(sig, SignalException_SIGQUIT)
 
     def test_change_log_message(self):
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 print("cool")
-""")
+"""
+        )
 
         def log_msg(cmd, call_args, pid=None):
             return "Hi! I ran something"
@@ -2977,11 +3305,13 @@ print("cool")
 
     # https://github.com/amoffat/sh/issues/273
     def test_stop_iteration_doesnt_block(self):
-        """ proves that calling calling next() on a stopped iterator doesn't
-        hang. """
-        py = create_tmp_test("""
+        """proves that calling calling next() on a stopped iterator doesn't
+        hang."""
+        py = create_tmp_test(
+            """
 print("cool")
-""")
+"""
+        )
         p = python(py.name, _iter=True)
         for i in range(100):
             try:
@@ -2994,12 +3324,14 @@ print("cool")
         import threading
         import time
 
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 import sys
 a = sys.argv
 res = (a[1], a[3])
 sys.stdout.write(repr(res))
-""")
+"""
+        )
 
         p1 = python.bake("-u", py.name, 1)
         p2 = python.bake("-u", py.name, 2)
@@ -3033,14 +3365,17 @@ sys.stdout.write(repr(res))
     def test_eintr(self):
         import signal
 
-        def handler(num, frame): pass
+        def handler(num, frame):
+            pass
 
         signal.signal(signal.SIGALRM, handler)
 
-        py = create_tmp_test("""
+        py = create_tmp_test(
+            """
 import time
 time.sleep(2)
-""")
+"""
+        )
         p = python(py.name, _bg=True)
         signal.alarm(1)
         p.wait()
@@ -3049,6 +3384,7 @@ time.sleep(2)
 class StreamBuffererTests(unittest.TestCase):
     def test_unbuffered(self):
         from sh import StreamBufferer
+
         b = StreamBufferer(0)
 
         self.assertEqual(b.process(b"test"), [b"test"])
@@ -3058,6 +3394,7 @@ class StreamBuffererTests(unittest.TestCase):
 
     def test_newline_buffered(self):
         from sh import StreamBufferer
+
         b = StreamBufferer(1)
 
         self.assertEqual(b.process(b"testing\none\ntwo"), [b"testing\n", b"one\n"])
@@ -3066,6 +3403,7 @@ class StreamBuffererTests(unittest.TestCase):
 
     def test_chunk_buffered(self):
         from sh import StreamBufferer
+
         b = StreamBufferer(10)
 
         self.assertEqual(b.process(b"testing\none\ntwo"), [b"testing\non"])
@@ -3077,17 +3415,35 @@ class StreamBuffererTests(unittest.TestCase):
 class ExecutionContextTests(unittest.TestCase):
     def test_basic(self):
         import sh
+
         out = StringIO()
-        _sh = sh(_out=out)
+        _sh = sh.bake(_out=out)
         _sh.echo("-n", "TEST")
         self.assertEqual("TEST", out.getvalue())
 
+    def test_multiline_defaults(self):
+        py = create_tmp_test(
+            """
+import os
+print(os.environ["ABC"])
+"""
+        )
+
+        sh2 = sh.bake(
+            _env={
+                "ABC": "123",
+            }
+        )
+        output = sh2.python(py.name).strip()
+        assert output == "123"
+
     def test_no_interfere1(self):
         import sh
+
         out = StringIO()
-        _sh = sh(_out=out)  # noqa: F841
-        from _sh import echo
-        echo("-n", "TEST")
+        _sh = sh.bake(_out=out)  # noqa: F841
+
+        _sh.echo("-n", "TEST")
         self.assertEqual("TEST", out.getvalue())
 
         # Emptying the StringIO
@@ -3099,112 +3455,59 @@ class ExecutionContextTests(unittest.TestCase):
 
     def test_no_interfere2(self):
         import sh
+
         out = StringIO()
         from sh import echo
-        _sh = sh(_out=out)  # noqa: F841
+
+        _sh = sh.bake(_out=out)  # noqa: F841
         echo("-n", "TEST")
         self.assertEqual("", out.getvalue())
 
-    def test_no_bad_name(self):
-        out = StringIO()
-
-        def fn():
-            import sh
-            sh = sh(_out=out)
-
-        self.assertRaises(RuntimeError, fn)
-
     def test_set_in_parent_function(self):
         import sh
+
         out = StringIO()
-        _sh = sh(_out=out)
+        _sh = sh.bake(_out=out)
 
         def nested1():
             _sh.echo("-n", "TEST1")
 
         def nested2():
             import sh
+
             sh.echo("-n", "TEST2")
 
         nested1()
         nested2()
         self.assertEqual("TEST1", out.getvalue())
 
-    def test_reimport_no_interfere(self):
-        import sh
-        out = StringIO()
-        _sh = sh(_out=out)
-        import _sh  # this reimport '_sh' from the eponymous local variable
-        _sh.echo("-n", "TEST")
-        self.assertEqual("TEST", out.getvalue())
-
-    def test_importer_detects_module_name(self):
+    def test_command_with_baked_call_args(self):
+        # Test that sh.Command() knows about baked call args
         import sh
-        _sh = sh()
-        omg = _sh  # noqa: F841
-        from omg import cat  # noqa: F401
-
-    def test_importer_only_works_with_sh(self):
-        def unallowed_import():
-            _os = os  # noqa: F841
-            from _os import path  # noqa: F401
-
-        self.assertRaises(ImportError, unallowed_import)
-
-    def test_reimport_from_cli(self):
-        # The REPL and CLI both need special handling to create an execution context that is safe to
-        # reimport
-        if IS_PY3:
-            cmdstr = '; '.join(('import sh, io, sys',
-                                'out = io.StringIO()',
-                                '_sh = sh(_out=out)',
-                                'import _sh',
-                                '_sh.echo("-n", "TEST")',
-                                'sys.stderr.write(out.getvalue())',
-                                ))
-        else:
-            cmdstr = '; '.join(('import sh, StringIO, sys',
-                                'out = StringIO.StringIO()',
-                                '_sh = sh(_out=out)',
-                                'import _sh',
-                                '_sh.echo("-n", "TEST")',
-                                'sys.stderr.write(out.getvalue())',
-                                ))
 
-        err = StringIO()
-
-        python('-c', cmdstr, _err=err)
-        self.assertEqual('TEST', err.getvalue())
+        _sh = sh.bake(_ok_code=1)
+        self.assertEqual(sh.Command._call_args["ok_code"], 0)
+        self.assertEqual(_sh.Command._call_args["ok_code"], 1)
 
 
 if __name__ == "__main__":
     root = logging.getLogger()
     root.setLevel(logging.DEBUG)
-    root.addHandler(NullHandler())
+    root.addHandler(logging.NullHandler())
 
-    test_kwargs = {}
+    test_kwargs = {"warnings": "ignore"}
 
-    if IS_PY2 and MINOR_VER != 6:
-        test_kwargs["failfast"] = True
-        test_kwargs["verbosity"] = 2
+    # if we're running a specific test, we can let unittest framework figure out
+    # that test and run it itself.  it will also handle setting the return code
+    # of the process if any tests error or fail
+    if len(sys.argv) > 1:
+        unittest.main(**test_kwargs)
 
-    try:
-        # if we're running a specific test, we can let unittest framework figure out
-        # that test and run it itself.  it will also handle setting the return code
-        # of the process if any tests error or fail
-        if len(sys.argv) > 1:
-            unittest.main(**test_kwargs)
-
-        # otherwise, it looks like we want to run all the tests
-        else:
-            suite = unittest.TestLoader().loadTestsFromModule(sys.modules[__name__])
-            test_kwargs["verbosity"] = 2
-            result = unittest.TextTestRunner(**test_kwargs).run(suite)
-
-            if not result.wasSuccessful():
-                exit(1)
+    # otherwise, it looks like we want to run all the tests
+    else:
+        suite = unittest.TestLoader().loadTestsFromModule(sys.modules[__name__])
+        test_kwargs["verbosity"] = 2
+        result = unittest.TextTestRunner(**test_kwargs).run(suite)
 
-    finally:
-        if cov:
-            cov.stop()
-            cov.save()
+        if not result.wasSuccessful():
+            exit(1)
diff --git a/tox.ini b/tox.ini
index 1f986c7..0bfc16d 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,13 +1,26 @@
 [tox]
-# virtualenv for py26 is broken, so don't put it here
-envlist = py{27,31,32,33,34,35,36,37,38},docs
+envlist = py{38,39,310,311}-locale-{c,utf8}-poller-{poll,select},lint
+isolated_build = True
 
 [testenv]
-deps = -r requirements-dev.txt
+allowlist_externals = poetry
+setenv =
+    locale-c: LANG=C
+    locale-utf8: LANG=en_US.UTF-8
+    poller-select: SH_TESTS_USE_SELECT=1
+    poller-poll: SH_TESTS_USE_SELECT=0
+    SH_TESTS_RUNNING=1
 commands =
-    python sh.py tox
+    python test.py {posargs}
 
-[testenv:docs]
-basepython = python3
+[testenv:lint]
+allowlist_externals =
+    flake8
+    black
+    rstcheck
+    mypy
 commands =
-    python setup.py check --restructuredtext --metadata --strict
+    flake8 sh.py test.py
+    black --check --diff sh.py test.py
+    rstcheck README.rst
+    mypy sh.py
\ No newline at end of file

More details

Full run details

Historical runs