New Upstream Release - python-requests-cache

Ready changes

Summary

Merged new upstream version: 1.0.1 (was: 0.9.8).
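
For reference, a minimal sketch of typical requests-cache usage (not taken from this release; the cache name and expiry below are illustrative values):

```python
# Minimal requests-cache usage sketch; 'demo_cache' and expire_after=360 are
# illustrative values, not settings from this package or release.
from requests_cache import CachedSession

session = CachedSession('demo_cache', expire_after=360)  # cache responses for 360s
response = session.get('https://httpbin.org/get')        # first call hits the network
response = session.get('https://httpbin.org/get')        # repeat call served from cache
print(response.from_cache)  # True once the response has been cached
```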

Diff

diff --git a/.all-contributorsrc b/.all-contributorsrc
index d307ad8..274374f 100644
--- a/.all-contributorsrc
+++ b/.all-contributorsrc
@@ -781,6 +781,172 @@
       "contributions": [
         "bug"
       ]
+    },
+    {
+      "login": "mjpieters",
+      "name": "Martijn Pieters",
+      "avatar_url": "https://avatars.githubusercontent.com/u/46775?v=4",
+      "profile": "http://www.zopatista.com/",
+      "contributions": [
+        "ideas"
+      ]
+    },
+    {
+      "login": "dholth",
+      "name": "Daniel Holth",
+      "avatar_url": "https://avatars.githubusercontent.com/u/208018?v=4",
+      "profile": "https://monotreme.club/",
+      "contributions": [
+        "code"
+      ]
+    },
+    {
+      "login": "lazka",
+      "name": "Christoph Reiter",
+      "avatar_url": "https://avatars.githubusercontent.com/u/991986?v=4",
+      "profile": "http://lazka.github.io/",
+      "contributions": [
+        "bug",
+        "ideas"
+      ]
+    },
+    {
+      "login": "imba-tjd",
+      "name": "谭九鼎",
+      "avatar_url": "https://avatars.githubusercontent.com/u/24759802?v=4",
+      "profile": "https://www.zhihu.com/people/tan-jiu-ding",
+      "contributions": [
+        "doc"
+      ]
+    },
+    {
+      "login": "pb-jeff-oneill",
+      "name": "Jeff O'Neill",
+      "avatar_url": "https://avatars.githubusercontent.com/u/55557751?v=4",
+      "profile": "https://www.patentbots.com/",
+      "contributions": [
+        "bug"
+      ]
+    },
+    {
+      "login": "hlYassine",
+      "name": "yassine",
+      "avatar_url": "https://avatars.githubusercontent.com/u/3386466?v=4",
+      "profile": "https://github.com/hlYassine",
+      "contributions": [
+        "bug"
+      ]
+    },
+    {
+      "login": "DavidSchmidt00",
+      "name": "David Schmidt",
+      "avatar_url": "https://avatars.githubusercontent.com/u/43894937?v=4",
+      "profile": "https://github.com/DavidSchmidt00",
+      "contributions": [
+        "bug",
+        "code"
+      ]
+    },
+    {
+      "login": "ValueRaider",
+      "name": "ValueRaider",
+      "avatar_url": "https://avatars.githubusercontent.com/u/96923577?v=4",
+      "profile": "https://github.com/ValueRaider",
+      "contributions": [
+        "doc"
+      ]
+    },
+    {
+      "login": "masavini",
+      "name": "masavini",
+      "avatar_url": "https://avatars.githubusercontent.com/u/6315187?v=4",
+      "profile": "https://github.com/masavini",
+      "contributions": [
+        "doc"
+      ]
+    },
+    {
+      "login": "eserdk",
+      "name": "eserdk",
+      "avatar_url": "https://avatars.githubusercontent.com/u/16106844?v=4",
+      "profile": "https://github.com/eserdk",
+      "contributions": [
+        "ideas"
+      ]
+    },
+    {
+      "login": "gsalvatella",
+      "name": "gsalvatella ",
+      "avatar_url": "https://avatars.githubusercontent.com/u/42438361?v=4",
+      "profile": "https://github.com/gsalvatella",
+      "contributions": [
+        "bug"
+      ]
+    },
+    {
+      "login": "dericke",
+      "name": "Evan D",
+      "avatar_url": "https://avatars.githubusercontent.com/u/3587185?v=4",
+      "profile": "https://github.com/dericke",
+      "contributions": [
+        "bug"
+      ]
+    },
+    {
+      "login": "gismaps",
+      "name": "David GIS",
+      "avatar_url": "https://avatars.githubusercontent.com/u/65092729?v=4",
+      "profile": "https://github.com/gismaps",
+      "contributions": [
+        "bug"
+      ]
+    },
+    {
+      "login": "pfmoore",
+      "name": "Paul Moore",
+      "avatar_url": "https://avatars.githubusercontent.com/u/1110419?v=4",
+      "profile": "https://github.com/pfmoore",
+      "contributions": [
+        "ideas"
+      ]
+    },
+    {
+      "login": "carlosal1015",
+      "name": "Oromion",
+      "avatar_url": "https://avatars.githubusercontent.com/u/21283014?v=4",
+      "profile": "https://github.com/carlosal1015",
+      "contributions": [
+        "bug",
+        "platform"
+      ]
+    },
+    {
+      "login": "aaronsteers",
+      "name": "Aaron (\"AJ\") Steers",
+      "avatar_url": "https://avatars.githubusercontent.com/u/18150651?v=4",
+      "profile": "https://meltano.com/",
+      "contributions": [
+        "ideas"
+      ]
+    },
+    {
+      "login": "TheTechromancer",
+      "name": "TheTechromancer",
+      "avatar_url": "https://avatars.githubusercontent.com/u/20261699?v=4",
+      "profile": "https://github.com/TheTechromancer",
+      "contributions": [
+        "bug"
+      ]
+    },
+    {
+      "login": "sleiner",
+      "name": "Simon Leiner",
+      "avatar_url": "https://avatars.githubusercontent.com/u/6379313?v=4",
+      "profile": "https://github.com/sleiner",
+      "contributions": [
+        "code",
+        "feature"
+      ]
     }
   ],
   "contributorsPerLine": 7,
diff --git a/.github/ISSUE_TEMPLATE/bug.md b/.github/ISSUE_TEMPLATE/bug.md
index 5557290..8c91705 100644
--- a/.github/ISSUE_TEMPLATE/bug.md
+++ b/.github/ISSUE_TEMPLATE/bug.md
@@ -8,7 +8,7 @@ labels: bug
 
 <!--
 Note: See the troubleshooting page for help with some common issues:
-https://requests-cache.readthedocs.io/en/latest/user_guide/troubleshooting.html
+https://requests-cache.readthedocs.io/en/stable/user_guide/troubleshooting.html
 -->
 ### The problem
 _A description of what the bug is, including a complete traceback (if applicable)_
@@ -23,6 +23,6 @@ _With a complete code example, if possible_
 _Is there an existing workaround for this issue?_
 
 ### Environment
-- requests-cache version: [e.g. `0.8.1` or `master`]
-- Python version: [e.g. `3.9`]
-- Platform: [e.g. Debian 10]
+- requests-cache version: (for example, `0.9.5` or `main`)
+- Python version: (for example, `3.9`)
+- Platform: (for example, Debian 10)
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index d7a2aaa..b3d917b 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -2,15 +2,14 @@ name: Build
 
 on:
   push:
-    branches: [main, v0.9]
+    branches: [main]
     tags: ['v*']
   pull_request:
-    branches: [main, v0.9]
+    branches: [main]
   workflow_dispatch:
 env:
   LATEST_PY_VERSION: '3.11'
-  COVERAGE_ARGS: '--cov --cov-report=term --cov-report=xml'
-  XDIST_ARGS: '--numprocesses=auto --dist=loadfile'
+  PYTEST_VERBOSE: 'true'
 
 jobs:
   # Run tests for each supported python version
@@ -18,7 +17,7 @@ jobs:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        python-version: ['3.7', '3.8', '3.9', '3.10', '3.11']
+        python-version: ['3.7', '3.8', '3.9', '3.10', '3.11', 'pypy3.9']
       fail-fast: false
     services:
       nginx:
@@ -28,8 +27,8 @@ jobs:
 
     steps:
       # Set up python + poetry
-      - uses: actions/checkout@v2
-      - uses: actions/setup-python@v2
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
         with:
           python-version: ${{ matrix.python-version }}
       - uses: snok/install-poetry@v1.3
@@ -37,18 +36,18 @@ jobs:
           virtualenvs-in-project: true
 
       # Start integration test databases
-      - uses: supercharge/mongodb-github-action@1.7.0
+      - uses: supercharge/mongodb-github-action@1.9.0
         with:
-          mongodb-version: 4.4
-      - uses: supercharge/redis-github-action@1.4.0
+          mongodb-version: 5.0
+      - uses: supercharge/redis-github-action@1.5.0
         with:
           redis-version: 6
-      - uses: rrainn/dynamodb-action@v2.0.1
+      - uses: rrainn/dynamodb-action@v3.0.0
 
       # Cache packages per python version, and reuse until lockfile changes
       - name: Cache python packages
         id: cache
-        uses: actions/cache@v2
+        uses: actions/cache@v3
         with:
           path: .venv
           key: venv-${{ matrix.python-version }}-latest-${{ hashFiles('poetry.lock') }}
@@ -57,24 +56,31 @@ jobs:
         run: poetry install -v -E all
 
       # Run tests with coverage report
-      - name: Run tests
+      - name: Run unit + integration tests
+        if: ${{ !contains(matrix.python-version, 'pypy') }}
         run: |
           source $VENV
-          pytest -rs -x tests/unit ${{ env.XDIST_ARGS }} ${{ env.COVERAGE_ARGS }}
-          pytest -rs -x tests/integration --cov-append ${{ env.XDIST_ARGS }} ${{ env.COVERAGE_ARGS }}
+          nox -e cov -- xml
+
+      # pypy tests aren't run in parallel, so too slow for integration tests
+      - name: Run unit tests only
+        if: ${{ contains(matrix.python-version, 'pypy') }}
+        run: |
+          source $VENV
+          pytest tests/unit
 
       # Latest python version: send coverage report to codecov
       - name: "Upload coverage report to Codecov"
         if: ${{ matrix.python-version == env.LATEST_PY_VERSION }}
-        uses: codecov/codecov-action@v2
+        uses: codecov/codecov-action@v3
 
   # Run code analysis checks via pre-commit hooks
   analyze:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v2
-      - uses: actions/setup-python@v2
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
         with:
           python-version: ${{ env.LATEST_PY_VERSION }}
       - name: Run style checks & linting
-        uses: pre-commit/action@v2.0.3
+        uses: pre-commit/action@v3.0.0
diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml
index dd1754a..dec2bef 100644
--- a/.github/workflows/deploy.yml
+++ b/.github/workflows/deploy.yml
@@ -16,20 +16,27 @@ on:
         description: 'Version number for pre-releases; defaults to build number'
         required: false
         default: ''
+      skip-stress:
+        description: 'Set to "true" to skip stress tests'
+        required: false
+        default: 'false'
+      skip-publish:
+        description: 'Set to "true" to skip publishing to PyPI'
+        required: false
+        default: 'false'
 
 env:
   LATEST_PY_VERSION: '3.11'
-  XDIST_ARGS: '--numprocesses=auto --dist=loadfile'
+  PYTEST_VERBOSE: 'true'
+  STRESS_TEST_MULTIPLIER: 7
 
 jobs:
-  # Run tests for all supported requests versions
-  test:
-    runs-on: ubuntu-18.04
-    strategy:
-      matrix:
-        python-version: [3.7]
-        requests-version: [2.22, 2.23, 2.24, 2.25, latest]
-      fail-fast: false
+
+  # Run additional integration stress tests
+  test-stress:
+    if: ${{ github.event.inputs.skip-stress != 'true' }}
+    runs-on: ubuntu-latest
+
     services:
       nginx:
         image: kennethreitz/httpbin
@@ -38,51 +45,122 @@ jobs:
 
     steps:
       # Set up python + poetry
-      - uses: actions/checkout@v2
-      - uses: actions/setup-python@v2
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
         with:
-          python-version: ${{ matrix.python-version }}
+          python-version: ${{ env.LATEST_PY_VERSION }}
       - uses: snok/install-poetry@v1.3
         with:
           virtualenvs-in-project: true
 
       # Start integration test databases
-      - uses: supercharge/mongodb-github-action@1.7.0
+      - uses: supercharge/mongodb-github-action@1.9.0
         with:
-          mongodb-version: 4.4
-      - uses: supercharge/redis-github-action@1.4.0
+          mongodb-version: 5.0
+      - uses: supercharge/redis-github-action@1.5.0
         with:
           redis-version: 6
-      - uses: rrainn/dynamodb-action@v2.0.1
+      - uses: rrainn/dynamodb-action@v3.0.0
+
+      # Cache packages per python version, and reuse until lockfile changes
+      - name: Cache python packages
+        id: cache
+        uses: actions/cache@v3
+        with:
+          path: .venv
+          key: venv-${{ env.LATEST_PY_VERSION }}-latest-${{ hashFiles('poetry.lock') }}
+      - name: Install dependencies
+        if: steps.cache.outputs.cache-hit != 'true'
+        run: poetry install -v -E all
+
+      # Run tests
+      - name: Run stress tests
+        run: |
+          source $VENV
+          nox -e stress -- ${{ env.STRESS_TEST_MULTIPLIER }}
+
+  # Run unit tests without any optional dependencies installed
+  test-minimum-deps:
+    runs-on: ubuntu-latest
+
+    steps:
+      # Set up python + poetry
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: ${{ env.LATEST_PY_VERSION }}
+      - uses: snok/install-poetry@v1.3
+        with:
+          virtualenvs-in-project: true
 
       # Cache packages per python version, and reuse until lockfile changes
       - name: Cache python packages
         id: cache
-        uses: actions/cache@v2
+        uses: actions/cache@v3
         with:
           path: .venv
-          key: venv-${{ matrix.python-version }}-${{ matrix.requests-version }}-${{ hashFiles('poetry.lock') }}
+          key: venv-${{ matrix.python-version }}-latest-minimum-deps-${{ hashFiles('poetry.lock') }}
+      - name: Install dependencies
+        if: steps.cache.outputs.cache-hit != 'true'
+        run: poetry install -v
+
+      # Run tests
+      - name: Run tests with no optional dependencies
+        run: |
+          source $VENV
+          pytest -n auto tests/unit
+
+  # Run unit tests for all supported platforms, python versions, and requests versions
+  test:
+    strategy:
+      matrix:
+        os: [ubuntu-latest, macos-latest, windows-latest]
+        python-version: ['3.7', '3.8', '3.9', '3.10', '3.11', 'pypy3.9']
+        requests-version: [2.22, 2.23, 2.24, 2.25, 2.26, 2.27, latest]
+        exclude:
+        - os: windows-latest
+          python-version: 'pypy3.9'
+      fail-fast: false
+    defaults:
+      run:
+        shell: bash
+    runs-on: ${{ matrix.os }}
+
+    steps:
+      # Set up python + poetry
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+      - uses: snok/install-poetry@v1.3
+        with:
+          virtualenvs-in-project: true
+
+      # Cache packages per python version, and reuse until lockfile changes
+      - name: Cache python packages
+        id: cache
+        uses: actions/cache@v3
+        with:
+          path: .venv
+          key: venv-${{ matrix.os }}-${{ matrix.python-version }}-${{ matrix.requests-version }}-${{ hashFiles('poetry.lock') }}
       - name: Install dependencies
         if: steps.cache.outputs.cache-hit != 'true'
         run: |
           poetry add requests@${{ matrix.requests-version }} --lock
           poetry install -v -E all
 
-      # Run unit + integration tests, with additional stress tests
+      # Run tests
       - name: Run tests
-        run: |
-          source $VENV
-          pytest -x ${{ env.XDIST_ARGS }} tests/unit
-          pytest -x ${{ env.XDIST_ARGS }} tests/integration -k 'not concurrency'
-          STRESS_TEST_MULTIPLIER=10 pytest tests/integration -k 'concurrency'
+        run: poetry run pytest -n auto tests/unit
 
   # Deploy stable builds on tags only, and pre-release builds from manual trigger ("workflow_dispatch")
   release:
-    needs: [test]
+    if: ${{ github.event.inputs.skip-publish != 'true' }}
+    needs: [test, test-minimum-deps]
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v2
-      - uses: actions/setup-python@v2
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
         with:
           python-version: ${{ env.LATEST_PY_VERSION }}
       - uses: snok/install-poetry@v1.3
@@ -98,7 +176,7 @@ jobs:
           poetry version $(poetry version -s).${{ env.pre-release-suffix }}${{ env.pre-release-version }}
           poetry version
 
-      - name: Build and publish to pypi
+      - name: Build and publish to PyPI
         run: |
           poetry build
           poetry publish -u  __token__ -p ${{ secrets.PYPI_TOKEN }}
diff --git a/.gitignore b/.gitignore
index 8a3d9f0..1ee54e2 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,15 +1,19 @@
-*.db
-*.py[cod]
-*.sqlite
-*.sqlite-journal
-*.egg
-*.egg-info
 build/
 dist/
 downloads/
+example_cache/
 http_cache/
 venv/
+
+*.db
+*.egg
+*.egg-info
+*.py[cod]
+*.sqlite
+*.sqlite-journal
 .venv
+Pipfile
+profile.json
 
 # JS
 node_modules/
@@ -25,8 +29,9 @@ package.json
 # Test / coverage reports
 .coverage
 .coverage.*
-.nox
+.COVERAGE.*
 .mypy_cache/
+.nox/
 test-reports/
 
 # Sphinx
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index e07f7e0..5939154 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,32 +1,28 @@
 repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.3.0
+    rev: v4.4.0
     hooks:
       - id: check-toml
       - id: check-yaml
+        args: ['--unsafe']
       - id: end-of-file-fixer
       - id: mixed-line-ending
       - id: trailing-whitespace
   - repo: https://github.com/psf/black
-    rev: 22.10.0
+    rev: 23.1.0
     hooks:
       - id: black
-  - repo: https://github.com/asottile/blacken-docs
-    rev: v1.12.1
-    hooks:
-      - id: blacken-docs
-        args: [--skip-errors, --skip-string-normalization]
   - repo: https://github.com/timothycrosley/isort
-    rev: 5.10.1
+    rev: 5.12.0
     hooks:
       - id: isort
   - repo: https://github.com/pycqa/flake8
-    rev: 3.9.2
+    rev: 6.0.0
     hooks:
       - id: flake8
         additional_dependencies: [flake8-comprehensions]
   - repo: https://github.com/pre-commit/mirrors-mypy
-    rev: v0.982
+    rev: v1.1.1
     hooks:
       - id: mypy
         files: requests_cache
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 6c0360c..6e4dd3a 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -25,13 +25,6 @@ If you are interested in helping out, here are a few ways to get started:
   completely ruled out either
 * If you find an issue you want to work on, please comment on it so others know it's in progress
 
-## Pre-release Installation
-If you want to test out the latest in-development changes, you can install pre-release versions:
-```bash
-pip install --pre requests-cache
-```
-Pre-release documentation can be found here: https://requests-cache.readthedocs.io/en/latest/
-
 ## Dev Installation
 To set up for local development (requires [poetry](https://python-poetry.org/docs/#installation)):
 
@@ -123,7 +116,7 @@ For backend databases, you can install and run them on the host instead of in a
 as they are running on the default port.
 
 ## Documentation
-[Sphinx](http://www.sphinx-doc.org/en/master/) is used to generate documentation.
+[Sphinx](https://www.sphinx-doc.org/en/master/) is used to generate documentation.
 
 To build the docs locally:
 ```bash
@@ -198,6 +191,7 @@ pip install -U requests-cache
 Notes:
 * See python packaging docs on
 [pre-release versioning](https://packaging.python.org/guides/distributing-packages-using-setuptools/#pre-release-versioning) for more info on how this works
+* requests-cache pre-release docs can be found here: https://requests-cache.readthedocs.io/en/latest/
 * Any collaborator can trigger a pre-release build for requests-cache by going to
   **Actions > Deploy > Run workflow**
 * A complete list of builds can by found on [PyPI under 'Release History'](https://pypi.org/project/requests-cache/#history)
diff --git a/CONTRIBUTORS.md b/CONTRIBUTORS.md
index 5389511..98e280b 100644
--- a/CONTRIBUTORS.md
+++ b/CONTRIBUTORS.md
@@ -8,108 +8,132 @@ contributions that have helped to improve requests-cache:
 <!-- prettier-ignore-start -->
 <!-- markdownlint-disable -->
 <table>
-  <tr>
-    <td align="center"><a href="http://aljohri.com/"><img src="https://avatars.githubusercontent.com/u/2790092?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Al Johri</b></sub></a><br /><a href="#ideas-AlJohri" title="Ideas, Planning, & Feedback">πŸ€”</a></td>
-    <td align="center"><a href="http://grep.ro/"><img src="https://avatars.githubusercontent.com/u/27617?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Alex Morega</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=mgax" title="Documentation">πŸ“–</a> <a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Amgax" title="Bug reports">πŸ›</a></td>
-    <td align="center"><a href="https://github.com/meowcoder"><img src="https://avatars.githubusercontent.com/u/287868?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Alex Sinitsin</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=meowcoder" title="Code">πŸ’»</a> <a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Ameowcoder" title="Bug reports">πŸ›</a></td>
-    <td align="center"><a href="https://github.com/andrewkittredge"><img src="https://avatars.githubusercontent.com/u/430274?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Andrew Kittredge</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=andrewkittredge" title="Documentation">πŸ“–</a></td>
-    <td align="center"><a href="https://github.com/thatguystone"><img src="https://avatars.githubusercontent.com/u/921573?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Andrew Stone</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Athatguystone" title="Bug reports">πŸ›</a> <a href="https://github.com/requests-cache/requests-cache/commits?author=thatguystone" title="Code">πŸ’»</a></td>
-    <td align="center"><a href="https://themiurgo.github.io/website"><img src="https://avatars.githubusercontent.com/u/920728?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Antonio Lima</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=themiurgo" title="Code">πŸ’»</a> <a href="#feature-themiurgo" title="New features">✨</a> <a href="#ideas-themiurgo" title="Ideas, Planning, & Feedback">πŸ€”</a></td>
-    <td align="center"><a href="https://github.com/borisdan"><img src="https://avatars.githubusercontent.com/u/5167646?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Boris Danilovich</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=borisdan" title="Code">πŸ’»</a> <a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Aborisdan" title="Bug reports">πŸ›</a></td>
-  </tr>
-  <tr>
-    <td align="center"><a href="http://char101.github.io/"><img src="https://avatars.githubusercontent.com/u/71255?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Charles</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Achar101" title="Bug reports">πŸ›</a></td>
-    <td align="center"><a href="https://github.com/CharString"><img src="https://avatars.githubusercontent.com/u/325643?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Chris Wesseling</b></sub></a><br /><a href="#ideas-CharString" title="Ideas, Planning, & Feedback">πŸ€”</a></td>
-    <td align="center"><a href="https://github.com/christopher-dG"><img src="https://avatars.githubusercontent.com/u/17228795?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Chris de Graaf</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=christopher-dG" title="Code">πŸ’»</a> <a href="#feature-christopher-dG" title="New features">✨</a></td>
-    <td align="center"><a href="http://twitter.com/daniel_aus_wa"><img src="https://avatars.githubusercontent.com/u/128286?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Daniel Rech</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=dmr" title="Code">πŸ’»</a> <a href="https://github.com/requests-cache/requests-cache/commits?author=dmr" title="Documentation">πŸ“–</a></td>
-    <td align="center"><a href="http://davidstosik.github.io/"><img src="https://avatars.githubusercontent.com/u/816901?v=4?s=100" width="100px;" alt=""/><br /><sub><b>David Stosik</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Adavidstosik" title="Bug reports">πŸ›</a></td>
-    <td align="center"><a href="https://github.com/scraperdragon"><img src="https://avatars.githubusercontent.com/u/1957682?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Dragon Dave McKee</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=scraperdragon" title="Code">πŸ’»</a></td>
-    <td align="center"><a href="http://edwardbetts.com/"><img src="https://avatars.githubusercontent.com/u/3818?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Edward Betts</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=EdwardBetts" title="Code">πŸ’»</a> <a href="https://github.com/requests-cache/requests-cache/commits?author=EdwardBetts" title="Documentation">πŸ“–</a></td>
-  </tr>
-  <tr>
-    <td align="center"><a href="https://github.com/glensc"><img src="https://avatars.githubusercontent.com/u/199095?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Elan RuusamΓ€e</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Aglensc" title="Bug reports">πŸ›</a> <a href="https://github.com/requests-cache/requests-cache/commits?author=glensc" title="Documentation">πŸ“–</a> <a href="#ideas-glensc" title="Ideas, Planning, & Feedback">πŸ€”</a> <a href="https://github.com/requests-cache/requests-cache/commits?author=glensc" title="Code">πŸ’»</a></td>
-    <td align="center"><a href="https://github.com/Querela"><img src="https://avatars.githubusercontent.com/u/1648294?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Erik KΓΆrner</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3AQuerela" title="Bug reports">πŸ›</a> <a href="#ideas-Querela" title="Ideas, Planning, & Feedback">πŸ€”</a></td>
-    <td align="center"><a href="https://femtotrader.github.io/"><img src="https://avatars.githubusercontent.com/u/5049737?v=4?s=100" width="100px;" alt=""/><br /><sub><b>FemtoTrader</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=femtotrader" title="Code">πŸ’»</a> <a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Afemtotrader" title="Bug reports">πŸ›</a> <a href="#ideas-femtotrader" title="Ideas, Planning, & Feedback">πŸ€”</a> <a href="#feature-femtotrader" title="New features">✨</a></td>
-    <td align="center"><a href="http://www.floriandemmer.com/"><img src="https://avatars.githubusercontent.com/u/630975?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Florian Demmer</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=fdemmer" title="Code">πŸ’»</a> <a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Afdemmer" title="Bug reports">πŸ›</a></td>
-    <td align="center"><a href="https://gdr.name/"><img src="https://avatars.githubusercontent.com/u/315648?v=4?s=100" width="100px;" alt=""/><br /><sub><b>GDR!</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Agjedeer" title="Bug reports">πŸ›</a></td>
-    <td align="center"><a href="https://github.com/Garrett-R"><img src="https://avatars.githubusercontent.com/u/6614695?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Garrett Reynolds</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=Garrett-R" title="Code">πŸ’»</a></td>
-    <td align="center"><a href="https://github.com/gregdingle"><img src="https://avatars.githubusercontent.com/u/28797?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Greg Dingle</b></sub></a><br /><a href="#ideas-gregdingle" title="Ideas, Planning, & Feedback">πŸ€”</a></td>
-  </tr>
-  <tr>
-    <td align="center"><a href="https://github.com/chengguangnan"><img src="https://avatars.githubusercontent.com/u/861069?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Guangnan Cheng</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=chengguangnan" title="Code">πŸ’»</a> <a href="#feature-chengguangnan" title="New features">✨</a></td>
-    <td align="center"><a href="https://honzajavorek.cz/"><img src="https://avatars.githubusercontent.com/u/283441?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Honza Javorek</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Ahonzajavorek" title="Bug reports">πŸ›</a> <a href="#ideas-honzajavorek" title="Ideas, Planning, & Feedback">πŸ€”</a></td>
-    <td align="center"><a href="https://github.com/Iftahh"><img src="https://avatars.githubusercontent.com/u/798544?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Iftah</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3AIftahh" title="Bug reports">πŸ›</a> <a href="https://github.com/requests-cache/requests-cache/commits?author=Iftahh" title="Code">πŸ’»</a></td>
-    <td align="center"><a href="https://github.com/libbkmz"><img src="https://avatars.githubusercontent.com/u/1144960?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Ilya</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=libbkmz" title="Code">πŸ’»</a></td>
-    <td align="center"><a href="https://rob.gant.ninja/"><img src="https://avatars.githubusercontent.com/u/710553?v=4?s=100" width="100px;" alt=""/><br /><sub><b>J Rob Gant</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=rgant" title="Code">πŸ’»</a> <a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Argant" title="Bug reports">πŸ›</a></td>
-    <td align="center"><a href="https://jacebrowning.info/"><img src="https://avatars.githubusercontent.com/u/939501?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Jace Browning</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=jacebrowning" title="Code">πŸ’»</a> <a href="https://github.com/requests-cache/requests-cache/commits?author=jacebrowning" title="Documentation">πŸ“–</a></td>
-    <td align="center"><a href="https://github.com/jsemric"><img src="https://avatars.githubusercontent.com/u/22685064?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Jakub Semrič</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=jsemric" title="Code">πŸ’»</a> <a href="#ideas-jsemric" title="Ideas, Planning, & Feedback">πŸ€”</a></td>
-  </tr>
-  <tr>
-    <td align="center"><a href="http://jeremydouglass.com/"><img src="https://avatars.githubusercontent.com/u/798570?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Jeremy Douglass</b></sub></a><br /><a href="#ideas-jeremydouglass" title="Ideas, Planning, & Feedback">πŸ€”</a></td>
-    <td align="center"><a href="https://github.com/jkwill87"><img src="https://avatars.githubusercontent.com/u/4343678?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Jessy Williams</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=jkwill87" title="Code">πŸ’»</a> <a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Ajkwill87" title="Bug reports">πŸ›</a> <a href="https://github.com/requests-cache/requests-cache/commits?author=jkwill87" title="Tests">⚠️</a></td>
-    <td align="center"><a href="https://www.openhub.net/accounts/jayvdb"><img src="https://avatars.githubusercontent.com/u/15092?v=4?s=100" width="100px;" alt=""/><br /><sub><b>John Vandenberg</b></sub></a><br /><a href="#infra-jayvdb" title="Infrastructure (Hosting, Build-Tools, etc)">πŸš‡</a> <a href="#platform-jayvdb" title="Packaging/porting to new platform">πŸ“¦</a> <a href="https://github.com/requests-cache/requests-cache/commits?author=jayvdb" title="Tests">⚠️</a></td>
-    <td align="center"><a href="https://github.com/johnraz"><img src="https://avatars.githubusercontent.com/u/304164?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Jonathan Liuti</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Ajohnraz" title="Bug reports">πŸ›</a></td>
-    <td align="center"><a href="https://github.com/jonasjancarik"><img src="https://avatars.githubusercontent.com/u/2459191?v=4?s=100" width="100px;" alt=""/><br /><sub><b>JonΓ‘Ε‘ JančaΕ™Γ­k</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Ajonasjancarik" title="Bug reports">πŸ›</a></td>
-    <td align="center"><a href="https://github.com/JWCook"><img src="https://avatars.githubusercontent.com/u/419936?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Jordan Cook</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=JWCook" title="Code">πŸ’»</a> <a href="#maintenance-JWCook" title="Maintenance">🚧</a> <a href="#feature-JWCook" title="New features">✨</a> <a href="https://github.com/requests-cache/requests-cache/issues?q=author%3AJWCook" title="Bug reports">πŸ›</a> <a href="https://github.com/requests-cache/requests-cache/commits?author=JWCook" title="Tests">⚠️</a> <a href="https://github.com/requests-cache/requests-cache/commits?author=JWCook" title="Documentation">πŸ“–</a> <a href="#infra-JWCook" title="Infrastructure (Hosting, Build-Tools, etc)">πŸš‡</a></td>
-    <td align="center"><a href="http://jhermann.github.io/"><img src="https://avatars.githubusercontent.com/u/1068245?v=4?s=100" width="100px;" alt=""/><br /><sub><b>JΓΌrgen Hermann</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Ajhermann" title="Bug reports">πŸ›</a> <a href="#ideas-jhermann" title="Ideas, Planning, & Feedback">πŸ€”</a></td>
-  </tr>
-  <tr>
-    <td align="center"><a href="https://github.com/FredHappyface"><img src="https://avatars.githubusercontent.com/u/41634689?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Kieran W</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=FredHappyface" title="Documentation">πŸ“–</a> <a href="https://github.com/requests-cache/requests-cache/issues?q=author%3AFredHappyface" title="Bug reports">πŸ›</a></td>
-    <td align="center"><a href="https://github.com/MHellmund"><img src="https://avatars.githubusercontent.com/u/1593619?v=4?s=100" width="100px;" alt=""/><br /><sub><b>MHellmund</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3AMHellmund" title="Bug reports">πŸ›</a></td>
-    <td align="center"><a href="https://github.com/meggiman"><img src="https://avatars.githubusercontent.com/u/7403253?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Manuel Eggimann</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Ameggiman" title="Bug reports">πŸ›</a> <a href="https://github.com/requests-cache/requests-cache/commits?author=meggiman" title="Code">πŸ’»</a></td>
-    <td align="center"><a href="http://marc-abramowitz.com/"><img src="https://avatars.githubusercontent.com/u/305268?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Marc Abramowitz</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=msabramo" title="Code">πŸ’»</a> <a href="https://github.com/requests-cache/requests-cache/commits?author=msabramo" title="Documentation">πŸ“–</a></td>
-    <td align="center"><a href="https://gedmin.as/"><img src="https://avatars.githubusercontent.com/u/159967?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Marius Gedminas</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=mgedmin" title="Code">πŸ’»</a> <a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Amgedmin" title="Bug reports">πŸ›</a></td>
-    <td align="center"><a href="https://lab.ar90n.net/"><img src="https://avatars.githubusercontent.com/u/2285892?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Masahiro Wada</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=ar90n" title="Code">πŸ’»</a> <a href="#feature-ar90n" title="New features">✨</a></td>
-    <td align="center"><a href="https://santini.di.unimi.it/"><img src="https://avatars.githubusercontent.com/u/612826?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Massimo Santini</b></sub></a><br /><a href="#ideas-mapio" title="Ideas, Planning, & Feedback">πŸ€”</a></td>
-  </tr>
-  <tr>
-    <td align="center"><a href="http://www.mherman.org/"><img src="https://avatars.githubusercontent.com/u/2018167?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Michael Herman</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=mjhea0" title="Code">πŸ’»</a> <a href="https://github.com/requests-cache/requests-cache/commits?author=mjhea0" title="Documentation">πŸ“–</a></td>
-    <td align="center"><a href="https://mgorny.pl/"><img src="https://avatars.githubusercontent.com/u/110765?v=4?s=100" width="100px;" alt=""/><br /><sub><b>MichaΕ‚ GΓ³rny</b></sub></a><br /><a href="#infra-mgorny" title="Infrastructure (Hosting, Build-Tools, etc)">πŸš‡</a></td>
-    <td align="center"><a href="https://github.com/mnowotka"><img src="https://avatars.githubusercontent.com/u/837119?v=4?s=100" width="100px;" alt=""/><br /><sub><b>MichaΕ‚ Nowotka</b></sub></a><br /><a href="#ideas-mnowotka" title="Ideas, Planning, & Feedback">πŸ€”</a></td>
-    <td align="center"><a href="https://beaumont.dev/"><img src="https://avatars.githubusercontent.com/u/2266568?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Mike</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=michaelbeaumont" title="Code">πŸ’»</a> <a href="#feature-michaelbeaumont" title="New features">✨</a></td>
-    <td align="center"><a href="https://github.com/n-a-t-e"><img src="https://avatars.githubusercontent.com/u/26209011?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Nate</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3An-a-t-e" title="Bug reports">πŸ›</a></td>
-    <td align="center"><a href="https://nathancahill.com/"><img src="https://avatars.githubusercontent.com/u/1383872?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Nathan Cahill</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Anathancahill" title="Bug reports">πŸ›</a></td>
-    <td align="center"><a href="https://gitlab.com/kousu"><img src="https://avatars.githubusercontent.com/u/987487?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Nick</b></sub></a><br /><a href="#ideas-kousu" title="Ideas, Planning, & Feedback">πŸ€”</a></td>
-  </tr>
-  <tr>
-    <td align="center"><a href="https://github.com/olivierdalang"><img src="https://avatars.githubusercontent.com/u/1894106?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Olivier Dalang</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=olivierdalang" title="Code">πŸ’»</a></td>
-    <td align="center"><a href="https://github.com/parkerhancock"><img src="https://avatars.githubusercontent.com/u/633163?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Parker Hancock</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=parkerhancock" title="Code">πŸ’»</a> <a href="#feature-parkerhancock" title="New features">✨</a> <a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Aparkerhancock" title="Bug reports">πŸ›</a> <a href="https://github.com/requests-cache/requests-cache/commits?author=parkerhancock" title="Tests">⚠️</a> <a href="https://github.com/requests-cache/requests-cache/commits?author=parkerhancock" title="Documentation">πŸ“–</a> <a href="#security-parkerhancock" title="Security">πŸ›‘οΈ</a> <a href="#ideas-parkerhancock" title="Ideas, Planning, & Feedback">πŸ€”</a></td>
-    <td align="center"><a href="https://github.com/pkrefta"><img src="https://avatars.githubusercontent.com/u/565487?v=4?s=100" width="100px;" alt=""/><br /><sub><b>PaweΕ‚ Krefta</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Apkrefta" title="Bug reports">πŸ›</a></td>
-    <td align="center"><a href="https://phil.red/"><img src="https://avatars.githubusercontent.com/u/291575?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Philipp A.</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Aflying-sheep" title="Bug reports">πŸ›</a></td>
-    <td align="center"><a href="https://rasmuse.github.io/"><img src="https://avatars.githubusercontent.com/u/1210973?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Rasmus Einarsson</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Arasmuse" title="Bug reports">πŸ›</a></td>
-    <td align="center"><a href="https://roderic.ca/"><img src="https://avatars.githubusercontent.com/u/6867226?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Roderic Day</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3ARodericDay" title="Bug reports">πŸ›</a></td>
-    <td align="center"><a href="https://github.com/reclosedev"><img src="https://avatars.githubusercontent.com/u/660112?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Roman Haritonov</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=reclosedev" title="Code">πŸ’»</a> <a href="#maintenance-reclosedev" title="Maintenance">🚧</a> <a href="#feature-reclosedev" title="New features">✨</a> <a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Areclosedev" title="Bug reports">πŸ›</a> <a href="https://github.com/requests-cache/requests-cache/commits?author=reclosedev" title="Tests">⚠️</a> <a href="https://github.com/requests-cache/requests-cache/commits?author=reclosedev" title="Documentation">πŸ“–</a> <a href="#infra-reclosedev" title="Infrastructure (Hosting, Build-Tools, etc)">πŸš‡</a></td>
-  </tr>
-  <tr>
-    <td align="center"><a href="https://www.facebook.com/avasamdev"><img src="https://avatars.githubusercontent.com/u/1350584?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Samuel T.</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3AAvasam" title="Bug reports">πŸ›</a> <a href="#ideas-Avasam" title="Ideas, Planning, & Feedback">πŸ€”</a></td>
-    <td align="center"><a href="https://sebastian-hoeffner.de/"><img src="https://avatars.githubusercontent.com/u/1836815?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Sebastian HΓΆffner</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=shoeffner" title="Code">πŸ’»</a> <a href="#feature-shoeffner" title="New features">✨</a> <a href="https://github.com/requests-cache/requests-cache/commits?author=shoeffner" title="Tests">⚠️</a> <a href="#ideas-shoeffner" title="Ideas, Planning, & Feedback">πŸ€”</a></td>
-    <td align="center"><a href="https://github.com/grubberr"><img src="https://avatars.githubusercontent.com/u/195743?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Serhii Chvaliuk</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Agrubberr" title="Bug reports">πŸ›</a> <a href="https://github.com/requests-cache/requests-cache/commits?author=grubberr" title="Code">πŸ’»</a></td>
-    <td align="center"><a href="https://sbiewald.de/"><img src="https://avatars.githubusercontent.com/u/5983372?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Simon Biewald</b></sub></a><br /><a href="#security-Varbin" title="Security">πŸ›‘οΈ</a> <a href="#ideas-Varbin" title="Ideas, Planning, & Feedback">πŸ€”</a></td>
-    <td align="center"><a href="https://github.com/jseabold"><img src="https://avatars.githubusercontent.com/u/296164?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Skipper Seabold</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Ajseabold" title="Bug reports">πŸ›</a></td>
-    <td align="center"><a href="http://pathmind.com/"><img src="https://avatars.githubusercontent.com/u/1197406?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Slin Lee</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=slinlee" title="Documentation">πŸ“–</a></td>
-    <td align="center"><a href="https://www.stavros.io/"><img src="https://avatars.githubusercontent.com/u/23648?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Stavros Korokithakis</b></sub></a><br /><a href="#infra-skorokithakis" title="Infrastructure (Hosting, Build-Tools, etc)">πŸš‡</a> <a href="#tool-skorokithakis" title="Tools">πŸ”§</a> <a href="https://github.com/requests-cache/requests-cache/commits?author=skorokithakis" title="Documentation">πŸ“–</a></td>
-  </tr>
-  <tr>
-    <td align="center"><a href="https://cheginit.github.io/"><img src="https://avatars.githubusercontent.com/u/13016644?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Taher Chegini</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Acheginit" title="Bug reports">πŸ›</a></td>
-    <td align="center"><a href="https://vladimir.panteleev.md/"><img src="https://avatars.githubusercontent.com/u/160894?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Vladimir Panteleev</b></sub></a><br /><a href="#ideas-CyberShadow" title="Ideas, Planning, & Feedback">πŸ€”</a></td>
-    <td align="center"><a href="https://sansec.io/"><img src="https://avatars.githubusercontent.com/u/1145479?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Willem de Groot</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=gwillem" title="Code">πŸ’»</a> <a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Agwillem" title="Bug reports">πŸ›</a></td>
-    <td align="center"><a href="https://github.com/WouterVH"><img src="https://avatars.githubusercontent.com/u/469509?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Wouter Vanden Hove</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3AWouterVH" title="Bug reports">πŸ›</a></td>
-    <td align="center"><a href="https://github.com/YetAnotherNerd"><img src="https://avatars.githubusercontent.com/u/320738?v=4?s=100" width="100px;" alt=""/><br /><sub><b>YetAnotherNerd</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=YetAnotherNerd" title="Code">πŸ’»</a> <a href="#feature-YetAnotherNerd" title="New features">✨</a> <a href="https://github.com/requests-cache/requests-cache/issues?q=author%3AYetAnotherNerd" title="Bug reports">πŸ›</a></td>
-    <td align="center"><a href="https://github.com/aaron-mf1"><img src="https://avatars.githubusercontent.com/u/65560918?v=4?s=100" width="100px;" alt=""/><br /><sub><b>aaron-mf1</b></sub></a><br /><a href="#ideas-aaron-mf1" title="Ideas, Planning, & Feedback">πŸ€”</a></td>
-    <td align="center"><a href="https://github.com/coryairbhb"><img src="https://avatars.githubusercontent.com/u/50755629?v=4?s=100" width="100px;" alt=""/><br /><sub><b>coryairbhb</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Acoryairbhb" title="Bug reports">πŸ›</a></td>
-  </tr>
-  <tr>
-    <td align="center"><a href="https://github.com/craigls"><img src="https://avatars.githubusercontent.com/u/972350?v=4?s=100" width="100px;" alt=""/><br /><sub><b>craig</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=craigls" title="Code">πŸ’»</a> <a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Acraigls" title="Bug reports">πŸ›</a></td>
-    <td align="center"><a href="https://stackoverflow.com/users/86643/denis"><img src="https://avatars.githubusercontent.com/u/1280390?v=4?s=100" width="100px;" alt=""/><br /><sub><b>denis-bz</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Adenis-bz" title="Bug reports">πŸ›</a></td>
-    <td align="center"><a href="https://gir.st/"><img src="https://avatars.githubusercontent.com/u/11820748?v=4?s=100" width="100px;" alt=""/><br /><sub><b>girst</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Agirst" title="Bug reports">πŸ›</a></td>
-    <td align="center"><a href="https://github.com/gorogoroumaru"><img src="https://avatars.githubusercontent.com/u/30716350?v=4?s=100" width="100px;" alt=""/><br /><sub><b>gorogoroumaru</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=gorogoroumaru" title="Code">πŸ’»</a></td>
-    <td align="center"><a href="https://github.com/harvey251"><img src="https://avatars.githubusercontent.com/u/33844174?v=4?s=100" width="100px;" alt=""/><br /><sub><b>harvey251</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Aharvey251" title="Bug reports">πŸ›</a></td>
-    <td align="center"><a href="https://github.com/mbarkhau"><img src="https://avatars.githubusercontent.com/u/446561?v=4?s=100" width="100px;" alt=""/><br /><sub><b>mbarkhau</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=mbarkhau" title="Code">πŸ’»</a> <a href="https://github.com/requests-cache/requests-cache/commits?author=mbarkhau" title="Tests">⚠️</a> <a href="#infra-mbarkhau" title="Infrastructure (Hosting, Build-Tools, etc)">πŸš‡</a> <a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Ambarkhau" title="Bug reports">πŸ›</a></td>
-    <td align="center"><a href="https://github.com/shiftinv"><img src="https://avatars.githubusercontent.com/u/8530778?v=4?s=100" width="100px;" alt=""/><br /><sub><b>shiftinv</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=shiftinv" title="Code">πŸ’»</a> <a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Ashiftinv" title="Bug reports">πŸ›</a></td>
-  </tr>
-  <tr>
-    <td align="center"><a href="https://www.witionstheme.com/"><img src="https://avatars.githubusercontent.com/u/55755139?v=4?s=100" width="100px;" alt=""/><br /><sub><b>witionstheme</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Awitionstheme" title="Bug reports">πŸ›</a></td>
-  </tr>
+  <tbody>
+    <tr>
+      <td align="center" valign="top" width="14.28%"><a href="https://meltano.com/"><img src="https://avatars.githubusercontent.com/u/18150651?v=4?s=100" width="100px;" alt="Aaron ("AJ") Steers"/><br /><sub><b>Aaron ("AJ") Steers</b></sub></a><br /><a href="#ideas-aaronsteers" title="Ideas, Planning, & Feedback">πŸ€”</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="http://aljohri.com/"><img src="https://avatars.githubusercontent.com/u/2790092?v=4?s=100" width="100px;" alt="Al Johri"/><br /><sub><b>Al Johri</b></sub></a><br /><a href="#ideas-AlJohri" title="Ideas, Planning, & Feedback">πŸ€”</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="http://grep.ro/"><img src="https://avatars.githubusercontent.com/u/27617?v=4?s=100" width="100px;" alt="Alex Morega"/><br /><sub><b>Alex Morega</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=mgax" title="Documentation">πŸ“–</a> <a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Amgax" title="Bug reports">πŸ›</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/meowcoder"><img src="https://avatars.githubusercontent.com/u/287868?v=4?s=100" width="100px;" alt="Alex Sinitsin"/><br /><sub><b>Alex Sinitsin</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=meowcoder" title="Code">πŸ’»</a> <a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Ameowcoder" title="Bug reports">πŸ›</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/andrewkittredge"><img src="https://avatars.githubusercontent.com/u/430274?v=4?s=100" width="100px;" alt="Andrew Kittredge"/><br /><sub><b>Andrew Kittredge</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=andrewkittredge" title="Documentation">πŸ“–</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/thatguystone"><img src="https://avatars.githubusercontent.com/u/921573?v=4?s=100" width="100px;" alt="Andrew Stone"/><br /><sub><b>Andrew Stone</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Athatguystone" title="Bug reports">πŸ›</a> <a href="https://github.com/requests-cache/requests-cache/commits?author=thatguystone" title="Code">πŸ’»</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://themiurgo.github.io/website"><img src="https://avatars.githubusercontent.com/u/920728?v=4?s=100" width="100px;" alt="Antonio Lima"/><br /><sub><b>Antonio Lima</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=themiurgo" title="Code">πŸ’»</a> <a href="#feature-themiurgo" title="New features">✨</a> <a href="#ideas-themiurgo" title="Ideas, Planning, & Feedback">πŸ€”</a></td>
+    </tr>
+    <tr>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/borisdan"><img src="https://avatars.githubusercontent.com/u/5167646?v=4?s=100" width="100px;" alt="Boris Danilovich"/><br /><sub><b>Boris Danilovich</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=borisdan" title="Code">πŸ’»</a> <a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Aborisdan" title="Bug reports">πŸ›</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="http://char101.github.io/"><img src="https://avatars.githubusercontent.com/u/71255?v=4?s=100" width="100px;" alt="Charles"/><br /><sub><b>Charles</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Achar101" title="Bug reports">πŸ›</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/CharString"><img src="https://avatars.githubusercontent.com/u/325643?v=4?s=100" width="100px;" alt="Chris Wesseling"/><br /><sub><b>Chris Wesseling</b></sub></a><br /><a href="#ideas-CharString" title="Ideas, Planning, & Feedback">πŸ€”</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/christopher-dG"><img src="https://avatars.githubusercontent.com/u/17228795?v=4?s=100" width="100px;" alt="Chris de Graaf"/><br /><sub><b>Chris de Graaf</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=christopher-dG" title="Code">πŸ’»</a> <a href="#feature-christopher-dG" title="New features">✨</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="http://lazka.github.io/"><img src="https://avatars.githubusercontent.com/u/991986?v=4?s=100" width="100px;" alt="Christoph Reiter"/><br /><sub><b>Christoph Reiter</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Alazka" title="Bug reports">πŸ›</a> <a href="#ideas-lazka" title="Ideas, Planning, & Feedback">πŸ€”</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://monotreme.club/"><img src="https://avatars.githubusercontent.com/u/208018?v=4?s=100" width="100px;" alt="Daniel Holth"/><br /><sub><b>Daniel Holth</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=dholth" title="Code">πŸ’»</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="http://twitter.com/daniel_aus_wa"><img src="https://avatars.githubusercontent.com/u/128286?v=4?s=100" width="100px;" alt="Daniel Rech"/><br /><sub><b>Daniel Rech</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=dmr" title="Code">πŸ’»</a> <a href="https://github.com/requests-cache/requests-cache/commits?author=dmr" title="Documentation">πŸ“–</a></td>
+    </tr>
+    <tr>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/gismaps"><img src="https://avatars.githubusercontent.com/u/65092729?v=4?s=100" width="100px;" alt="David GIS"/><br /><sub><b>David GIS</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Agismaps" title="Bug reports">πŸ›</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/DavidSchmidt00"><img src="https://avatars.githubusercontent.com/u/43894937?v=4?s=100" width="100px;" alt="David Schmidt"/><br /><sub><b>David Schmidt</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3ADavidSchmidt00" title="Bug reports">πŸ›</a> <a href="https://github.com/requests-cache/requests-cache/commits?author=DavidSchmidt00" title="Code">πŸ’»</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="http://davidstosik.github.io/"><img src="https://avatars.githubusercontent.com/u/816901?v=4?s=100" width="100px;" alt="David Stosik"/><br /><sub><b>David Stosik</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Adavidstosik" title="Bug reports">πŸ›</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/scraperdragon"><img src="https://avatars.githubusercontent.com/u/1957682?v=4?s=100" width="100px;" alt="Dragon Dave McKee"/><br /><sub><b>Dragon Dave McKee</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=scraperdragon" title="Code">πŸ’»</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="http://edwardbetts.com/"><img src="https://avatars.githubusercontent.com/u/3818?v=4?s=100" width="100px;" alt="Edward Betts"/><br /><sub><b>Edward Betts</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=EdwardBetts" title="Code">πŸ’»</a> <a href="https://github.com/requests-cache/requests-cache/commits?author=EdwardBetts" title="Documentation">πŸ“–</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/glensc"><img src="https://avatars.githubusercontent.com/u/199095?v=4?s=100" width="100px;" alt="Elan RuusamΓ€e"/><br /><sub><b>Elan RuusamΓ€e</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Aglensc" title="Bug reports">πŸ›</a> <a href="https://github.com/requests-cache/requests-cache/commits?author=glensc" title="Documentation">πŸ“–</a> <a href="#ideas-glensc" title="Ideas, Planning, & Feedback">πŸ€”</a> <a href="https://github.com/requests-cache/requests-cache/commits?author=glensc" title="Code">πŸ’»</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/Querela"><img src="https://avatars.githubusercontent.com/u/1648294?v=4?s=100" width="100px;" alt="Erik KΓΆrner"/><br /><sub><b>Erik KΓΆrner</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3AQuerela" title="Bug reports">πŸ›</a> <a href="#ideas-Querela" title="Ideas, Planning, & Feedback">πŸ€”</a></td>
+    </tr>
+    <tr>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/dericke"><img src="https://avatars.githubusercontent.com/u/3587185?v=4?s=100" width="100px;" alt="Evan D"/><br /><sub><b>Evan D</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Adericke" title="Bug reports">πŸ›</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://femtotrader.github.io/"><img src="https://avatars.githubusercontent.com/u/5049737?v=4?s=100" width="100px;" alt="FemtoTrader"/><br /><sub><b>FemtoTrader</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=femtotrader" title="Code">πŸ’»</a> <a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Afemtotrader" title="Bug reports">πŸ›</a> <a href="#ideas-femtotrader" title="Ideas, Planning, & Feedback">πŸ€”</a> <a href="#feature-femtotrader" title="New features">✨</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="http://www.floriandemmer.com/"><img src="https://avatars.githubusercontent.com/u/630975?v=4?s=100" width="100px;" alt="Florian Demmer"/><br /><sub><b>Florian Demmer</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=fdemmer" title="Code">πŸ’»</a> <a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Afdemmer" title="Bug reports">πŸ›</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://gdr.name/"><img src="https://avatars.githubusercontent.com/u/315648?v=4?s=100" width="100px;" alt="GDR!"/><br /><sub><b>GDR!</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Agjedeer" title="Bug reports">πŸ›</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/Garrett-R"><img src="https://avatars.githubusercontent.com/u/6614695?v=4?s=100" width="100px;" alt="Garrett Reynolds"/><br /><sub><b>Garrett Reynolds</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=Garrett-R" title="Code">πŸ’»</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/gregdingle"><img src="https://avatars.githubusercontent.com/u/28797?v=4?s=100" width="100px;" alt="Greg Dingle"/><br /><sub><b>Greg Dingle</b></sub></a><br /><a href="#ideas-gregdingle" title="Ideas, Planning, & Feedback">πŸ€”</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/chengguangnan"><img src="https://avatars.githubusercontent.com/u/861069?v=4?s=100" width="100px;" alt="Guangnan Cheng"/><br /><sub><b>Guangnan Cheng</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=chengguangnan" title="Code">πŸ’»</a> <a href="#feature-chengguangnan" title="New features">✨</a></td>
+    </tr>
+    <tr>
+      <td align="center" valign="top" width="14.28%"><a href="https://honzajavorek.cz/"><img src="https://avatars.githubusercontent.com/u/283441?v=4?s=100" width="100px;" alt="Honza Javorek"/><br /><sub><b>Honza Javorek</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Ahonzajavorek" title="Bug reports">πŸ›</a> <a href="#ideas-honzajavorek" title="Ideas, Planning, & Feedback">πŸ€”</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/Iftahh"><img src="https://avatars.githubusercontent.com/u/798544?v=4?s=100" width="100px;" alt="Iftah"/><br /><sub><b>Iftah</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3AIftahh" title="Bug reports">πŸ›</a> <a href="https://github.com/requests-cache/requests-cache/commits?author=Iftahh" title="Code">πŸ’»</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/libbkmz"><img src="https://avatars.githubusercontent.com/u/1144960?v=4?s=100" width="100px;" alt="Ilya"/><br /><sub><b>Ilya</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=libbkmz" title="Code">πŸ’»</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://rob.gant.ninja/"><img src="https://avatars.githubusercontent.com/u/710553?v=4?s=100" width="100px;" alt="J Rob Gant"/><br /><sub><b>J Rob Gant</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=rgant" title="Code">πŸ’»</a> <a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Argant" title="Bug reports">πŸ›</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://jacebrowning.info/"><img src="https://avatars.githubusercontent.com/u/939501?v=4?s=100" width="100px;" alt="Jace Browning"/><br /><sub><b>Jace Browning</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=jacebrowning" title="Code">πŸ’»</a> <a href="https://github.com/requests-cache/requests-cache/commits?author=jacebrowning" title="Documentation">πŸ“–</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/jsemric"><img src="https://avatars.githubusercontent.com/u/22685064?v=4?s=100" width="100px;" alt="Jakub Semrič"/><br /><sub><b>Jakub Semrič</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=jsemric" title="Code">πŸ’»</a> <a href="#ideas-jsemric" title="Ideas, Planning, & Feedback">πŸ€”</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://www.patentbots.com/"><img src="https://avatars.githubusercontent.com/u/55557751?v=4?s=100" width="100px;" alt="Jeff O'Neill"/><br /><sub><b>Jeff O'Neill</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Apb-jeff-oneill" title="Bug reports">πŸ›</a></td>
+    </tr>
+    <tr>
+      <td align="center" valign="top" width="14.28%"><a href="http://jeremydouglass.com/"><img src="https://avatars.githubusercontent.com/u/798570?v=4?s=100" width="100px;" alt="Jeremy Douglass"/><br /><sub><b>Jeremy Douglass</b></sub></a><br /><a href="#ideas-jeremydouglass" title="Ideas, Planning, & Feedback">πŸ€”</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/jkwill87"><img src="https://avatars.githubusercontent.com/u/4343678?v=4?s=100" width="100px;" alt="Jessy Williams"/><br /><sub><b>Jessy Williams</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=jkwill87" title="Code">πŸ’»</a> <a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Ajkwill87" title="Bug reports">πŸ›</a> <a href="https://github.com/requests-cache/requests-cache/commits?author=jkwill87" title="Tests">⚠️</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://www.openhub.net/accounts/jayvdb"><img src="https://avatars.githubusercontent.com/u/15092?v=4?s=100" width="100px;" alt="John Vandenberg"/><br /><sub><b>John Vandenberg</b></sub></a><br /><a href="#infra-jayvdb" title="Infrastructure (Hosting, Build-Tools, etc)">πŸš‡</a> <a href="#platform-jayvdb" title="Packaging/porting to new platform">πŸ“¦</a> <a href="https://github.com/requests-cache/requests-cache/commits?author=jayvdb" title="Tests">⚠️</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/johnraz"><img src="https://avatars.githubusercontent.com/u/304164?v=4?s=100" width="100px;" alt="Jonathan Liuti"/><br /><sub><b>Jonathan Liuti</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Ajohnraz" title="Bug reports">πŸ›</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/jonasjancarik"><img src="https://avatars.githubusercontent.com/u/2459191?v=4?s=100" width="100px;" alt="JonΓ‘Ε‘ JančaΕ™Γ­k"/><br /><sub><b>JonΓ‘Ε‘ JančaΕ™Γ­k</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Ajonasjancarik" title="Bug reports">πŸ›</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/JWCook"><img src="https://avatars.githubusercontent.com/u/419936?v=4?s=100" width="100px;" alt="Jordan Cook"/><br /><sub><b>Jordan Cook</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=JWCook" title="Code">πŸ’»</a> <a href="#maintenance-JWCook" title="Maintenance">🚧</a> <a href="#feature-JWCook" title="New features">✨</a> <a href="https://github.com/requests-cache/requests-cache/issues?q=author%3AJWCook" title="Bug reports">πŸ›</a> <a href="https://github.com/requests-cache/requests-cache/commits?author=JWCook" title="Tests">⚠️</a> <a href="https://github.com/requests-cache/requests-cache/commits?author=JWCook" title="Documentation">πŸ“–</a> <a href="#infra-JWCook" title="Infrastructure (Hosting, Build-Tools, etc)">πŸš‡</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="http://jhermann.github.io/"><img src="https://avatars.githubusercontent.com/u/1068245?v=4?s=100" width="100px;" alt="JΓΌrgen Hermann"/><br /><sub><b>JΓΌrgen Hermann</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Ajhermann" title="Bug reports">πŸ›</a> <a href="#ideas-jhermann" title="Ideas, Planning, & Feedback">πŸ€”</a></td>
+    </tr>
+    <tr>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/FredHappyface"><img src="https://avatars.githubusercontent.com/u/41634689?v=4?s=100" width="100px;" alt="Kieran W"/><br /><sub><b>Kieran W</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=FredHappyface" title="Documentation">πŸ“–</a> <a href="https://github.com/requests-cache/requests-cache/issues?q=author%3AFredHappyface" title="Bug reports">πŸ›</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/MHellmund"><img src="https://avatars.githubusercontent.com/u/1593619?v=4?s=100" width="100px;" alt="MHellmund"/><br /><sub><b>MHellmund</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3AMHellmund" title="Bug reports">πŸ›</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/meggiman"><img src="https://avatars.githubusercontent.com/u/7403253?v=4?s=100" width="100px;" alt="Manuel Eggimann"/><br /><sub><b>Manuel Eggimann</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Ameggiman" title="Bug reports">πŸ›</a> <a href="https://github.com/requests-cache/requests-cache/commits?author=meggiman" title="Code">πŸ’»</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="http://marc-abramowitz.com/"><img src="https://avatars.githubusercontent.com/u/305268?v=4?s=100" width="100px;" alt="Marc Abramowitz"/><br /><sub><b>Marc Abramowitz</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=msabramo" title="Code">πŸ’»</a> <a href="https://github.com/requests-cache/requests-cache/commits?author=msabramo" title="Documentation">πŸ“–</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://gedmin.as/"><img src="https://avatars.githubusercontent.com/u/159967?v=4?s=100" width="100px;" alt="Marius Gedminas"/><br /><sub><b>Marius Gedminas</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=mgedmin" title="Code">πŸ’»</a> <a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Amgedmin" title="Bug reports">πŸ›</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="http://www.zopatista.com/"><img src="https://avatars.githubusercontent.com/u/46775?v=4?s=100" width="100px;" alt="Martijn Pieters"/><br /><sub><b>Martijn Pieters</b></sub></a><br /><a href="#ideas-mjpieters" title="Ideas, Planning, & Feedback">πŸ€”</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://lab.ar90n.net/"><img src="https://avatars.githubusercontent.com/u/2285892?v=4?s=100" width="100px;" alt="Masahiro Wada"/><br /><sub><b>Masahiro Wada</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=ar90n" title="Code">πŸ’»</a> <a href="#feature-ar90n" title="New features">✨</a></td>
+    </tr>
+    <tr>
+      <td align="center" valign="top" width="14.28%"><a href="https://santini.di.unimi.it/"><img src="https://avatars.githubusercontent.com/u/612826?v=4?s=100" width="100px;" alt="Massimo Santini"/><br /><sub><b>Massimo Santini</b></sub></a><br /><a href="#ideas-mapio" title="Ideas, Planning, & Feedback">πŸ€”</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="http://www.mherman.org/"><img src="https://avatars.githubusercontent.com/u/2018167?v=4?s=100" width="100px;" alt="Michael Herman"/><br /><sub><b>Michael Herman</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=mjhea0" title="Code">πŸ’»</a> <a href="https://github.com/requests-cache/requests-cache/commits?author=mjhea0" title="Documentation">πŸ“–</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://mgorny.pl/"><img src="https://avatars.githubusercontent.com/u/110765?v=4?s=100" width="100px;" alt="MichaΕ‚ GΓ³rny"/><br /><sub><b>MichaΕ‚ GΓ³rny</b></sub></a><br /><a href="#infra-mgorny" title="Infrastructure (Hosting, Build-Tools, etc)">πŸš‡</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/mnowotka"><img src="https://avatars.githubusercontent.com/u/837119?v=4?s=100" width="100px;" alt="MichaΕ‚ Nowotka"/><br /><sub><b>MichaΕ‚ Nowotka</b></sub></a><br /><a href="#ideas-mnowotka" title="Ideas, Planning, & Feedback">πŸ€”</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://beaumont.dev/"><img src="https://avatars.githubusercontent.com/u/2266568?v=4?s=100" width="100px;" alt="Mike"/><br /><sub><b>Mike</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=michaelbeaumont" title="Code">πŸ’»</a> <a href="#feature-michaelbeaumont" title="New features">✨</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/n-a-t-e"><img src="https://avatars.githubusercontent.com/u/26209011?v=4?s=100" width="100px;" alt="Nate"/><br /><sub><b>Nate</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3An-a-t-e" title="Bug reports">πŸ›</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://nathancahill.com/"><img src="https://avatars.githubusercontent.com/u/1383872?v=4?s=100" width="100px;" alt="Nathan Cahill"/><br /><sub><b>Nathan Cahill</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Anathancahill" title="Bug reports">πŸ›</a></td>
+    </tr>
+    <tr>
+      <td align="center" valign="top" width="14.28%"><a href="https://gitlab.com/kousu"><img src="https://avatars.githubusercontent.com/u/987487?v=4?s=100" width="100px;" alt="Nick"/><br /><sub><b>Nick</b></sub></a><br /><a href="#ideas-kousu" title="Ideas, Planning, & Feedback">πŸ€”</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/olivierdalang"><img src="https://avatars.githubusercontent.com/u/1894106?v=4?s=100" width="100px;" alt="Olivier Dalang"/><br /><sub><b>Olivier Dalang</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=olivierdalang" title="Code">πŸ’»</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/carlosal1015"><img src="https://avatars.githubusercontent.com/u/21283014?v=4?s=100" width="100px;" alt="Oromion"/><br /><sub><b>Oromion</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Acarlosal1015" title="Bug reports">πŸ›</a> <a href="#platform-carlosal1015" title="Packaging/porting to new platform">πŸ“¦</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/parkerhancock"><img src="https://avatars.githubusercontent.com/u/633163?v=4?s=100" width="100px;" alt="Parker Hancock"/><br /><sub><b>Parker Hancock</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=parkerhancock" title="Code">πŸ’»</a> <a href="#feature-parkerhancock" title="New features">✨</a> <a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Aparkerhancock" title="Bug reports">πŸ›</a> <a href="https://github.com/requests-cache/requests-cache/commits?author=parkerhancock" title="Tests">⚠️</a> <a href="https://github.com/requests-cache/requests-cache/commits?author=parkerhancock" title="Documentation">πŸ“–</a> <a href="#security-parkerhancock" title="Security">πŸ›‘οΈ</a> <a href="#ideas-parkerhancock" title="Ideas, Planning, & Feedback">πŸ€”</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/pfmoore"><img src="https://avatars.githubusercontent.com/u/1110419?v=4?s=100" width="100px;" alt="Paul Moore"/><br /><sub><b>Paul Moore</b></sub></a><br /><a href="#ideas-pfmoore" title="Ideas, Planning, & Feedback">πŸ€”</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/pkrefta"><img src="https://avatars.githubusercontent.com/u/565487?v=4?s=100" width="100px;" alt="PaweΕ‚ Krefta"/><br /><sub><b>PaweΕ‚ Krefta</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Apkrefta" title="Bug reports">πŸ›</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://phil.red/"><img src="https://avatars.githubusercontent.com/u/291575?v=4?s=100" width="100px;" alt="Philipp A."/><br /><sub><b>Philipp A.</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Aflying-sheep" title="Bug reports">πŸ›</a></td>
+    </tr>
+    <tr>
+      <td align="center" valign="top" width="14.28%"><a href="https://rasmuse.github.io/"><img src="https://avatars.githubusercontent.com/u/1210973?v=4?s=100" width="100px;" alt="Rasmus Einarsson"/><br /><sub><b>Rasmus Einarsson</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Arasmuse" title="Bug reports">πŸ›</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://roderic.ca/"><img src="https://avatars.githubusercontent.com/u/6867226?v=4?s=100" width="100px;" alt="Roderic Day"/><br /><sub><b>Roderic Day</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3ARodericDay" title="Bug reports">πŸ›</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/reclosedev"><img src="https://avatars.githubusercontent.com/u/660112?v=4?s=100" width="100px;" alt="Roman Haritonov"/><br /><sub><b>Roman Haritonov</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=reclosedev" title="Code">πŸ’»</a> <a href="#maintenance-reclosedev" title="Maintenance">🚧</a> <a href="#feature-reclosedev" title="New features">✨</a> <a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Areclosedev" title="Bug reports">πŸ›</a> <a href="https://github.com/requests-cache/requests-cache/commits?author=reclosedev" title="Tests">⚠️</a> <a href="https://github.com/requests-cache/requests-cache/commits?author=reclosedev" title="Documentation">πŸ“–</a> <a href="#infra-reclosedev" title="Infrastructure (Hosting, Build-Tools, etc)">πŸš‡</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://www.facebook.com/avasamdev"><img src="https://avatars.githubusercontent.com/u/1350584?v=4?s=100" width="100px;" alt="Samuel T."/><br /><sub><b>Samuel T.</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3AAvasam" title="Bug reports">πŸ›</a> <a href="#ideas-Avasam" title="Ideas, Planning, & Feedback">πŸ€”</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://sebastian-hoeffner.de/"><img src="https://avatars.githubusercontent.com/u/1836815?v=4?s=100" width="100px;" alt="Sebastian HΓΆffner"/><br /><sub><b>Sebastian HΓΆffner</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=shoeffner" title="Code">πŸ’»</a> <a href="#feature-shoeffner" title="New features">✨</a> <a href="https://github.com/requests-cache/requests-cache/commits?author=shoeffner" title="Tests">⚠️</a> <a href="#ideas-shoeffner" title="Ideas, Planning, & Feedback">πŸ€”</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/grubberr"><img src="https://avatars.githubusercontent.com/u/195743?v=4?s=100" width="100px;" alt="Serhii Chvaliuk"/><br /><sub><b>Serhii Chvaliuk</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Agrubberr" title="Bug reports">πŸ›</a> <a href="https://github.com/requests-cache/requests-cache/commits?author=grubberr" title="Code">πŸ’»</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://sbiewald.de/"><img src="https://avatars.githubusercontent.com/u/5983372?v=4?s=100" width="100px;" alt="Simon Biewald"/><br /><sub><b>Simon Biewald</b></sub></a><br /><a href="#security-Varbin" title="Security">πŸ›‘οΈ</a> <a href="#ideas-Varbin" title="Ideas, Planning, & Feedback">πŸ€”</a></td>
+    </tr>
+    <tr>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/sleiner"><img src="https://avatars.githubusercontent.com/u/6379313?v=4?s=100" width="100px;" alt="Simon Leiner"/><br /><sub><b>Simon Leiner</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=sleiner" title="Code">πŸ’»</a> <a href="#feature-sleiner" title="New features">✨</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/jseabold"><img src="https://avatars.githubusercontent.com/u/296164?v=4?s=100" width="100px;" alt="Skipper Seabold"/><br /><sub><b>Skipper Seabold</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Ajseabold" title="Bug reports">πŸ›</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="http://pathmind.com/"><img src="https://avatars.githubusercontent.com/u/1197406?v=4?s=100" width="100px;" alt="Slin Lee"/><br /><sub><b>Slin Lee</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=slinlee" title="Documentation">πŸ“–</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://www.stavros.io/"><img src="https://avatars.githubusercontent.com/u/23648?v=4?s=100" width="100px;" alt="Stavros Korokithakis"/><br /><sub><b>Stavros Korokithakis</b></sub></a><br /><a href="#infra-skorokithakis" title="Infrastructure (Hosting, Build-Tools, etc)">πŸš‡</a> <a href="#tool-skorokithakis" title="Tools">πŸ”§</a> <a href="https://github.com/requests-cache/requests-cache/commits?author=skorokithakis" title="Documentation">πŸ“–</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://cheginit.github.io/"><img src="https://avatars.githubusercontent.com/u/13016644?v=4?s=100" width="100px;" alt="Taher Chegini"/><br /><sub><b>Taher Chegini</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Acheginit" title="Bug reports">πŸ›</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/TheTechromancer"><img src="https://avatars.githubusercontent.com/u/20261699?v=4?s=100" width="100px;" alt="TheTechromancer"/><br /><sub><b>TheTechromancer</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3ATheTechromancer" title="Bug reports">πŸ›</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/ValueRaider"><img src="https://avatars.githubusercontent.com/u/96923577?v=4?s=100" width="100px;" alt="ValueRaider"/><br /><sub><b>ValueRaider</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=ValueRaider" title="Documentation">πŸ“–</a></td>
+    </tr>
+    <tr>
+      <td align="center" valign="top" width="14.28%"><a href="https://vladimir.panteleev.md/"><img src="https://avatars.githubusercontent.com/u/160894?v=4?s=100" width="100px;" alt="Vladimir Panteleev"/><br /><sub><b>Vladimir Panteleev</b></sub></a><br /><a href="#ideas-CyberShadow" title="Ideas, Planning, & Feedback">πŸ€”</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://sansec.io/"><img src="https://avatars.githubusercontent.com/u/1145479?v=4?s=100" width="100px;" alt="Willem de Groot"/><br /><sub><b>Willem de Groot</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=gwillem" title="Code">πŸ’»</a> <a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Agwillem" title="Bug reports">πŸ›</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/WouterVH"><img src="https://avatars.githubusercontent.com/u/469509?v=4?s=100" width="100px;" alt="Wouter Vanden Hove"/><br /><sub><b>Wouter Vanden Hove</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3AWouterVH" title="Bug reports">πŸ›</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/YetAnotherNerd"><img src="https://avatars.githubusercontent.com/u/320738?v=4?s=100" width="100px;" alt="YetAnotherNerd"/><br /><sub><b>YetAnotherNerd</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=YetAnotherNerd" title="Code">πŸ’»</a> <a href="#feature-YetAnotherNerd" title="New features">✨</a> <a href="https://github.com/requests-cache/requests-cache/issues?q=author%3AYetAnotherNerd" title="Bug reports">πŸ›</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/aaron-mf1"><img src="https://avatars.githubusercontent.com/u/65560918?v=4?s=100" width="100px;" alt="aaron-mf1"/><br /><sub><b>aaron-mf1</b></sub></a><br /><a href="#ideas-aaron-mf1" title="Ideas, Planning, & Feedback">πŸ€”</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/coryairbhb"><img src="https://avatars.githubusercontent.com/u/50755629?v=4?s=100" width="100px;" alt="coryairbhb"/><br /><sub><b>coryairbhb</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Acoryairbhb" title="Bug reports">πŸ›</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/craigls"><img src="https://avatars.githubusercontent.com/u/972350?v=4?s=100" width="100px;" alt="craig"/><br /><sub><b>craig</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=craigls" title="Code">πŸ’»</a> <a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Acraigls" title="Bug reports">πŸ›</a></td>
+    </tr>
+    <tr>
+      <td align="center" valign="top" width="14.28%"><a href="https://stackoverflow.com/users/86643/denis"><img src="https://avatars.githubusercontent.com/u/1280390?v=4?s=100" width="100px;" alt="denis-bz"/><br /><sub><b>denis-bz</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Adenis-bz" title="Bug reports">πŸ›</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/eserdk"><img src="https://avatars.githubusercontent.com/u/16106844?v=4?s=100" width="100px;" alt="eserdk"/><br /><sub><b>eserdk</b></sub></a><br /><a href="#ideas-eserdk" title="Ideas, Planning, & Feedback">πŸ€”</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://gir.st/"><img src="https://avatars.githubusercontent.com/u/11820748?v=4?s=100" width="100px;" alt="girst"/><br /><sub><b>girst</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Agirst" title="Bug reports">πŸ›</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/gorogoroumaru"><img src="https://avatars.githubusercontent.com/u/30716350?v=4?s=100" width="100px;" alt="gorogoroumaru"/><br /><sub><b>gorogoroumaru</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=gorogoroumaru" title="Code">πŸ’»</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/gsalvatella"><img src="https://avatars.githubusercontent.com/u/42438361?v=4?s=100" width="100px;" alt="gsalvatella "/><br /><sub><b>gsalvatella </b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Agsalvatella" title="Bug reports">πŸ›</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/harvey251"><img src="https://avatars.githubusercontent.com/u/33844174?v=4?s=100" width="100px;" alt="harvey251"/><br /><sub><b>harvey251</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Aharvey251" title="Bug reports">πŸ›</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/masavini"><img src="https://avatars.githubusercontent.com/u/6315187?v=4?s=100" width="100px;" alt="masavini"/><br /><sub><b>masavini</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=masavini" title="Documentation">πŸ“–</a></td>
+    </tr>
+    <tr>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/mbarkhau"><img src="https://avatars.githubusercontent.com/u/446561?v=4?s=100" width="100px;" alt="mbarkhau"/><br /><sub><b>mbarkhau</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=mbarkhau" title="Code">πŸ’»</a> <a href="https://github.com/requests-cache/requests-cache/commits?author=mbarkhau" title="Tests">⚠️</a> <a href="#infra-mbarkhau" title="Infrastructure (Hosting, Build-Tools, etc)">πŸš‡</a> <a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Ambarkhau" title="Bug reports">πŸ›</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/shiftinv"><img src="https://avatars.githubusercontent.com/u/8530778?v=4?s=100" width="100px;" alt="shiftinv"/><br /><sub><b>shiftinv</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=shiftinv" title="Code">πŸ’»</a> <a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Ashiftinv" title="Bug reports">πŸ›</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://www.witionstheme.com/"><img src="https://avatars.githubusercontent.com/u/55755139?v=4?s=100" width="100px;" alt="witionstheme"/><br /><sub><b>witionstheme</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3Awitionstheme" title="Bug reports">πŸ›</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/hlYassine"><img src="https://avatars.githubusercontent.com/u/3386466?v=4?s=100" width="100px;" alt="yassine"/><br /><sub><b>yassine</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/issues?q=author%3AhlYassine" title="Bug reports">πŸ›</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://www.zhihu.com/people/tan-jiu-ding"><img src="https://avatars.githubusercontent.com/u/24759802?v=4?s=100" width="100px;" alt="谭九鼎"/><br /><sub><b>谭九鼎</b></sub></a><br /><a href="https://github.com/requests-cache/requests-cache/commits?author=imba-tjd" title="Documentation">πŸ“–</a></td>
+    </tr>
+  </tbody>
 </table>
 
 <!-- markdownlint-restore -->
diff --git a/HISTORY.md b/HISTORY.md
index 27ff402..1b4ce20 100644
--- a/HISTORY.md
+++ b/HISTORY.md
@@ -1,6 +1,164 @@
 # History
 
-## 0.9.8 (2023-01-13)
+## Unreleased
+* Add support for regular expressions when using `urls_expire_after`
+
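A minimal sketch of the regex support noted above, under stated assumptions: glob-style strings are the documented key format for `urls_expire_after`, and passing a compiled `re.Pattern` as a key is an assumed way to supply a regular expression:

```python
import re
from datetime import timedelta

from requests_cache import CachedSession

session = CachedSession(
    'demo_cache',  # illustrative cache name
    urls_expire_after={
        '*.site_1.com': timedelta(minutes=30),         # documented glob-style pattern
        re.compile(r'site_2\.com/api/v\d+'): 60 * 60,  # regex key (assumed syntax)
    },
)
```
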
+## 1.0.1 (2023-03-24)
+* Ignore `Cache-Control: must-revalidate` and `no-cache` response headers with `cache_control=False`
+
+## 1.0.0 (2023-03-01)
+[See all issues and PRs for 1.0](https://github.com/requests-cache/requests-cache/milestone/10?closed=1)
+
+🕗 **Expiration & headers:**
+* Add support for `Cache-Control: min-fresh`
+* Add support for `Cache-Control: max-stale`
+* Add support for `Cache-Control: only-if-cached`
+* Add support for `Cache-Control: stale-if-error`
+* Add support for `Cache-Control: stale-while-revalidate`
+* Add support for `Vary`
+* Revalidate for `Cache-Control: no-cache` request or response header
+* Revalidate for `Cache-Control: max-age=0, must-revalidate` response headers
+* Add an attribute `CachedResponse.revalidated` to indicate if a cached response was revalidated for
+  the current request
+
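A minimal sketch of the header-driven behavior above (the URL is illustrative; with `only-if-cached`, a cache miss conventionally yields a 504 rather than a network request):

```python
from requests_cache import CachedSession

session = CachedSession('demo_cache', cache_control=True)
url = 'https://example.com/api'  # illustrative

session.get(url)  # first response is cached normally

# A no-cache request header forces revalidation with the server;
# the revalidated attribute reports whether that happened
response = session.get(url, headers={'Cache-Control': 'no-cache'})
print(response.from_cache, response.revalidated)

# only-if-cached: answer from the cache only, never hit the network
response = session.get(url, headers={'Cache-Control': 'only-if-cached'})
```
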
+βš™οΈ **Session settings:**
+* All settings that affect cache behavior can now be accessed and modified via `CachedSession.settings`
+* Add `always_revalidate` session setting to always revalidate before using a cached response (if a validator is available).
+* Add `only_if_cached` session setting to return only cached results without sending real requests
+* Add `stale_while_revalidate` session setting to return a stale response initially, while a non-blocking request is sent to refresh the response
+* Make behavior for `stale_if_error` partially consistent with `Cache-Control: stale-if-error`: Add support for time values (int, timedelta, etc.) in addition to `True/False`
+
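A sketch of the session settings above, assuming time values are passed as `timedelta` (per the `stale_if_error` note, plain ints work as well):

```python
from datetime import timedelta

from requests_cache import CachedSession

session = CachedSession(
    'demo_cache',
    always_revalidate=True,                       # revalidate whenever a validator exists
    stale_while_revalidate=timedelta(minutes=5),  # serve stale briefly while refreshing
    stale_if_error=timedelta(hours=1),            # tolerate errors up to 1h past expiry
)

# All cache behavior settings are readable and writable in one place
session.settings.expire_after = timedelta(days=1)
print(session.settings.stale_if_error)
```
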
+βš™οΈ **Request settings:**
+* Add `only_if_cached` option to `CachedSession.request()` and `send()` to return only cached results without sending real requests
+* Add `refresh` option to `CachedSession.request()` and `send()` to revalidate with the server before using a cached response
+* Add `force_refresh` option to `CachedSession.request()` and `send()` to always make and cache a new request regardless of existing cache contents
+* Make behavior for `expire_after=0` consistent with `Cache-Control: max-age=0`: if the response has a validator, save it to the cache but revalidate on use.
+  * The constant `requests_cache.DO_NOT_CACHE` may be used to completely disable caching for a request
+
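The per-request options above, in one sketch (URL and cache name are illustrative):

```python
from requests_cache import DO_NOT_CACHE, CachedSession

session = CachedSession('demo_cache')
url = 'https://example.com/api'  # illustrative

response = session.get(url, only_if_cached=True)  # cached results only, no real request
response = session.get(url, refresh=True)         # revalidate before using the cache
response = session.get(url, force_refresh=True)   # always re-request and re-cache

# Disable caching entirely for a single request
response = session.get(url, expire_after=DO_NOT_CACHE)
```
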
+💾 **Backends:**
+* **DynamoDB**:
+  * For better read performance and usage of read throughput:
+    * The cache key is now used as the partition key
+    * Redirects are now cached only in-memory and not persisted
+    * Cache size (`len()`) now uses a fast table estimate instead of a full scan
+  * Store responses in plain (human-readable) document format instead of fully serialized binary
+  * Create default table in on-demand mode instead of provisioned
+  * Add optional integration with DynamoDB TTL to improve performance for removing expired responses
+    * This is enabled by default, but may be disabled
+  * Decode JSON and text response bodies so the saved response can be fully human-readable/editable.
+    May be disabled with `decode_content=False`.
+* **Filesystem**:
+  * The default file format has been changed from pickle to JSON
+  * Decode JSON and text response bodies so the saved response can be fully human-readable/editable.
+    May be disabled with `decode_content=False`.
+* **MongoDB**:
+  * Store responses in plain (human-readable) document format instead of fully serialized binary
+  * Add optional integration with MongoDB TTL to improve performance for removing expired responses
+    * Disabled by default. See 'Backends: MongoDB' docs for details.
+  * Decode JSON and text response bodies so the saved response can be fully human-readable/editable.
+    May be disabled with `decode_content=False`.
+* **Redis**:
+  * Add `ttl_offset` argument to add a delay between cache expiration and deletion
+* **SQLite**:
+  * Improve performance for removing expired responses with `delete()`
+  * Improve performance (slightly) with a large number of threads and high request rate
+  * Add `count()` method to count responses, with option to exclude expired responses (performs a fast indexed count instead of slower in-memory filtering)
+  * Add `size()` method to get estimated size of the database (including in-memory databases)
+  * Add `sorted()` method with sorting and other query options
+  * Add `wal` parameter to enable write-ahead logging
+* **SQLite, Redis, MongoDB, and GridFS**:
+  * Close open database connections when `CachedSession` is used as a context manager, or if `CachedSession.close()` is called
+
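A sketch of two of the backend changes above: connection cleanup via context manager, plus the new SQLite helpers (the `expired=False` keyword is an assumption based on the description of `count()`):

```python
from requests_cache import CachedSession

# Connections are closed on exit for SQLite, Redis, MongoDB, and GridFS
with CachedSession('demo_cache', backend='sqlite') as session:
    session.get('https://example.com')  # illustrative URL

    print(session.cache.count(expired=False))  # fast indexed count, excluding expired
    print(session.cache.size())                # estimated database size
```
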
+↔️ **Request matching:**
+* Add serializer name to cache keys to avoid errors due to switching serializers
+* Always skip both cache read and write for requests excluded by `allowable_methods` (previously only skipped write)
+* Ignore and redact common authentication headers and request parameters by default. This provides
+  some default recommended values for `ignored_parameters`, to avoid accidentally storing common
+  credentials in the cache. This will have no effect if `ignored_parameters` is already set.
+* Support distinct matching for requests that differ only by a parameter in `ignored_parameters`
+  (e.g., for a request sent both with and without authentication)
+* Support distinct matching for requests that differ only by duplicate request params (e.g., `?a=1` vs `?a=1&a=2`)
+
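A sketch of the `ignored_parameters` matching behavior above (the parameter name and URL are illustrative; setting it explicitly replaces the new recommended defaults):

```python
from requests_cache import CachedSession

session = CachedSession('demo_cache', ignored_parameters=['api_key'])
url = 'https://example.com/api'  # illustrative

# Requests differing only in the ignored value match the same cached
# response, and the value is redacted from the stored request
session.get(url, params={'api_key': 'key-1'})
session.get(url, params={'api_key': 'key-2'})  # cache hit

# Per the note above, omitting the parameter entirely is matched distinctly
session.get(url)  # cache miss
```
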
+ℹ️ **Convenience methods:**
+* Add `expired` and `invalid` arguments to `BaseCache.delete()` (to replace `remove_expired_responses()`)
+* Add `urls` and `requests` arguments to `BaseCache.delete()` (to replace `delete_url()`)
+* Add `older_than` argument to `BaseCache.delete()` to delete responses older than a given value
+* Add `requests` argument to `BaseCache.delete()` to delete responses matching the given requests
+* Add `BaseCache.contains()` method to check for cached requests either by key or by `requests.Request` object
+* Add `url` argument to `BaseCache.contains()` method (to replace `has_url()`)
+* Add `BaseCache.filter()` method to get responses from the cache with various filters
+* Add `BaseCache.reset_expiration()` method to reset expiration for existing responses
+* Add `BaseCache.recreate_keys()` method to recreate cache keys for all previously cached responses
+  (e.g., to preserve cache data after an update that changes request matching behavior)
+* Update `BaseCache.urls` into a method that takes optional filter params, and returns sorted unique URLs
+
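Several of the methods above in one sketch; the exact keyword arguments shown for `filter()` and `reset_expiration()` are assumptions based on the descriptions (URLs illustrative):

```python
from datetime import timedelta

from requests_cache import CachedSession

cache = CachedSession('demo_cache').cache

cache.delete(expired=True, invalid=True)    # drop expired and invalid responses
cache.delete(older_than=timedelta(days=7))  # drop anything older than a week

print(cache.contains(url='https://example.com/api'))
print(cache.urls())  # now a method returning sorted unique URLs

cache.reset_expiration(timedelta(days=30))   # assumed positional argument
for response in cache.filter(expired=True):  # assumed keyword
    print(response.cache_key, response.expires)
```
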
+ℹ️ **Response attributes and type hints:**
+* Add `OriginalResponse` type, which adds type hints to `requests.Response` objects for extra attributes added by requests-cache:
+  * `cache_key`
+  * `created_at`
+  * `expires`
+  * `from_cache`
+  * `is_expired`
+  * `revalidated`
+* `OriginalResponse.cache_key` and `expires` will be populated for any new response that was written to the cache
+* Add request wrapper methods with return type hints for all HTTP methods (`CachedSession.get()`, `head()`, etc.)
+* Set `CachedResponse.cache_key` attribute for responses read from lower-level storage methods
+  (`items()`, `values()`, etc.)
+
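A sketch of the attributes above as seen on a returned response (URL illustrative; some attributes may be unset depending on whether the response came from the cache):

```python
from requests_cache import CachedSession

session = CachedSession('demo_cache')
response = session.get('https://example.com')  # illustrative

print(response.from_cache, response.created_at, response.expires)
print(response.cache_key, response.is_expired, response.revalidated)
```
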
+🧩 **Compatibility fixes:**
+* **PyInstaller:** Fix potential `AttributeError` due to undetected imports when requests-cache is bundled in a PyInstaller package
+* **requests-oauthlib:** Add support for header values as bytes for compatibility with OAuth1 features
+* **redis-py:** Fix forwarding connection parameters passed to `RedisCache` for redis-py 4.2 and python <=3.8
+* **pymongo:** Fix forwarding connection parameters passed to `MongoCache` for pymongo 4.1 and python <=3.8
+* **cattrs:** Add compatibility with cattrs 22.2
+* **python:**
+  * Add tests and support for python 3.11
+  * Add tests and support for pypy 3.9
+
+🪲 **Bugfixes:**
+* Fix usage of memory backend with `install_cache()`
+* Fix an uncommon `OperationalError: database is locked` in SQLite backend
+* Fix issue on Windows with occasional missing `CachedResponse.created_at` timestamp
+* Add `CachedRequest.path_url` property for compatibility with `RequestEncodingMixin`
+* Fix potential `AttributeError` due to undetected imports when requests-cache is bundled in a PyInstaller package
+* Fix `AttributeError` when attempting to unpickle a `CachedSession` object, and instead disable pickling by raising a `NotImplementedError`
+* Raise an error for invalid expiration string values (except for headers containing httpdates)
+  * Previously, this would be quietly ignored, and the response would be cached indefinitely
+* Fix behavior for `stale_if_error` if an error response code is added to `allowable_codes`
+
+📦 **Dependencies:**
+* Replace `appdirs` with `platformdirs`
+
+⚠️ **Deprecations:**
+
+The following methods are deprecated, and will be removed in a future release. The recommended
+replacements are listed below. If this causes problems for you, please open an issue to discuss.
+* `CachedSession.remove_expired_responses()`: `BaseCache.delete(expired=True)`
+* `BaseCache.remove_expired_responses()`: `BaseCache.delete(expired=True)`
+* `BaseCache.delete_url()`: `BaseCache.delete(urls=[...])`
+* `BaseCache.delete_urls()`: `BaseCache.delete(urls=[...])`
+* `BaseCache.has_key()`: `BaseCache.contains()`
+* `BaseCache.has_url()`: `BaseCache.contains(url=...)`
+* `BaseCache.keys()`: `BaseCache.responses.keys()` (for all keys), or `BaseCache.filter()` (for filtering options)
+* `BaseCache.values()`: `BaseCache.responses.values()` (for all values), or `BaseCache.filter()` (for filtering options)
+* `BaseCache.response_count()`: `len(BaseCache.responses)` (for all responses), or `BaseCache.filter()` (for filtering options)
+
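The mapping above as a before/after sketch (the URL and key are illustrative):

```python
from requests_cache import CachedSession

cache = CachedSession('demo_cache').cache

cache.delete(expired=True)                  # was: cache.remove_expired_responses()
cache.delete(urls=['https://example.com'])  # was: cache.delete_url() / delete_urls()
cache.contains('some-cache-key')            # was: cache.has_key('some-cache-key')
cache.contains(url='https://example.com')   # was: cache.has_url(...)
print(len(cache.responses))                 # was: cache.response_count()
```
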
+⚠️ **Breaking changes:**
+
+* After initialization, cache settings can only be accessed and modified via `CachedSession.settings`. Previously, some settings could be modified by setting them on either `CachedSession` or `BaseCache`. In some cases this could silently fail or otherwise have undefined behavior.
+* `BaseCache.urls` has been replaced with a method that returns a list of URLs.
+* DynamoDB table structure has changed. If you are using the DynamoDB backend, you will need to create a new table when upgrading to 1.0. See [DynamoDB backend docs](https://requests-cache.readthedocs.io/en/stable/user_guide/backends/dynamodb.html#dynamodb) for more details.
+
+**Minor breaking changes:**
+
+The following changes only affect advanced or undocumented usage, and are not expected to impact most users:
+* The arguments `match_headers` and `ignored_parameters` must be passed to `CachedSession`. Previously, these could also be passed to a `BaseCache` instance.
+* The `CachedSession` `backend` argument must be either an instance or string alias. Previously it would also accept a backend class.
+* All serializer-specific `BaseStorage` subclasses have been removed, and merged into their respective parent classes. This includes `SQLitePickleDict`, `MongoPickleDict`, and `GridFSPickleDict`.
+  * All `BaseStorage` subclasses now have a `serializer` attribute, which will be unused if set to `None`.
+* The `cache_control` module (added in `0.7`) has been split up into multiple modules in a new `policy` subpackage
+
+### 0.9.8 (2023-01-13)
 * Fix `DeprecationWarning` raised by `BaseCache.urls`
 * Reword ambiguous log message for `BaseCache.delete`
 
@@ -8,7 +166,7 @@ Backport fixes from 1.0:
 * For custom serializers, handle using a cattrs converter that doesn't support `omit_if_default`
 * Raise an error for invalid expiration string values (except for headers containing httpdates)
 
-## 0.9.7 (2022-10-26)
+### 0.9.7 (2022-10-26)
 Backport compatibility fixes from 1.0:
 * **PyInstaller:** Fix potential `AttributeError` due to undetected imports when requests-cache is bundled in a PyInstaller package
 * **requests-oauthlib:** Add support for header values as bytes for compatibility with OAuth1 features
@@ -24,24 +182,24 @@ Add the following for forwards-compatibility with 1.0:
 * `BaseCache.filter()`
 * `CachedSession.settings`
 
-## 0.9.6 (2022-08-24)
+### 0.9.6 (2022-08-24)
 Backport fixes from 1.0:
 * Remove potentially problematic row count from `BaseCache.__str__()`
 * Remove upper version constraints for all non-dev dependencies
 * Make dependency specification consistent between PyPI and Conda-Forge packages
 
-## 0.9.5 (2022-06-29)
+### 0.9.5 (2022-06-29)
 Backport fixes from 1.0:
 * Fix usage of memory backend with `install_cache()`
 * Add `CachedRequest.path_url` property
 * Add compatibility with cattrs 22.1
 
-## 0.9.4 (2022-04-22)
+### 0.9.4 (2022-04-22)
 Backport fixes from 1.0:
 * Fix forwarding connection parameters passed to `RedisCache` for redis-py 4.2 and python <=3.8
 * Fix forwarding connection parameters passed to `MongoCache` for pymongo 4.1 and python <=3.8
 
-## 0.9.3 (2022-02-22)
+### 0.9.3 (2022-02-22)
 * Fix handling BSON serializer differences between pymongo's `bson` and standalone `bson` codec.
 * Handle `CorruptGridFile` error in GridFS backend
 * Fix cache path expansion for user directories (`~/...`) for SQLite and filesystem backends
@@ -53,13 +211,13 @@ Backport fixes from 1.0:
     before it is read by a different thread
   * Fix multiple race conditions in GridFS backend
 
-## 0.9.2 (2022-02-15)
+### 0.9.2 (2022-02-15)
 * Fix serialization in filesystem backend with binary content that is also valid UTF-8
 * Fix some regression bugs introduced in 0.9.0:
   * Add support for `params` as a positional argument to `CachedSession.request()`
   * Add support for disabling expiration for a single request with `CachedSession.request(..., expire_after=-1)`
 
-## 0.9.1 (2022-01-15)
+### 0.9.1 (2022-01-15)
 * Add support for python 3.10.2 and 3.9.10 (regarding resolving `ForwardRef` types during deserialization)
 * Add support for key-only request parameters (regarding hashing request data for cache key creation)
 * Reduce verbosity of log messages when encountering an invalid JSON request body
@@ -67,26 +225,29 @@ Backport fixes from 1.0:
 ## 0.9.0 (2022-01-01)
 [See all issues and PRs for 0.9](https://github.com/requests-cache/requests-cache/milestone/4?closed=1)
 
-**Expiration & Headers:**
+🕗 **Expiration & headers:**
 * Use `Cache-Control` **request** headers by default
 * Add support for `Cache-Control: immutable`
 * Add support for immediate expiration + revalidation with `Cache-Control: max-age=0` and `Expires: 0`
 * Reset expiration for cached response when a `304 Not Modified` response is received
-
-**Backends:**
-* Filesystem and SQLite backends: Add better error message if parent path exists but isn't a directory
-* Redis: Improve performance by using native Redis TTL for cache expiration
-
-**Other features:**
 * Support `expire_after` param for `CachedSession.send()`
 
-**Performance:**
+💾 **Backends:**
+* **Filesystem:**
+  * Add better error message if parent path exists but isn't a directory
+* **Redis:**
+  * Add optional integration with Redis TTL to improve performance for removing expired responses
+  * This is enabled by default, but may be disabled
+* **SQLite:**
+  * Add better error message if parent path exists but isn't a directory
+
+🚀 **Performance:**
 * Fix duplicate read operation for checking whether to read from redirects cache
 * Skip unnecessary contains check if a key is in the main responses cache
 * Make per-request expiration thread-safe for both `CachedSession.request()` and `CachedSession.send()`
 * Some micro-optimizations for request matching
 
-**Bugfixes:**
+🪲 **Bugfixes:**
 * Fix regression bug causing headers used for cache key to not guarantee sort order
 * Handle some additional corner cases when normalizing request data
 * Add support for `BaseCache` keyword arguments passed along with a backend instance
@@ -94,7 +255,7 @@ Backport fixes from 1.0:
 * Fix license metadata as shown on PyPI
 * Fix `CachedResponse` serialization behavior when using stdlib `pickle` in a custom serializer
 
-## 0.8.1 (2021-09-15)
+### 0.8.1 (2021-09-15)
 * Redact `ignored_parameters` from `CachedResponse.url` (if used for credentials or other sensitive info)
 * Fix an incorrect debug log message about skipping cache write
 * Add some additional aliases for `DbDict`, etc. so fully qualified imports don't break
@@ -102,39 +263,47 @@ Backport fixes from 1.0:
 ## 0.8.0 (2021-09-07)
 [See all issues and PRs for 0.8](https://github.com/requests-cache/requests-cache/milestone/3?closed=1)
 
-**Expiration & Headers:**
+🕗 **Expiration & headers:**
 * Add support for conditional requests and cache validation using:
     * `ETag` + `If-None-Match` headers
     * `Last-Modified` + `If-Modified-Since` headers
     * `304 Not Modified` responses
 * If a cached response is expired but contains a validator, a conditional request will be sent, and a new response will be cached and returned only if the remote content has not changed
 
-**Backends:**
-* Filesystem:
+💾 **Backends:**
+* **Filesystem:**
     * Add `FileCache.cache_dir` wrapper property
     * Add `FileCache.paths()` method
     * Add `use_cache_dir` option to use platform-specific user cache directory
     * Return `pathlib.Path` objects for all file paths
     * Use shorter hashes for file names
-* SQLite:
+* **SQLite:**
     * Add `SQLiteCache.db_path` wrapper property
     * Add `use_memory` option and support for in-memory databases
     * Add `use_cache_dir` option to use platform-specific user cache directory
     * Return `pathlib.Path` objects for all file paths
 
-**Serialization:**
+🚀 **Performance:**
 * Use `cattrs` by default for optimized serialization
+* Slightly reduce size of serialized responses
 
-**Other features:**
-* Add `BaseCache.update()` method as a shortcut for exporting to a different cache instance
-* Allow `BaseCache.has_url()` and `delete_url()` to optionally take parameters for `requests.Request` instead of just a URL
+↔️ **Request matching:**
 * Allow `create_key()` to optionally accept parameters for `requests.Request` instead of a request object
 * Allow `match_headers` to optionally accept a list of specific headers to match
 * Add support for custom cache key callbacks with `key_fn` parameter
 * By default use blake2 instead of sha256 for generating cache keys
-* Slightly reduce size of serialized responses
 
-**Backwards-compatible API changes:**
+ℹ️ **Cache convenience methods:**
+* Add `BaseCache.update()` method as a shortcut for exporting to a different cache instance
+* Allow `BaseCache.has_url()` and `delete_url()` to optionally take parameters for `requests.Request` instead of just a URL
+
+📦 **Dependencies:**
+* Add `appdirs` as a dependency for easier cross-platform usage of user cache directories
+* Update `cattrs` from optional to required dependency
+* Update `itsdangerous` from required to optional (but recommended) dependency
+* Require `requests` 2.22+ and `urllib3` 1.25.5+
+
+⚠️ **Backwards-compatible API changes:**
 
 The following changes are meant to make certain behaviors more obvious for new users, without breaking existing usage:
 * For consistency with `Cache-Control: stale-if-error`, rename `old_data_on_error` to `stale_if_error`
@@ -144,16 +313,8 @@ The following changes are meant to make certain behaviors more obvious for new u
 * For consistency with other backends, rename SQLite backend classes: `backends.sqlite.Db*` -> `SQLiteCache`, `SQLiteDict`, `SQLitePickleDict`
 * Add aliases for all previous parameter/class names for backwards-compatibility
 
-**Dependencies:**
-* Add `appdirs` as a dependency for easier cross-platform usage of user cache directories
-* Update `cattrs` from optional to required dependency
-* Update `itsdangerous` from required to optional (but recommended) dependency
-* Require `requests` 2.22+ and `urllib3` 1.25.5+
-
-**Deprecations & removals:**
+⚠️ **Deprecations & removals:**
 * Drop support for python 3.6
-    * **Note:** python 3.6 support in 0.7.x will continue to be maintained at least until it reaches EOL (2021-12-23)
-    * Any bugfixes for 0.8 that also apply to 0.7 will be backported
 * Remove deprecated `core` module
 * Remove deprecated `BaseCache.remove_old_entries()` method
 
@@ -191,19 +352,7 @@ The following changes are meant to make certain behaviors more obvious for new u
 ## 0.7.0 (2021-07-07)
 [See all issues and PRs for 0.7](https://github.com/requests-cache/requests-cache/milestone/2?closed=1)
 
-**Backends:**
-* Add a filesystem backend that stores responses as local files
-* SQLite and Filesystem: Add `use_temp` option to store files in a temp directory
-* SQLite: Use persistent thread-local connections, and improve performance for bulk operations
-* DynamoDB: Fix `DynamoDbDict.__iter__` to return keys instead of values
-* MongoDB: Remove usage of deprecated pymongo `Collection.find_and_modify()`
-* Allow passing any backend-specific connection kwargs via `CachedSession` to the underlying connection function or object:
-    * SQLite: `sqlite3.connect`
-    * DynamoDB: `boto3.resource`
-    * Redis: `redis.Redis`
-    * MongoDB and GridFS: `pymongo.MongoClient`
-
-**Expiration & Headers:**
+🕗 **Expiration & headers:**
 * Add optional support for the following **request** headers:
     * `Cache-Control: max-age`
     * `Cache-Control: no-cache`
@@ -212,12 +361,27 @@ The following changes are meant to make certain behaviors more obvious for new u
     * `Cache-Control: max-age`
     * `Cache-Control: no-store`
     * `Expires`
-* Add `cache_control` option to `CachedSession` to enable usage of cache headers
+* Add `cache_control` option to `CachedSession` to enable setting expiration with cache headers
 * Add support for HTTP timestamps (RFC 5322) in ``expire_after`` parameters
 * Add support for bypassing the cache if `expire_after=0`
 * Add support for making a cache allowlist using URL patterns
 
-**Serialization:**
+💾 **Backends:**
+* Add a filesystem backend that stores responses as local files
+* **DynamoDB:**
+  * Fix `DynamoDbDict.__iter__` to return keys instead of values
+  * Accept connection arguments for `boto3.resource`
+* **MongoDB:**
+  * Remove usage of deprecated pymongo `Collection.find_and_modify()`
+  * Accept connection arguments for `pymongo.MongoClient`
+* **Redis:**
+  * Accept connection arguments for `redis.Redis`
+* **SQLite:**
+  * Use persistent thread-local connections, and improve performance for bulk operations
+  * Add `use_temp` option to store files in a temp directory
+  * Accept connection arguments for `sqlite3.connect`
+
+💾 **Serialization:**
 * Add data models for all serialized objects
 * Add a JSON serializer
 * Add a YAML serializer
@@ -225,20 +389,21 @@ The following changes are meant to make certain behaviors more obvious for new u
 * Add optional support for `cattrs`
 * Add optional support for `ultrajson`
 
-**Other features:**
-* requests-cache is now fully typed and PEP-561 compliant
+↔️ **Request matching:**
+* Add support for caching multipart form uploads
+* Update `ignored_parameters` to also exclude ignored request params, body params, or headers from cached response data (to avoid storing API keys or other credentials)
+* Update `old_data_on_error` option to also handle error response codes
+* Only log request exceptions if `old_data_on_error` is set
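
A short sketch of the request matching options above (the endpoint and key are illustrative):
```python
from requests_cache import CachedSession

session = CachedSession(
    'demo_cache',
    ignored_parameters=['api_key'],  # Don't match this param or store it in cached data
    old_data_on_error=True,          # Fall back to stale data on errors or error codes
)
session.get('https://httpbin.org/get', params={'api_key': 'hunter2'})
```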
+
+ℹ️ **Convenience methods:**
 * Add option to manually cache response objects with `BaseCache.save_response()`
 * Add `BaseCache.keys()` and `values()` methods
 * Add `BaseCache.response_count()` method to get an accurate count of responses (excluding invalid and expired)
 * Show summarized response details with `str(CachedResponse)`
 * Add more detailed repr methods for `CachedSession`, `CachedResponse`, and `BaseCache`
-* Add support for caching multipart form uploads
 * Update `BaseCache.urls` to only skip invalid responses, not delete them (for better performance)
-* Update `ignored_parameters` to also exclude ignored request params, body params, or headers from cached response data (to avoid storing API keys or other credentials)
-* Update `old_data_on_error` option to also handle error response codes
-* Only log request exceptions if `old_data_on_error` is set
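
And a sketch of the convenience methods listed above:
```python
from requests_cache import CachedSession

session = CachedSession('demo_cache')
session.get('https://httpbin.org/get')

print(session.cache.response_count())  # Counts only valid, unexpired responses
print(list(session.cache.keys()))
for response in session.cache.values():
    print(response)                    # str(CachedResponse) shows a summary
```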
 
-**Depedencies:**
+📦 **Dependencies:**
 * Add minimum `requests` version of `2.17`
 * Add `attrs` as a dependency for improved serialization models
 * Add `cattrs` as an optional dependency
@@ -250,15 +415,18 @@ The following changes are meant to make certain behaviors more obvious for new u
     * `requests-cache[mongodb]`
     * `requests-cache[redis]`
 
-**Compatibility and packaging:**
+📦 **Compatibility and packaging:**
+* requests-cache is now fully typed and PEP-561 compliant
 * Fix some compatibility issues with `requests 2.17` and `2.18`
 * Run pre-release tests for each supported version of `requests`
-* Packaging is now handled with Poetry. For users, installation still works the same. For developers, see [Contributing Guide](https://requests-cache.readthedocs.io/en/stable/contributing.html) for details
+* Packaging is now managed by Poetry
+  * For users, installation still works the same.
+  * For developers, see [Contributing Guide](https://requests-cache.readthedocs.io/en/stable/contributing.html) for details
 
 
 -----
 ### 0.6.4 (2021-06-04)
-Fix a bug in which `filter_fn()` would get called on `response.request` instead of `response`
+* Fix a bug in which `filter_fn()` would get called on `response.request` instead of `response`
 
 ### 0.6.3 (2021-04-21)
 * Fix false positive warning with `include_get_headers`
@@ -283,21 +451,24 @@ Fix a bug in which `filter_fn()` would get called on `response.request` instead
 
 Thanks to [Code Shelter](https://www.codeshelter.co) and [contributors](https://requests-cache.readthedocs.io/en/stable/contributors.html) for making this release possible!
 
-**Backends:**
-* SQLite: Allow passing user paths (`~/path-to-cache`) to database file with `db_path` param
-* SQLite: Add `timeout` parameter
-* Make default table names consistent across backends (`'http_cache'`)
-
-**Expiration:**
-* Cached responses are now stored with an absolute expiration time, so `CachedSession.expire_after` no longer applies retroactively. To revalidate previously cached items with a new expiration time see below:
-* Add support for overriding original expiration (i.e., revalidating) in `CachedSession.remove_expired_responses()`
+🕗 **Expiration:**
+* Cached responses are now stored with an absolute expiration time, so `CachedSession.expire_after`
+  no longer applies retroactively. To reset expiration for previously cached items, see below:
+* Add support for overriding original expiration in `CachedSession.remove_expired_responses()`
 * Add support for setting expiration for individual requests
 * Add support for setting expiration based on URL glob patterns
 * Add support for setting expiration as a `datetime`
 * Add support for explicitly disabling expiration with `-1` (Since `None` may be ambiguous in some cases)
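
A minimal sketch of the 0.6 expiration options above:
```python
from datetime import timedelta
from requests_cache import CachedSession

session = CachedSession('demo_cache', expire_after=timedelta(hours=1))

# Set expiration for an individual request
session.get('https://httpbin.org/get', expire_after=360)

# Revalidate previously cached responses with a new expiration time
session.remove_expired_responses(expire_after=timedelta(days=1))
```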
 
-**Serialization:**
-* **Note:** Due to the following changes, responses cached with previous versions of requests-cache will be invalid. These **old responses will be treated as expired**, and will be refreshed the next time they are requested. They can also be manually converted or removed, if needed (see notes below).
+💾 **Backends:**
+* **SQLite:**
+  * Allow passing user paths (`~/path-to-cache`) to database file with `db_path` param
+  * Add `timeout` parameter
+* **All:** Make default table names consistent across backends (`'http_cache'`)
+
+💾 **Serialization:**
+
+**Note:** Due to the following changes, responses cached with previous versions of requests-cache will be invalid. These **old responses will be treated as expired**, and will be refreshed the next time they are requested. They can also be manually converted or removed, if needed (see notes below).
 * Add [example script](https://github.com/requests-cache/requests-cache/blob/main/examples/convert_cache.py) to convert an existing cache from previous serialization format to new one
 * When running `remove_expired_responses()`, also remove responses that are invalid due to updated serialization format
 * Add `CachedResponse` class to wrap cached `requests.Response` objects, which makes additional cache information available to client code
@@ -311,7 +482,13 @@ Thanks to [Code Shelter](https://www.codeshelter.co) and [contributors](https://
 * Add `CacheMixin` class to make the features of `CachedSession` usable as a mixin class, for [compatibility with other requests-based libraries](https://requests-cache.readthedocs.io/en/stable/advanced_usage.html#library-compatibility).
 * Add `HEAD` to default `allowable_methods`
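
The mixin pattern looks roughly like this (a sketch assuming `requests-futures` is installed; any
requests-based session class can be combined the same way):
```python
from requests_cache import CacheMixin
from requests_futures.sessions import FuturesSession

class CachedFuturesSession(CacheMixin, FuturesSession):
    """A FuturesSession with transparent caching"""

session = CachedFuturesSession()
```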
 
-**Bugfixes:**
+📗 **Docs & Tests:**
+* Add type annotations to main functions/methods in public API, and include in documentation on [readthedocs](https://requests-cache.readthedocs.io/en/stable/)
+* Add [Contributing Guide](https://requests-cache.readthedocs.io/en/stable/contributing.html), [Security](https://requests-cache.readthedocs.io/en/stable/security.html) info, and more examples & detailed usage info in [User Guide](https://requests-cache.readthedocs.io/en/stable/user_guide.html) and [Advanced Usage](https://requests-cache.readthedocs.io/en/stable/advanced_usage.html) sections.
+* Increase test coverage and rewrite most tests using pytest
+* Add containerized backends for both local and CI integration testing
+
+🪲 **Bugfixes:**
 * Fix caching requests with data specified in `json` parameter
 * Fix caching requests with `verify` parameter
 * Fix duplicate cached responses due to some unhandled variations in URL format
@@ -323,23 +500,17 @@ Thanks to [Code Shelter](https://www.codeshelter.co) and [contributors](https://
 * Update usage of deprecated MongoClient `save()` method
 * Replace some old bugs with new and different bugs, just to keep life interesting
 
-**Depedencies:**
+📦 **Dependencies:**
 * Add `itsdangerous` as a dependency for secure serialization
 * Add `url-normalize` as a dependency for better request normalization and reducing duplications
 
-**Deprecations & removals:**
+⚠️ **Deprecations & removals:**
 * Drop support for python 2.7, 3.4, and 3.5
 * Deprecate `core` module; all imports should be made from top-level package instead
     * e.g.: `from requests_cache import CachedSession`
     * Imports `from requests_cache.core` will raise a `DeprecationWarning`, and will be removed in a future release
 * Rename `BaseCache.remove_old_entries()` to `remove_expired_responses()`, to match its wrapper method `CachedSession.remove_expired_responses()`
 
-**Docs & Tests:**
-* Add type annotations to main functions/methods in public API, and include in documentation on [readthedocs](https://requests-cache.readthedocs.io/en/stable/)
-* Add [Contributing Guide](https://requests-cache.readthedocs.io/en/stable/contributing.html), [Security](https://requests-cache.readthedocs.io/en/stable/security.html) info, and more examples & detailed usage info in [User Guide](https://requests-cache.readthedocs.io/en/stable/user_guide.html) and [Advanced Usage](https://requests-cache.readthedocs.io/en/stable/advanced_usage.html) sections.
-* Increase test coverage and rewrite most tests using pytest
-* Add containerized backends for both local and CI integration testing
-
 -----
 ### 0.5.2 (2019-08-14)
 * Fix DeprecationWarning from collections #140
diff --git a/README.md b/README.md
index 64e62c5..3271d64 100644
--- a/README.md
+++ b/README.md
@@ -11,8 +11,8 @@
 [![PyPI - Downloads](https://img.shields.io/pypi/dm/requests-cache?color=blue)](https://pypi.org/project/requests-cache)
 
 ## Summary
-**requests-cache** is a transparent, persistent cache that provides an easy way to get better
-performance with the python [requests](http://python-requests.org) library.
+**requests-cache** is a persistent HTTP cache that provides an easy way to get better
+performance with the python [requests](https://requests.readthedocs.io/) library.
 
 <!-- RTD-IGNORE -->
 Complete project documentation can be found at [requests-cache.readthedocs.io](https://requests-cache.readthedocs.io).
@@ -23,7 +23,7 @@ Complete project documentation can be found at [requests-cache.readthedocs.io](h
   with a [drop-in replacement](https://requests-cache.readthedocs.io/en/stable/user_guide/general.html#sessions)
   for `requests.Session`, or
   [install globally](https://requests-cache.readthedocs.io/en/stable/user_guide/general.html#patching)
-  to add caching to all `requests` functions.
+  to add transparent caching to all `requests` functions.
 * 🚀 **Performance:** Get sub-millisecond response times for cached responses. When they expire, you
   still save time with
   [conditional requests](https://requests-cache.readthedocs.io/en/stable/user_guide/headers.html#conditional-requests).
@@ -31,15 +31,14 @@ Complete project documentation can be found at [requests-cache.readthedocs.io](h
   [storage backends](https://requests-cache.readthedocs.io/en/stable/user_guide/backends.html)
   including SQLite, Redis, MongoDB, and DynamoDB; or save responses as plain JSON files, YAML,
   and more
+* 🕗 **Expiration:** Use
+  [Cache-Control](https://requests-cache.readthedocs.io/en/stable/user_guide/headers.html#cache-control)
+  and other standard HTTP headers, define your own expiration schedule, keep your cache clutter-free
+  with backends that natively support TTL, or any combination of strategies
 * βš™οΈ **Customization:** Works out of the box with zero config, but with a robust set of features for
   configuring and extending the library to suit your needs
-* 🕗 **Expiration:** Keep your cache fresh using
-  [Cache-Control](https://requests-cache.readthedocs.io/en/stable/user_guide/headers.html#cache-control),
-  eagerly cache everything for long-term storage, use
-  [URL patterns](https://requests-cache.readthedocs.io/en/stable/user_guide/expiration.html#expiration-with-url-patterns)
-  for selective caching, or any combination of strategies
-* βœ”οΈ **Compatibility:** Can be combined with other popular
-  [libraries based on requests](https://requests-cache.readthedocs.io/en/stable/user_guide/compatibility.html)
+* 🧩 **Compatibility:** Can be combined with other
+  [popular libraries based on requests](https://requests-cache.readthedocs.io/en/stable/user_guide/compatibility.html)
 
 ## Quickstart
 First, install with pip:
@@ -49,7 +48,7 @@ pip install requests-cache
 
 Then, use [requests_cache.CachedSession](https://requests-cache.readthedocs.io/en/stable/session.html)
 to make your requests. It behaves like a normal
-[requests.Session](https://docs.python-requests.org/en/master/user/advanced/#session-objects),
+[requests.Session](https://requests.readthedocs.io/en/latest/user/advanced/#session-objects),
 but with caching behavior.
 
 To illustrate, we'll call an endpoint that adds a delay of 1 second, simulating a slow or
@@ -61,7 +60,7 @@ import requests
 
 session = requests.Session()
 for i in range(60):
-    session.get('http://httpbin.org/delay/1')
+    session.get('https://httpbin.org/delay/1')
 ```
 
 **This takes 1 second:**
@@ -70,14 +69,13 @@ import requests_cache
 
 session = requests_cache.CachedSession('demo_cache')
 for i in range(60):
-    session.get('http://httpbin.org/delay/1')
+    session.get('https://httpbin.org/delay/1')
 ```
 
 With caching, the response will be fetched once, saved to `demo_cache.sqlite`, and subsequent
 requests will return the cached response near-instantly.
 
-**Patching:**
-
+### Patching
 If you don't want to manage a session object, or just want to quickly test it out in your
 application without modifying any code, requests-cache can also be installed globally, and all
 requests will be transparently cached:
@@ -86,26 +84,58 @@ import requests
 import requests_cache
 
 requests_cache.install_cache('demo_cache')
-requests.get('http://httpbin.org/delay/1')
+requests.get('https://httpbin.org/delay/1')
+```
+
+### Headers and Expiration
+By default, requests-cache will keep cached responses indefinitely. In most cases, you will want to
+use one of the two following strategies to balance cache freshness and performance:
+
+**Define exactly how long to keep responses:**
+
+Use the `expire_after` parameter to set a fixed expiration time for all responses:
+```python
+from requests_cache import CachedSession
+from datetime import timedelta
+
+# Keep responses for 360 seconds
+session = CachedSession('demo_cache', expire_after=360)
+
+# Or use timedelta objects to specify other units of time
+session = CachedSession('demo_cache', expire_after=timedelta(hours=1))
+```
+See [Expiration](https://requests-cache.readthedocs.io/en/stable/user_guide/expiration.html) for
+more features and settings.
+
+**Use Cache-Control headers:**
+
+Use the `cache_control` parameter to enable automatic expiration based on `Cache-Control` and other
+standard HTTP headers sent by the server:
+```python
+from requests_cache import CachedSession
+
+session = CachedSession('demo_cache', cache_control=True)
 ```
+See [Cache Headers](https://requests-cache.readthedocs.io/en/stable/user_guide/headers.html)
+for more details.
 
-**Configuration:**
 
-A quick example of some of the options available:
+### Settings
+The default settings work well for most use cases, but there are plenty of ways to customize
+caching behavior when needed. Here is a quick example of some of the options available:
 ```python
-# fmt: off
 from datetime import timedelta
 from requests_cache import CachedSession
 
 session = CachedSession(
     'demo_cache',
     use_cache_dir=True,                # Save files in the default user cache dir
-    cache_control=True,                # Use Cache-Control headers for expiration, if available
+    cache_control=True,                # Use Cache-Control response headers for expiration, if available
     expire_after=timedelta(days=1),    # Otherwise expire responses after one day
-    allowable_methods=['GET', 'POST'], # Cache POST requests to avoid sending the same data twice
     allowable_codes=[200, 400],        # Cache 400 responses as a solemn reminder of your failures
-    ignored_parameters=['api_key'],    # Don't match this param or save it in the cache
-    match_headers=True,                # Match all request headers
+    allowable_methods=['GET', 'POST'], # Cache whatever HTTP methods you want
+    ignored_parameters=['api_key'],    # Don't match this request param, and redact it from the cache
+    match_headers=['Accept-Language'], # Cache a different response per language
     stale_if_error=True,               # In case of request errors, use stale cache data if possible
 )
 ```
@@ -115,10 +145,7 @@ session = CachedSession(
 To find out more about what you can do with requests-cache, see:
 
 * [User Guide](https://requests-cache.readthedocs.io/en/stable/user_guide.html)
+* [Examples](https://requests-cache.readthedocs.io/en/stable/examples.html)
 * [API Reference](https://requests-cache.readthedocs.io/en/stable/reference.html)
 * [Project Info](https://requests-cache.readthedocs.io/en/stable/project_info.html)
-* A working example at Real Python:
-  [Caching External API Requests](https://realpython.com/blog/python/caching-external-api-requests)
-* More examples in the
-  [examples/](https://github.com/requests-cache/requests-cache/tree/main/examples) folder
 <!-- END-RTD-IGNORE -->
diff --git a/debian/changelog b/debian/changelog
index dc258d1..15e2e15 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -1,3 +1,9 @@
+python-requests-cache (1.0.1-1) UNRELEASED; urgency=low
+
+  * New upstream release.
+
+ -- Debian Janitor <janitor@jelmer.uk>  Thu, 25 May 2023 19:02:17 -0000
+
 python-requests-cache (0.9.8-1) unstable; urgency=medium
 
   * New upstream release
diff --git a/docs/_static/collapsible_container.css b/docs/_static/collapsible_container.css
deleted file mode 100644
index bcad629..0000000
--- a/docs/_static/collapsible_container.css
+++ /dev/null
@@ -1,26 +0,0 @@
-/* Taken from: https://github.com/plone/training/blob/master/_static/custom.css */
-
-.toggle {
-    /* background: none repeat scroll 0 0 #e7f2fa; */
-    padding: 12px;
-    line-height: 24px;
-    margin-bottom: 24px;
-}
-
-.toggle .admonition-title {
-    display: block;
-    clear: both;
-    cursor: pointer;
-}
-
-.toggle .admonition-title:after {
-    content: " β–Ά";
-}
-
-.toggle .admonition-title.open:after {
-    content: " β–Ό";
-}
-
-.toggle p:last-child {
-    margin-bottom: 0;
-}
diff --git a/docs/_static/collapsible_container.js b/docs/_static/collapsible_container.js
deleted file mode 100644
index 75042d6..0000000
--- a/docs/_static/collapsible_container.js
+++ /dev/null
@@ -1,10 +0,0 @@
-// Taken from: https://github.com/plone/training/blob/master/_templates/page.html
-
-$(document).ready(function() {
-    $(".toggle > *").hide();
-    $(".toggle .admonition-title").show();
-    $(".toggle .admonition-title").click(function() {
-        $(this).parent().children().not(".admonition-title").toggle(400);
-        $(this).parent().children(".admonition-title").toggleClass("open");
-    })
-});
diff --git a/docs/_static/dynamodb_create_table.png b/docs/_static/dynamodb_create_table.png
new file mode 100644
index 0000000..0fcb7a0
Binary files /dev/null and b/docs/_static/dynamodb_create_table.png differ
diff --git a/docs/_static/dynamodb_items.png b/docs/_static/dynamodb_items.png
new file mode 100644
index 0000000..68066d0
Binary files /dev/null and b/docs/_static/dynamodb_items.png differ
diff --git a/docs/_static/dynamodb_response.png b/docs/_static/dynamodb_response.png
new file mode 100644
index 0000000..e0f4d85
Binary files /dev/null and b/docs/_static/dynamodb_response.png differ
diff --git a/docs/_static/memory_32px.png b/docs/_static/memory_32px.png
new file mode 100644
index 0000000..3960ca2
Binary files /dev/null and b/docs/_static/memory_32px.png differ
diff --git a/docs/_static/mongodb_vscode.png b/docs/_static/mongodb_vscode.png
new file mode 100644
index 0000000..cccc18d
Binary files /dev/null and b/docs/_static/mongodb_vscode.png differ
diff --git a/docs/_static/requests-cache-gh-preview.png b/docs/_static/requests-cache-gh-preview.png
new file mode 100644
index 0000000..3f250fd
Binary files /dev/null and b/docs/_static/requests-cache-gh-preview.png differ
diff --git a/docs/_static/requests-cache-icon.png b/docs/_static/requests-cache-icon.png
new file mode 100644
index 0000000..b91e408
Binary files /dev/null and b/docs/_static/requests-cache-icon.png differ
diff --git a/docs/_templates/module.rst_t b/docs/_templates/module.rst_t
index 630efd2..18e07c8 100644
--- a/docs/_templates/module.rst_t
+++ b/docs/_templates/module.rst_t
@@ -12,3 +12,4 @@
    :undoc-members:
    :inherited-members:
    :show-inheritance:
+   :ignore-module-all:
diff --git a/docs/api/requests_cache.session.md b/docs/api/requests_cache.session.md
new file mode 100644
index 0000000..eb37f90
--- /dev/null
+++ b/docs/api/requests_cache.session.md
@@ -0,0 +1,17 @@
+# Session
+```{eval-rst}
+.. automodule:: requests_cache.session
+
+.. autosummary::
+   :nosignatures:
+
+   CachedSession
+   CacheMixin
+
+.. Show inherited method docs on CachedSession instead of CacheMixin
+.. autoclass:: CachedSession
+    :show-inheritance:
+    :inherited-members:
+
+.. autoclass:: CacheMixin
+```
diff --git a/docs/conf.py b/docs/conf.py
index 55b35a7..224e65e 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -1,23 +1,37 @@
-# requests-cache documentation build configuration file
-import logging
+"""requests-cache documentation build config.
+
+Notes:
+
+* MyST-flavored markdown is used instead of rST for all user guide docs
+* API reference docs are generated based on module docstrings
+* Google-style docstrings are used throughout the project
+* apidoc is used to generate source files for the majority of module docs
+* The `api/` directory contains manually formatted sources for some modules
+* The `_templates` directory contains some Sphinx templates that modify auto-generated sources
+"""
 import os
 import sys
-from os.path import abspath, dirname, join
+from os.path import join
+from pathlib import Path
+from shutil import copy
 
 # Add project path
 sys.path.insert(0, os.path.abspath('..'))
 from requests_cache import __version__  # noqa: E402
 
-PROJECT_DIR = abspath(dirname(dirname(__file__)))
-PACKAGE_DIR = join(PROJECT_DIR, 'requests_cache')
-TEMPLATE_DIR = join(PROJECT_DIR, 'docs', '_templates')
+DOCS_DIR = Path(__file__).parent.absolute()
+PROJECT_DIR = DOCS_DIR.parent
+PACKAGE_DIR = PROJECT_DIR / 'requests_cache'
+TEMPLATE_DIR = DOCS_DIR / '_templates'
+EXTRA_APIDOC_DIR = DOCS_DIR / 'api'
+APIDOC_DIR = DOCS_DIR / 'modules'
 
 
 # General information about the project.
 project = 'requests-cache'
-needs_sphinx = '3.0'
+needs_sphinx = '4.0'
 master_doc = 'index'
-source_suffix = ['.rst', '.md']
+source_suffix = ['.md', '.rst']
 version = release = __version__
 html_static_path = ['_static']
 exclude_patterns = ['_build']
@@ -36,9 +50,9 @@ extensions = [
     'sphinx_automodapi.automodapi',
     'sphinx_automodapi.smart_resolver',
     'sphinx_copybutton',
-    'sphinx_inline_tabs',
-    'sphinx_panels',
+    'sphinx_design',
     'sphinxcontrib.apidoc',
+    'sphinxext.opengraph',
     'myst_parser',
     'notfound.extension',
 ]
@@ -52,19 +66,25 @@ myst_enable_extensions = [
     'smartquotes',
 ]
 
-# Exclude auto-generated page for top-level __init__.py
-exclude_patterns = ['_build', 'modules/requests_cache.rst']
+# Ignore a subset of auto-generated pages
+exclude_patterns = [
+    '_build',
+    f'{APIDOC_DIR.stem}/requests_cache.rst',
+    f'{EXTRA_APIDOC_DIR.stem}/*',
+]
 
 # Enable automatic links to other projects' Sphinx docs
 intersphinx_mapping = {
+    'attrs': ('https://www.attrs.org/en/stable/', None),
     'boto3': ('https://boto3.amazonaws.com/v1/documentation/api/latest/', None),
-    'botocore': ('http://botocore.readthedocs.io/en/latest/', None),
+    'botocore': ('https://botocore.readthedocs.io/en/latest/', None),
+    'cattrs': ('https://cattrs.readthedocs.io/en/latest/', None),
     'cryptography': ('https://cryptography.io/en/latest/', None),
     'itsdangerous': ('https://itsdangerous.palletsprojects.com/en/2.0.x/', None),
     'pymongo': ('https://pymongo.readthedocs.io/en/stable/', None),
     'python': ('https://docs.python.org/3', None),
     'redis': ('https://redis-py.readthedocs.io/en/stable/', None),
-    'requests': ('https://docs.python-requests.org/en/master/', None),
+    'requests': ('https://requests.readthedocs.io/en/latest/', None),
     'urllib3': ('https://urllib3.readthedocs.io/en/latest/', None),
 }
 extlinks = {
@@ -83,13 +103,13 @@ copybutton_prompt_is_regexp = True
 # Generate labels in the format <page>:<section>
 autosectionlabel_prefix_document = True
 
-# Use sphinx_autodoc_typehints extension instead of autodoc's built-in type hints
-autodoc_typehints = 'none'
+# Move type hint info to function description instead of signature
+autodoc_typehints = 'description'
 always_document_param_types = True
 
 # Use apidoc to auto-generate rst sources
-apidoc_module_dir = PACKAGE_DIR
-apidoc_output_dir = 'modules'
+apidoc_module_dir = str(PACKAGE_DIR)
+apidoc_output_dir = APIDOC_DIR.stem
 apidoc_excluded_paths = ['session.py']
 apidoc_extra_args = [f'--templatedir={TEMPLATE_DIR}']  # Note: Must be an absolute path
 apidoc_module_first = True
@@ -98,12 +118,9 @@ apidoc_toc_file = False
 
 # HTML general settings
 html_favicon = join('_static', 'favicon.ico')
-html_js_files = ['collapsible_container.js']
 html_css_files = [
-    'collapsible_container.css',
     'table.css',
-    'https://use.fontawesome.com/releases/v5.15.3/css/all.css',
-    'https://use.fontawesome.com/releases/v5.15.3/css/v4-shims.css',
+    'https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.1.1/css/all.min.css',
 ]
 html_show_copyright = False
 html_show_sphinx = False
@@ -132,18 +149,20 @@ def setup(app):
     """Run some additional steps after the Sphinx builder is initialized"""
     app.add_css_file('collapsible_container.css')
     app.connect('builder-inited', patch_automodapi)
+    app.connect('builder-inited', copy_module_docs)
 
 
 def patch_automodapi(app):
     """Monkey-patch the automodapi extension to exclude imported members:
     https://github.com/astropy/sphinx-automodapi/blob/master/sphinx_automodapi/automodsumm.py#L135
-
-    Also patches an unreleased fix for Sphinx 4 compatibility:
-    https://github.com/astropy/sphinx-automodapi/pull/129
     """
     from sphinx_automodapi import automodsumm
-    from sphinx_automodapi.automodsumm import Automodsumm
     from sphinx_automodapi.utils import find_mod_objs
 
     automodsumm.find_mod_objs = lambda *args: find_mod_objs(args[0], onlylocals=True)
-    Automodsumm.warn = lambda *args: logging.getLogger('sphinx_automodapi').warn(*args)
+
+
+def copy_module_docs(app):
+    """Copy manually written doc sources to apidoc directory"""
+    for doc in EXTRA_APIDOC_DIR.iterdir():
+        copy(doc, APIDOC_DIR)
diff --git a/docs/examples.md b/docs/examples.md
index 40115de..d193680 100644
--- a/docs/examples.md
+++ b/docs/examples.md
@@ -1,5 +1,5 @@
 (examples)=
-# {fa}`laptop-code,style=fas` Examples
+# {fas}`laptop-code` Examples
 This section contains some complete examples that demonstrate the main features of requests-cache.
 
 ## Articles
@@ -11,7 +11,13 @@ Some articles and blog posts that discuss requests-cache:
 * Tim O'Hearn: [Pragmatic Usage of requests-cache](https://www.tjohearn.com/2018/02/12/pragmatic-usage-of-requests-cache/)
 * Valdir Stumm Jr: [Tips for boosting your Python scripts](https://stummjr.org/post/building-scripts-in-python/)
 * Python Web Scraping (2nd Edition): [Exploring requests-cache](https://learning.oreilly.com/library/view/python-web-scraping/9781786462589/3fad0dcc-445b-49a4-8d5e-ba5e1ff8e3bb.xhtml)
+* Cui Qingcai: [一个神器,大幅提升爬取效率](https://cuiqingcai.com/36052.html) (A package that greatly improves crawling efficiency)
 
+<!--
+Explicit line numbers are added below to include the module docstring in the main doc, and put the
+rest of the module contents in a dropdown box.
+TODO: It might be nice to have a custom extension to do this automatically.
+-->
 ## Scripts
 The following scripts can also be found in the
 [examples/](https://github.com/requests-cache/requests-cache/tree/main/examples) folder on GitHub.
@@ -22,10 +28,14 @@ The following scripts can also be found in the
 :end-line: 4
 ```
 
-:::{admonition} Example: basic_sessions.py
-:class: toggle
+:::{dropdown} Example
+:animate: fade-in-slide-down
+:color: primary
+:icon: file-code
+
+[basic_sessions.py](https://github.com/requests-cache/requests-cache/blob/main/examples/basic_sessions.py)
 ```{literalinclude} ../examples/basic_sessions.py
-:lines: 1,6-
+:lines: 6-
 ```
 :::
 
@@ -35,10 +45,14 @@ The following scripts can also be found in the
 :end-line: 4
 ```
 
-:::{admonition} Example: basic_patching.py
-:class: toggle
+:::{dropdown} Example
+:animate: fade-in-slide-down
+:color: primary
+:icon: file-code
+
+[basic_patching.py](https://github.com/requests-cache/requests-cache/blob/main/examples/basic_patching.py)
 ```{literalinclude} ../examples/basic_patching.py
-:lines: 1,6-
+:lines: 6-
 ```
 :::
 
@@ -48,10 +62,14 @@ The following scripts can also be found in the
 :end-line: 3
 ```
 
-:::{admonition} Example: expiration.py
-:class: toggle
+:::{dropdown} Example
+:animate: fade-in-slide-down
+:color: primary
+:icon: file-code
+
+[expiration.py](https://github.com/requests-cache/requests-cache/blob/main/examples/expiration.py)
 ```{literalinclude} ../examples/expiration.py
-:lines: 1,5-
+:lines: 5-
 ```
 :::
 
@@ -61,10 +79,31 @@ The following scripts can also be found in the
 :end-line: 4
 ```
 
-:::{admonition} Example: /url_patterns.py
-:class: toggle
+:::{dropdown} Example
+:animate: fade-in-slide-down
+:color: primary
+:icon: file-code
+
+[url_patterns.py](https://github.com/requests-cache/requests-cache/blob/main/examples/url_patterns.py)
 ```{literalinclude} ../examples/url_patterns.py
-:lines: 1,6-
+:lines: 6-
+```
+:::
+
+### PyGithub
+```{include} ../examples/pygithub.py
+:start-line: 2
+:end-line: 25
+```
+
+:::{dropdown} Example
+:animate: fade-in-slide-down
+:color: primary
+:icon: file-code
+
+[pygithub.py](https://github.com/requests-cache/requests-cache/blob/main/examples/pygithub.py)
+```{literalinclude} ../examples/pygithub.py
+:lines: 27-
 ```
 :::
 
@@ -74,10 +113,14 @@ The following scripts can also be found in the
 :end-line: 4
 ```
 
-:::{admonition} Example: threads.py
-:class: toggle
+:::{dropdown} Example
+:animate: fade-in-slide-down
+:color: primary
+:icon: file-code
+
+[threads.py](https://github.com/requests-cache/requests-cache/blob/main/examples/threads.py)
 ```{literalinclude} ../examples/threads.py
-:lines: 1,6-
+:lines: 6-
 ```
 :::
 
@@ -87,10 +130,31 @@ The following scripts can also be found in the
 :end-line: 3
 ```
 
-:::{admonition} Example: log_requests.py
-:class: toggle
+:::{dropdown} Example
+:animate: fade-in-slide-down
+:color: primary
+:icon: file-code
+
+[log_requests.py](https://github.com/requests-cache/requests-cache/blob/main/examples/log_requests.py)
 ```{literalinclude} ../examples/log_requests.py
-:lines: 1,5-
+:lines: 5-
+```
+:::
+
+### External configuration
+```{include} ../examples/external_config.py
+:start-line: 2
+:end-line: 8
+```
+
+:::{dropdown} Example
+:animate: fade-in-slide-down
+:color: primary
+:icon: file-code
+
+[external_config.py](https://github.com/requests-cache/requests-cache/blob/main/examples/external_config.py)
+```{literalinclude} ../examples/external_config.py
+:lines: 10-
 ```
 :::
 
@@ -100,10 +164,60 @@ The following scripts can also be found in the
 :end-line: 8
 ```
 
-:::{admonition} Example: benchmark.py
-:class: toggle
+:::{dropdown} Example
+:animate: fade-in-slide-down
+:color: primary
+:icon: file-code
+
+[benchmark.py](https://github.com/requests-cache/requests-cache/blob/main/examples/benchmark.py)
 ```{literalinclude} ../examples/benchmark.py
-:lines: 1,10-
+:lines: 10-
+```
+:::
+
+
+### Requests per second graph
+```{include} ../examples/rps_graph.py
+:start-line: 2
+:end-line: 7
+```
+
+:::{dropdown} Example
+:animate: fade-in-slide-down
+:color: primary
+:icon: file-code
+
+[rps_graph.py](https://github.com/requests-cache/requests-cache/blob/main/examples/rps_graph.py)
+```{literalinclude} ../examples/rps_graph.py
+:lines: 9-
+```
+:::
+
+:::{dropdown} Screenshot
+:animate: fade-in-slide-down
+:color: info
+:icon: image
+
+![](../examples/rps_graph.png)
+
+:::
+
+### Using with GitHub Actions
+This example shows how to use requests-cache with [GitHub Actions](https://docs.github.com/en/actions).
+Key points:
+* Create the cache file within the CI project directory
+* You can use [actions/cache](https://github.com/actions/cache) to persist the cache file across
+  workflow runs
+    * You can use a constant cache key within this action to let requests-cache handle expiration
+
+
+:::{dropdown} Example
+:animate: fade-in-slide-down
+:color: primary
+:icon: file-code
+
+[github_actions.yml](https://github.com/requests-cache/requests-cache/blob/main/examples/github_actions.yml)
+```{literalinclude} ../examples/github_actions.yml
 ```
 :::
 
@@ -113,10 +227,14 @@ The following scripts can also be found in the
 :end-line: 4
 ```
 
-:::{admonition} Example: convert_cache.py
-:class: toggle
+:::{dropdown} Example
+:animate: fade-in-slide-down
+:color: primary
+:icon: file-code
+
+[convert_cache.py](https://github.com/requests-cache/requests-cache/blob/main/examples/convert_cache.py)
 ```{literalinclude} ../examples/convert_cache.py
-:lines: 1,6-
+:lines: 6-
 ```
 :::
 
@@ -127,10 +245,14 @@ The following scripts can also be found in the
 :end-line: 15
 ```
 
-:::{admonition} Example: custom_request_matcher.py
-:class: toggle
+:::{dropdown} Example
+:animate: fade-in-slide-down
+:color: primary
+:icon: file-code
+
+[custom_request_matcher.py](https://github.com/requests-cache/requests-cache/blob/main/examples/custom_request_matcher.py)
 ```{literalinclude} ../examples/custom_request_matcher.py
-:lines: 1,17-
+:lines: 17-
 ```
 :::
 
@@ -141,9 +263,31 @@ The following scripts can also be found in the
 :end-line: 4
 ```
 
-:::{admonition} Example: custom_request_matcher.py
-:class: toggle
+:::{dropdown} Example
+:animate: fade-in-slide-down
+:color: primary
+:icon: file-code
+
+[time_machine_backtesting.py](https://github.com/requests-cache/requests-cache/blob/main/examples/time_machine_backtesting.py)
 ```{literalinclude} ../examples/time_machine_backtesting.py
-:lines: 1,6-
+:lines: 6-
+```
+:::
+
+
+### VCR Export
+```{include} ../examples/vcr.py
+:start-line: 2
+:end-line: 5
+```
+
+:::{dropdown} Example
+:animate: fade-in-slide-down
+:color: primary
+:icon: file-code
+
+[vcr.py](https://github.com/requests-cache/requests-cache/blob/main/examples/vcr.py)
+```{literalinclude} ../examples/vcr.py
+:lines: 7-
 ```
 :::
diff --git a/docs/index.md b/docs/index.md
index 9e011f7..8a3e9c3 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -3,8 +3,11 @@ Pre-release warning to reduce confusion on what '/latest' means;
 TODO: add script to make this conditional
 -->
 <!--
-  ```{admonition} Note
-  :class: warning
+  :::{dropdown} Note
+  :animate: fade-in-slide-down
+  :color: primary
+  :icon: alert
+
   You are viewing the pre-release documentation, which may describe features that are still in development.
   Documentation for the latest stable release can be found at [requests-cache.readthedocs.io](https://requests-cache.readthedocs.io)
   ```
@@ -29,7 +32,7 @@ TODO: add script to make this conditional
 :maxdepth: 2
 
 user_guide
-reference
 examples
+reference
 project_info
 ````
diff --git a/docs/project_info/code_of_conduct.md b/docs/project_info/code_of_conduct.md
index 9977692..4d6d542 100644
--- a/docs/project_info/code_of_conduct.md
+++ b/docs/project_info/code_of_conduct.md
@@ -1,4 +1,4 @@
-# {fa}`handshake` Code of Conduct
+# {fas}`handshake` Code of Conduct
 This Code of Conduct is adapted from
 [Contributor Covenant, version 1.4](https://www.contributor-covenant.org/version/1/4/code-of-conduct.html).
 
diff --git a/docs/project_info/contributing.md b/docs/project_info/contributing.md
index 5d40716..aeb997e 100644
--- a/docs/project_info/contributing.md
+++ b/docs/project_info/contributing.md
@@ -1,5 +1,5 @@
 (contributing)=
-# {fa}`lightbulb` Contributing
+# {fas}`lightbulb` Contributing
 ```{include} ../../CONTRIBUTING.md
 :start-line: 1
 ```
diff --git a/docs/project_info/contributors.md b/docs/project_info/contributors.md
index 5c3aaa1..33dde28 100644
--- a/docs/project_info/contributors.md
+++ b/docs/project_info/contributors.md
@@ -1,4 +1,4 @@
-# {fa}`users` Contributors
+# {fas}`users` Contributors
 ```{include} ../../CONTRIBUTORS.md
 :start-line: 1
 ```
diff --git a/docs/project_info/history.md b/docs/project_info/history.md
index 3bb2b72..f2c0601 100644
--- a/docs/project_info/history.md
+++ b/docs/project_info/history.md
@@ -1,5 +1,5 @@
 (changelog)=
-# {fa}`history` History
+# {fas}`history` History
 ```{include} ../../HISTORY.md
 :start-line: 1
 ```
diff --git a/docs/project_info/related_projects.md b/docs/project_info/related_projects.md
index dd553cc..3a01d19 100644
--- a/docs/project_info/related_projects.md
+++ b/docs/project_info/related_projects.md
@@ -1,5 +1,5 @@
 (related-projects)=
-# {fa}`external-link-alt,style=fas` Related Projects
+# {fas}`external-link-alt` Related Projects
 If requests-cache isn't quite what you need, you can help make it better! See the
 {ref}`Contributing Guide <contributing>` for details.
 
diff --git a/docs/reference.md b/docs/reference.md
index bf61f7a..97ef991 100644
--- a/docs/reference.md
+++ b/docs/reference.md
@@ -11,13 +11,27 @@ from requests_cache import CachedSession, RedisCache, json_serializer
 ```
 :::
 
+<!--
+TODO:
+* move rst backend docs to md
+* Copy/overwrite from extra_modules/ to modules/
+-->
+## Primary Modules
+The following modules include the majority of the API relevant for most users:
+
 ```{toctree}
 :maxdepth: 2
-session
+modules/requests_cache.session
 modules/requests_cache.patcher
 modules/requests_cache.backends
 modules/requests_cache.models
+```
+
+## Secondary Modules
+The following modules are mainly for internal use, and are relevant for contributors and advanced users:
+```{toctree}
+:maxdepth: 2
+modules/requests_cache.cache_keys
 modules/requests_cache.policy
 modules/requests_cache.serializers
-modules/requests_cache.cache_keys
 ```
diff --git a/docs/sample_data/sample_response.json b/docs/sample_data/sample_response_binary.json
similarity index 100%
rename from docs/sample_data/sample_response.json
rename to docs/sample_data/sample_response_binary.json
diff --git a/docs/sample_data/sample_response.yaml b/docs/sample_data/sample_response_binary.yaml
similarity index 100%
rename from docs/sample_data/sample_response.yaml
rename to docs/sample_data/sample_response_binary.yaml
diff --git a/docs/sample_data/sample_response_json.json b/docs/sample_data/sample_response_json.json
new file mode 100644
index 0000000..45ea4e3
--- /dev/null
+++ b/docs/sample_data/sample_response_json.json
@@ -0,0 +1,56 @@
+{
+  "url": "https://httpbin.org/json",
+  "status_code": 200,
+  "reason": "OK",
+  "_decoded_content": {
+    "slideshow": {
+      "author": "Yours Truly",
+      "date": "date of publication",
+      "slides": [
+        {
+          "title": "Wake up to WonderWidgets!",
+          "type": "all"
+        },
+        {
+          "items": [
+            "Why <em>WonderWidgets<\/em> are great",
+            "Who <em>buys<\/em> WonderWidgets"
+          ],
+          "title": "Overview",
+          "type": "all"
+        }
+      ],
+      "title": "Sample Slide Show"
+    }
+  },
+  "cache_key": "4dc151d95200ec91fa77021989f5194e9be47e87f8f228306f3a8d5434b9e547",
+  "created_at": "2021-07-21T22:34:50.343095",
+  "elapsed": 0.242198,
+  "encoding": "utf-8",
+  "headers": {
+    "Date": "Wed, 21 Jul 2021 22:34:50 GMT",
+    "Content-Type": "application/json",
+    "Content-Length": "429",
+    "Connection": "keep-alive",
+    "Server": "gunicorn/19.9.0",
+    "Access-Control-Allow-Origin": "*",
+    "Access-Control-Allow-Credentials": "true"
+  },
+  "request": {
+    "body": "PH%2y",
+    "headers": {
+      "User-Agent": "python-requests/2.26.0",
+      "Accept-Encoding": "gzip, deflate",
+      "Accept": "*/*",
+      "Connection": "keep-alive"
+    },
+    "method": "GET",
+    "url": "https://httpbin.org/json"
+  },
+  "raw": {
+    "decode_content": false,
+    "reason": "OK",
+    "status": 200,
+    "version": 11
+  }
+}
diff --git a/docs/sample_data/sample_response_json.yaml b/docs/sample_data/sample_response_json.yaml
new file mode 100644
index 0000000..4949e31
--- /dev/null
+++ b/docs/sample_data/sample_response_json.yaml
@@ -0,0 +1,42 @@
+url: https://httpbin.org/json
+status_code: 200
+reason: OK
+_decoded_content:
+  slideshow:
+    author: Yours Truly
+    date: date of publication
+    slides:
+    - title: Wake up to WonderWidgets!
+      type: all
+    - items:
+      - Why <em>WonderWidgets</em> are great
+      - Who <em>buys</em> WonderWidgets
+      title: Overview
+      type: all
+    title: Sample Slide Show
+created_at: '2021-07-21T22:32:17.592974'
+elapsed: 0.187586
+encoding: utf-8
+headers:
+  Access-Control-Allow-Credentials: 'true'
+  Access-Control-Allow-Origin: '*'
+  Connection: keep-alive
+  Content-Length: '308'
+  Content-Type: application/json
+  Date: Wed, 21 Jul 2021 22:32:17 GMT
+  Server: gunicorn/19.9.0
+request:
+  method: GET
+  url: https://httpbin.org/json
+  body: !!binary |
+    Tm9uZQ==
+  headers:
+    Accept: '*/*'
+    Accept-Encoding: gzip, deflate
+    Connection: keep-alive
+    User-Agent: python-requests/2.26.0
+raw:
+  decode_content: false
+  reason: OK
+  status: 200
+  version: 11
diff --git a/docs/session.rst b/docs/session.rst
deleted file mode 100644
index efbf260..0000000
--- a/docs/session.rst
+++ /dev/null
@@ -1,3 +0,0 @@
-Session
-=======
-.. automodule:: requests_cache.session
diff --git a/docs/user_guide.md b/docs/user_guide.md
index c36c1e3..240b496 100644
--- a/docs/user_guide.md
+++ b/docs/user_guide.md
@@ -2,21 +2,21 @@
 # {fa}`book` User Guide
 This section covers the main features of requests-cache.
 
+## Basics
 ```{toctree}
-:caption: The Basics
 :maxdepth: 2
 
 user_guide/installation
 user_guide/general
+user_guide/backends
 user_guide/files
 user_guide/troubleshooting
 ```
 
+## Advanced Features & Options
 ```{toctree}
-:caption: Features & Options
 :maxdepth: 2
 
-user_guide/backends
 user_guide/filtering
 user_guide/headers
 user_guide/inspection
diff --git a/docs/user_guide/advanced_requests.md b/docs/user_guide/advanced_requests.md
index 1a18939..2a3c185 100644
--- a/docs/user_guide/advanced_requests.md
+++ b/docs/user_guide/advanced_requests.md
@@ -1,4 +1,4 @@
-# {fa}`info-circle` Advanced Requests
+# {fas}`info-circle` Advanced Requests
 Following are some tips on using requests-cache with some of the more
 [advanced features](https://docs.python-requests.org/en/latest/user/advanced/) of the requests
 library.
@@ -8,13 +8,16 @@ Requests has an [event hook](https://requests.readthedocs.io/en/master/user/adva
 system that can be used to add custom behavior into different parts of the request process.
 It can be used, for example, for request throttling:
 
-:::{admonition} Example code
-:class: toggle
+:::{dropdown} Example
+:animate: fade-in-slide-down
+:color: primary
+:icon: file-code
+
 ```python
 >>> import time
 >>> import requests
 >>> from requests_cache import CachedSession
->>>
+
 >>> def make_throttle_hook(timeout=1.0):
 >>>     """Make a request hook function that adds a custom delay for non-cached requests"""
 >>>     def hook(response, *args, **kwargs):
@@ -23,12 +26,12 @@ It can be used, for example, for request throttling:
 >>>             time.sleep(timeout)
 >>>         return response
 >>>     return hook
->>>
+
 >>> session = CachedSession()
 >>> session.hooks['response'].append(make_throttle_hook(0.1))
 >>> # The first (real) request will have an added delay
->>> session.get('http://httpbin.org/get')
->>> session.get('http://httpbin.org/get')
+>>> session.get('https://httpbin.org/get')
+>>> session.get('https://httpbin.org/get')
 ```
 :::
 
@@ -38,11 +41,14 @@ can use the same code to iterate over both cached and non-cached requests. Cache
 will have already been read (i.e., consumed), but will be available for re-reading so it behaves like
 the original streamed response:
 
-:::{admonition} Example code
-:class: toggle
+:::{dropdown} Example
+:animate: fade-in-slide-down
+:color: primary
+:icon: file-code
+
 ```python
 >>> from requests_cache import CachedSession
->>>
+
 >>> session = CachedSession()
 >>> for i in range(2):
 ...     response = session.get('https://httpbin.org/stream/20', stream=True)
diff --git a/docs/user_guide/backends.md b/docs/user_guide/backends.md
index afd9baf..8eb06a5 100644
--- a/docs/user_guide/backends.md
+++ b/docs/user_guide/backends.md
@@ -1,34 +1,47 @@
 (backends)=
-# {fa}`database` Backends
-![](../_static/sqlite_32px.png)
-![](../_static/redis_32px.png)
-![](../_static/mongodb_32px.png)
-![](../_static/dynamodb_32px.png)
-![](../_static/files-json_32px.png)
-
-Several cache backends are included. The default is SQLite, since it's generally the simplest to
-use, and requires no extra dependencies or configuration.
-
-See {py:mod}`.requests_cache.backends` for usage details for specific backends.
-
-```{note}
-In the rare case that SQLite is not available
-(for example, [on Heroku](https://devcenter.heroku.com/articles/sqlite3)), a non-persistent
-in-memory cache is used by default.
+# {fas}`database` Backends
+This page contains general information about the cache backends supported by requests-cache.
+
+The default backend is SQLite, since it requires no extra dependencies or configuration, and has
+great all-around performance for the most common use cases.
+
+Here is a full list of backends available, and any extra dependencies required:
+
+Backend                                               | Class                      | Alias          | Dependencies
+------------------------------------------------------|----------------------------|----------------|----------------------------------------------------------
+![](../_static/sqlite_32px.png)     {ref}`sqlite`     | {py:class}`.SQLiteCache`   | `'sqlite'`     |
+![](../_static/redis_32px.png)      {ref}`redis`      | {py:class}`.RedisCache`    | `'redis'`      | [redis-py](https://github.com/andymccurdy/redis-py)
+![](../_static/mongodb_32px.png)    {ref}`mongodb`    | {py:class}`.MongoCache`    | `'mongodb'`    | [pymongo](https://github.com/mongodb/mongo-python-driver)
+![](../_static/mongodb_32px.png)    {ref}`gridfs`     | {py:class}`.GridFSCache`   | `'gridfs'`     | [pymongo](https://github.com/mongodb/mongo-python-driver)
+![](../_static/dynamodb_32px.png)   {ref}`dynamodb`   | {py:class}`.DynamoDbCache` | `'dynamodb'`   | [boto3](https://github.com/boto/boto3)
+![](../_static/files-json_32px.png) {ref}`filesystem` | {py:class}`.FileCache`     | `'filesystem'` |
+![](../_static/memory_32px.png) Memory                | {py:class}`.BaseCache`     | `'memory'`     |
+
+<!-- Hidden ToC tree to add pages to sidebar ToC -->
+```{toctree}
+:hidden:
+:glob: true
+
+backends/*
 ```
 
-## Backend Dependencies
-Most of the other backends require some extra dependencies, listed below.
-
-Backend                                                | Class                      | Alias          | Dependencies
--------------------------------------------------------|----------------------------|----------------|-------------
-[SQLite](https://www.sqlite.org)                       | {py:class}`.SQLiteCache`   | `'sqlite'`     |
-[Redis](https://redis.io)                              | {py:class}`.RedisCache`    | `'redis'`      | [redis-py](https://github.com/andymccurdy/redis-py)
-[MongoDB](https://www.mongodb.com)                     | {py:class}`.MongoCache`    | `'mongodb'`    | [pymongo](https://github.com/mongodb/mongo-python-driver)
-[GridFS](https://docs.mongodb.com/manual/core/gridfs/) | {py:class}`.GridFSCache`   | `'gridfs'`     | [pymongo](https://github.com/mongodb/mongo-python-driver)
-[DynamoDB](https://aws.amazon.com/dynamodb)            | {py:class}`.DynamoDbCache` | `'dynamodb'`   | [boto3](https://github.com/boto/boto3)
-Filesystem                                             | {py:class}`.FileCache`     | `'filesystem'` |
-Memory                                                 | {py:class}`.BaseCache`     | `'memory'`     |
+## Choosing a Backend
+Here are some general notes on choosing a backend:
+* All of the backends perform well enough that they usually won't become a bottleneck until you
+  start hitting around **700-1000 requests per second**
+* It's recommended to start with SQLite until you have a specific reason to switch
+* If/when you encounter limitations with SQLite, the next logical choice is usually Redis
+* Each backend has some unique features that make them well suited for specific use cases; see
+  individual backend docs for more details
+
+Here are some specific situations where you may want to choose one of the other backends:
+* Your application is distributed across multiple machines, without access to a common filesystem
+* Your application will make large volumes of concurrent writes (i.e., many nodes/threads/processes caching many different URLs)
+* Your application environment only has slower file storage options (like a magnetic drive, or NFS with high latency)
+* Your application environment has little or no local storage (like some cloud computing services)
+* Your application is already using one of the other backends
+* You want to reuse your cached response data outside of requests-cache
+* You want to use a specific feature available in one of the other backends
 
 ## Specifying a Backend
 You can specify which backend to use with the `backend` parameter for either {py:class}`.CachedSession`
@@ -37,7 +50,7 @@ or {py:func}`.install_cache`. You can specify one by name, using the aliases lis
 >>> session = CachedSession('my_cache', backend='redis')
 ```
 
-Or by instance:
+Or by instance, which is preferable if you want to pass additional backend-specific options:
 ```python
 >>> backend = RedisCache(host='192.168.1.63', port=6379)
 >>> session = CachedSession('my_cache', backend=backend)
@@ -55,24 +68,23 @@ DynamoDB        | Table name
 Filesystem      | Cache directory
 
 Each backend class also accepts optional parameters for the underlying connection. For example,
-{py:class}`.SQLiteCache` accepts parameters for {py:func}`sqlite3.connect`:
-```python
->>> session = CachedSession('my_cache', backend='sqlite', timeout=30)
-```
+the {ref}`sqlite` backend accepts parameters for {py:func}`sqlite3.connect`.
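
For instance, a sketch passing `timeout` through to `sqlite3.connect`:
```python
>>> session = CachedSession('my_cache', backend='sqlite', timeout=30)
```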
 
 ## Testing Backends
 If you just want to quickly try out all of the available backends for comparison,
 [docker-compose](https://docs.docker.com/compose/) config is included for all supported services.
 First, [install docker](https://docs.docker.com/get-docker/) if you haven't already. Then, run:
 
-:::{tab} Bash (Linux/macOS)
+::::{tab-set}
+
+:::{tab-item} Bash (Linux/macOS)
 ```bash
 pip install -U requests-cache[all] docker-compose
 curl https://raw.githubusercontent.com/requests-cache/requests-cache/main/docker-compose.yml -O docker-compose.yml
 docker-compose up -d
 ```
 :::
-:::{tab} Powershell (Windows)
+:::{tab-item} Powershell (Windows)
 ```ps1
 pip install -U requests-cache[all] docker-compose
 Invoke-WebRequest -Uri https://raw.githubusercontent.com/requests-cache/requests-cache/main/docker-compose.yml -Outfile docker-compose.yml
@@ -80,6 +92,8 @@ docker-compose up -d
 ```
 :::
 
+::::
+
 (exporting)=
 ## Exporting To A Different Backend
 If you have cached data that you want to copy or migrate to a different backend, you can do this
@@ -107,8 +121,11 @@ Or, using backend classes directly:
 ## Custom Backends
 If the built-in backends don't suit your needs, you can create your own by making subclasses of {py:class}`.BaseCache` and {py:class}`.BaseStorage`:
 
-:::{admonition} Example code
-:class: toggle
+:::{dropdown} Example
+:animate: fade-in-slide-down
+:color: primary
+:icon: file-code
+
 ```python
 >>> from requests_cache import CachedSession
 >>> from requests_cache.backends import BaseCache, BaseStorage
diff --git a/docs/user_guide/backends/dynamodb.md b/docs/user_guide/backends/dynamodb.md
new file mode 100644
index 0000000..85192ac
--- /dev/null
+++ b/docs/user_guide/backends/dynamodb.md
@@ -0,0 +1,134 @@
+(dynamodb)=
+# DynamoDB
+```{image} ../../_static/dynamodb.png
+```
+
+[DynamoDB](https://aws.amazon.com/dynamodb) is a fully managed, highly scalable NoSQL document
+database hosted on [Amazon Web Services](https://aws.amazon.com).
+
+## Use Cases
+In terms of features, DynamoDB is roughly comparable to MongoDB and other NoSQL databases. Since
+it's a managed service, no server setup or maintenance is required, and it's very convenient to use
+if your application is already on AWS. It is an especially good fit for serverless applications
+running on [AWS Lambda](https://aws.amazon.com/lambda).
+
+```{warning}
+DynamoDB item sizes are limited to 400KB. If you need to cache larger responses, consider
+using a different backend.
+```
+
+## Usage Example
+Initialize with a {py:class}`.DynamoDbCache` instance:
+```python
+>>> from requests_cache import CachedSession, DynamoDbCache
+>>> session = CachedSession(backend=DynamoDbCache())
+```
+
+Or by alias:
+```python
+>>> session = CachedSession(backend='dynamodb')
+```
+
+## Connection Options
+This backend accepts any keyword arguments for {py:meth}`boto3.session.Session.resource`:
+```python
+>>> backend = DynamoDbCache(region_name='us-west-2')
+>>> session = CachedSession(backend=backend)
+```
+
+## Viewing Responses
+By default, responses are only partially serialized so they can be saved as plain DynamoDB
+documents. Response data can then be easily viewed via the
+[AWS Console](https://aws.amazon.com/console/).
+
+Here is an example of responses listed under **DynamoDB > Tables > Explore Items:**
+:::{dropdown} Screenshot
+:animate: fade-in-slide-down
+:color: primary
+:icon: file-media
+
+```{image} ../../_static/dynamodb_items.png
+```
+:::
+
+And here is an example response:
+:::{dropdown} Screenshot
+:animate: fade-in-slide-down
+:color: primary
+:icon: file-media
+
+```{image} ../../_static/dynamodb_response.png
+```
+:::
+
+It is also possible to query these responses with the [AWS CLI](https://aws.amazon.com/cli), for
+example:
+```bash
+aws dynamodb query --table-name http_cache > responses.json
+```
+
+```bash
+aws dynamodb query \
+    --table-name http_cache \
+    --key-condition-expression "namespace = :n1" \
+    --expression-attribute-values '{":n1": {"S": "responses"}}' \
+    > responses.json
+```
+
+## Expiration
+DynamoDB natively supports TTL on a per-item basis, and can automatically remove expired responses from
+the cache. This will be set by default, according to normal {ref}`expiration settings <expiration>`.
+
+```{warning}
+DynamoDB does not remove expired items immediately. See
+[How It Works: DynamoDB Time to Live](https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/howitworks-ttl.html)
+for more details.
+```
+
+If needed, you can disable this behavior with the `ttl` argument:
+```python
+>>> backend = DynamoDbCache(ttl=False)
+```
+
+## Creating a Table
+A table will be automatically created if one doesn't already exist. This is convenient if you just
+want to quickly test out DynamoDB as a cache backend, but in a production environment you will
+likely want to create the tables yourself, for example with
+[CloudFormation](https://aws.amazon.com/cloudformation/) or [Terraform](https://www.terraform.io/).
+
+You just need a table with a single partition key. A `value` attribute (containing response data)
+will be created dynamically once items are added to the table.
+- Table: `http_cache` (or any other name, as long as it matches the `table_name` parameter for `DynamoDbCache`)
+- Attributes:
+  - `key`: String
+- Keys:
+  - Partition key (aka hash key): `key`
+
+Example of manually creating a table in the console:
+:::{dropdown} Screenshot
+:animate: fade-in-slide-down
+:color: primary
+:icon: file-media
+
+```{image} ../../_static/dynamodb_create_table.png
+```
+:::
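
If you'd rather script table creation, here is a minimal boto3 sketch matching the attribute spec
above (the region and billing mode are assumptions; adjust for your environment):
```python
import boto3

dynamodb = boto3.resource('dynamodb', region_name='us-west-2')  # Assumed region
table = dynamodb.create_table(
    TableName='http_cache',
    KeySchema=[{'AttributeName': 'key', 'KeyType': 'HASH'}],
    AttributeDefinitions=[{'AttributeName': 'key', 'AttributeType': 'S'}],
    BillingMode='PAY_PER_REQUEST',  # On-demand capacity (an assumption)
)
table.wait_until_exists()
```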
+
+### Example CloudFormation Template
+:::{dropdown} Example
+:animate: fade-in-slide-down
+:color: primary
+:icon: file-code
+
+[cloudformation.yml](https://github.com/requests-cache/requests-cache/blob/main/examples/cloudformation.yml)
+```{literalinclude} ../../../examples/cloudformation.yml
+:language: yaml
+```
+:::
+
+To deploy with the [AWS CLI](https://aws.amazon.com/cli):
+```
+aws cloudformation deploy \
+    --stack-name requests-cache \
+    --template-file examples/cloudformation.yml
+```
diff --git a/docs/user_guide/backends/filesystem.md b/docs/user_guide/backends/filesystem.md
new file mode 100644
index 0000000..241dc24
--- /dev/null
+++ b/docs/user_guide/backends/filesystem.md
@@ -0,0 +1,50 @@
+(filesystem)=
+# Filesystem
+```{image} ../../_static/files-generic.png
+```
+
+This backend stores responses in files on the local filesystem, with one file per response.
+
+## Use Cases
+This backend is useful if you would like to use your cached response data outside of requests-cache,
+for example:
+
+- Manually viewing cached responses without the need for extra tools (e.g., with a simple text editor)
+- Using cached responses as sample data for automated tests
+- Reading cached responses directly from another application or library, without depending on requests-cache
+
+## Usage Example
+Initialize with a {py:class}`.FileCache` instance:
+```python
+>>> from requests_cache import CachedSession, FileCache
+>>> session = CachedSession(backend=FileCache())
+```
+
+Or by alias:
+```python
+>>> session = CachedSession(backend='filesystem')
+```
+
+## File Formats
+By default, responses are saved as JSON files. If you prefer a different format, you can use one of
+other available {ref}`serializers` or provide your own. For example, to save responses as
+YAML files (requires `pyyaml`):
+```python
+>>> session = CachedSession('~/http_cache', backend='filesystem', serializer='yaml')
+>>> session.get('https://httpbin.org/get')
+```
+
+## Cache Files
+- See {ref}`files` for general info on specifying cache paths
+- The path for a given response will be in the format `<cache_name>/<cache_key>`
+- Redirects are stored in a separate SQLite database, located at `<cache_name>/redirects.sqlite`
+- Use {py:meth}`.FileCache.paths` to get a list of all cached response paths:
+```python
+>>> print(list(session.cache.paths()))
+> ['/home/user/http_cache/4dc151d95200ec.yaml']
+```
+
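+As a sketch of reading one of these files directly, assuming the default JSON serializer (the file
+name below is hypothetical, and the exact fields depend on the serializer):
+```python
+import json
+from pathlib import Path
+
+# Hypothetical path; real file names are generated cache keys
+cache_file = Path('~/http_cache/4dc151d95200ec.json').expanduser()
+with open(cache_file) as f:
+    response_data = json.load(f)
+
+# Inspect whatever fields the serializer wrote
+print(sorted(response_data.keys()))
+```
+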
+## Performance and Limitations
+- Write performance will vary based on the serializer used, in the range of roughly 1-3ms per write.
+- This backend stores response files in a single directory, and does not currently implement fan-out. This means that on most filesystems, storing a very large number of responses will result in reduced performance.
+- This backend currently uses a simple threading lock rather than a file lock system, so it is not an ideal choice for highly parallel applications.
diff --git a/docs/user_guide/backends/gridfs.md b/docs/user_guide/backends/gridfs.md
new file mode 100644
index 0000000..568d5b9
--- /dev/null
+++ b/docs/user_guide/backends/gridfs.md
@@ -0,0 +1,23 @@
+(gridfs)=
+# GridFS
+```{image} ../../_static/mongodb.png
+```
+
+[GridFS](https://docs.mongodb.com/manual/core/gridfs/) is a specification for storing large files
+in MongoDB.
+
+## Use Cases
+Use this backend if you are using MongoDB and expect to store responses **larger than 16MB**. See
+{py:mod}`~requests_cache.backends.mongodb` for more general info.
+
+## Usage Example
+Initialize with a {py:class}`.GridFSCache` instance:
+```python
+>>> from requests_cache import CachedSession, GridFSCache
+>>> session = CachedSession(backend=GridFSCache())
+```
+
+Or by alias:
+```python
+>>> session = CachedSession(backend='gridfs')
+```
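+
+## Connection Options
+Like {py:class}`.MongoCache`, this backend connects via pymongo. A minimal sketch, assuming keyword
+arguments are forwarded to {py:class}`pymongo.mongo_client.MongoClient`:
+```python
+>>> # Assumption: connection kwargs are passed through to MongoClient
+>>> backend = GridFSCache('http_cache', host='192.168.1.63', port=27017)
+>>> session = CachedSession(backend=backend)
+```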
diff --git a/docs/user_guide/backends/mongodb.md b/docs/user_guide/backends/mongodb.md
new file mode 100644
index 0000000..3561f01
--- /dev/null
+++ b/docs/user_guide/backends/mongodb.md
@@ -0,0 +1,103 @@
+(mongodb)=
+# MongoDB
+```{image} ../../_static/mongodb.png
+```
+
+[MongoDB](https://www.mongodb.com) is a NoSQL document database. It stores data in collections
+of documents, which are more flexible and less strictly structured than tables in a relational
+database.
+
+## Use Cases
+MongoDB scales well and is a good option for larger applications. For raw caching performance, it is
+not quite as fast as {py:mod}`~requests_cache.backends.redis`, but may be preferable if you already
+have an instance running, or if it has a specific feature you want to use. See sections below for
+some relevant examples.
+
+## Usage Example
+Initialize with a {py:class}`.MongoCache` instance:
+```python
+>>> from requests_cache import CachedSession, MongoCache
+>>> session = CachedSession(backend=MongoCache())
+```
+
+Or by alias:
+```python
+>>> session = CachedSession(backend='mongodb')
+```
+
+## Connection Options
+This backend accepts any keyword arguments for {py:class}`pymongo.mongo_client.MongoClient`:
+```python
+>>> backend = MongoCache(host='192.168.1.63', port=27017)
+>>> session = CachedSession('http_cache', backend=backend)
+```
+
+## Viewing Responses
+By default, responses are only partially serialized so they can be saved as plain MongoDB documents.
+Response data can be easily viewed via the
+[MongoDB shell](https://www.mongodb.com/docs/mongodb-shell/#mongodb-binary-bin.mongosh),
+[Compass](https://www.mongodb.com/products/compass), or any other interface for MongoDB.
+
+Here is an example response viewed in
+[MongoDB for VSCode](https://code.visualstudio.com/docs/azure/mongodb):
+
+:::{dropdown} Screenshot
+:animate: fade-in-slide-down
+:color: primary
+:icon: file-media
+```{image} ../../_static/mongodb_vscode.png
+```
+:::
+
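+You can also inspect documents directly with [pymongo](https://pymongo.readthedocs.io). A minimal
+sketch, assuming the default database name (`http_cache`) and collection name (`responses`):
+```python
+from pymongo import MongoClient
+
+client = MongoClient()
+# Assumed names, based on the default cache name; adjust to your settings
+collection = client['http_cache']['responses']
+document = collection.find_one()
+print(list(document.keys()) if document else 'no cached responses yet')
+```
+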
+## Expiration
+MongoDB [natively supports TTL](https://www.mongodb.com/docs/v4.0/core/index-ttl), and can
+automatically remove expired responses from the cache.
+
+**Notes:**
+- TTL is set for a whole collection, and cannot be set on a per-document basis.
+- The TTL index will persist until explicitly removed or overwritten, or until the collection is deleted.
+- Expired items are
+  [not guaranteed to be removed immediately](https://www.mongodb.com/docs/v4.0/core/index-ttl/#timing-of-the-delete-operation).
+  Typically it happens within 60 seconds.
+- If you want, you can rely entirely on MongoDB TTL instead of requests-cache
+  {ref}`expiration settings <expiration>`.
+- Or you can set both values, to be certain that you don't get an expired response before MongoDB
+  removes it.
+- If you intend to reuse expired responses, e.g. with {ref}`conditional-requests` or `stale_if_error`,
+  you can set TTL to a larger value than your session `expire_after`, or disable it altogether.
+
+**Examples:**
+Create a TTL index:
+```python
+>>> backend = MongoCache()
+>>> backend.set_ttl(3600)
+```
+
+Overwrite it with a new value:
+```python
+>>> backend = MongoCache()
+>>> backend.set_ttl(timedelta(days=1), overwrite=True)
+```
+
+Remove the TTL index:
+```python
+>>> backend = MongoCache()
+>>> backend.set_ttl(None, overwrite=True)
+```
+
+Use both MongoDB TTL and requests-cache expiration:
+```python
+>>> ttl = timedelta(days=1)
+>>> backend = MongoCache()
+>>> backend.set_ttl(ttl)
+>>> session = CachedSession(backend=backend, expire_after=ttl)
+```
+
+**Recommended:** Set MongoDB TTL to a longer value than your {py:class}`.CachedSession` expiration.
+This allows expired responses to be eventually cleaned up, but still be reused for conditional
+requests for some period of time:
+```python
+>>> backend = MongoCache()
+>>> backend.set_ttl(timedelta(days=7))
+>>> session = CachedSession(backend=backend, expire_after=timedelta(days=1))
+```
diff --git a/docs/user_guide/backends/redis.md b/docs/user_guide/backends/redis.md
new file mode 100644
index 0000000..56df69a
--- /dev/null
+++ b/docs/user_guide/backends/redis.md
@@ -0,0 +1,73 @@
+(redis)=
+# Redis
+```{image} ../../_static/redis.png
+```
+
+[Redis](https://redis.io) is an in-memory data store with on-disk persistence.
+
+## Use Cases
+Redis offers a high-performance cache that scales exceptionally well, making it an ideal choice for
+larger applications, especially those that make a large volume of concurrent requests.
+
+## Usage Example
+Initialize your session with a {py:class}`.RedisCache` instance:
+```python
+>>> from requests_cache import CachedSession, RedisCache
+>>> session = CachedSession(backend=RedisCache())
+```
+
+Or by alias:
+```python
+>>> session = CachedSession(backend='redis')
+```
+
+## Connection Options
+This backend accepts any keyword arguments for {py:class}`redis.client.Redis`:
+```python
+>>> backend = RedisCache(host='192.168.1.63', port=6379)
+>>> session = CachedSession('http_cache', backend=backend)
+```
+
+Or you can pass an existing `Redis` object:
+```python
+>>> from redis import Redis
+
+>>> connection = Redis(host='192.168.1.63', port=6379)
+>>> backend = RedisCache(connection=connection)
+>>> session = CachedSession('http_cache', backend=backend)
+```
+
+## Persistence
+Redis operates on data in memory, and by default also persists data to snapshots on disk. This is
+optimized for performance, with a minor risk of data loss, and is usually the best configuration
+for a cache. If you need different behavior, the frequency and type of persistence can be customized
+or disabled entirely. See [Redis Persistence](https://redis.io/topics/persistence) for details.
+
+## Expiration
+Redis natively supports TTL on a per-key basis, and can automatically remove expired responses from
+the cache. This will be set by default, according to normal {ref}`expiration settings <expiration>`.
+See [Redis: EXPIRE](https://redis.io/commands/expire/) docs for more details on internal TTL behavior.
+
+If you intend to reuse expired responses, e.g. with {ref}`conditional-requests` or `stale_if_error`,
+you can use the `ttl_offset` argument to add additional time before deletion (default: 1 hour).
+In other words, this makes backend expiration longer than cache expiration:
+```python
+>>> backend = RedisCache(ttl_offset=3600)
+```
+
+Alternatively, you can disable TTL completely with the `ttl` argument:
+```python
+>>> backend = RedisCache(ttl=False)
+```
+
+## Redislite
+If you can't easily set up your own Redis server, another option is
+[redislite](https://github.com/yahoo/redislite). It contains its own lightweight, embedded Redis
+database, and can be used as a drop-in replacement for redis-py. Usage example:
+```python
+>>> from redislite import Redis
+>>> from requests_cache import CachedSession, RedisCache
+
+>>> backend = RedisCache(connection=Redis())
+>>> session = CachedSession(backend=backend)
+```
diff --git a/docs/user_guide/backends/sqlite.md b/docs/user_guide/backends/sqlite.md
new file mode 100644
index 0000000..c4e0744
--- /dev/null
+++ b/docs/user_guide/backends/sqlite.md
@@ -0,0 +1,84 @@
+(sqlite)=
+# SQLite
+```{image} ../../_static/sqlite.png
+```
+[SQLite](https://www.sqlite.org/) is a fast and lightweight SQL database engine that stores data
+either in memory or in a single file on disk.
+
+## Use Cases
+Despite its simplicity, SQLite is a powerful tool. For example, it's the primary storage system for
+a number of common applications including Firefox, Chrome, and many components of both Android and
+iOS. It's well suited for caching, and requires no extra configuration or dependencies, which is why
+it's used as the default backend for requests-cache.
+
+## Usage Example
+SQLite is the default backend, but if you want to pass extra connection options or just want to be
+explicit, initialize your session with a {py:class}`.SQLiteCache` instance:
+```python
+>>> from requests_cache import CachedSession, SQLiteCache
+>>> session = CachedSession(backend=SQLiteCache())
+```
+
+Or by alias:
+```python
+>>> session = CachedSession(backend='sqlite')
+```
+
+## Connection Options
+This backend accepts any keyword arguments for {py:func}`sqlite3.connect`:
+```python
+>>> backend = SQLiteCache('http_cache', timeout=30)
+>>> session = CachedSession(backend=backend)
+```
+
+## Cache Files
+- See {ref}`files` for general info on specifying cache paths
+- If you specify a name without an extension, the default extension `.sqlite` will be used
+
+### In-Memory Caching
+SQLite also supports [in-memory databases](https://www.sqlite.org/inmemorydb.html).
+You can enable this (in "shared" memory mode) with the `use_memory` option:
+```python
+>>> session = CachedSession('http_cache', use_memory=True)
+```
+
+Or specify a memory URI with additional options:
+```python
+>>> session = CachedSession('file:memdb1?mode=memory&cache=shared')
+```
+
+Or just `:memory:`, if you are only using the cache from a single thread:
+```python
+>>> session = CachedSession(':memory:')
+```
+
+## Performance
+When working with average-sized HTTP responses (\< 1MB) and using a modern SSD for file storage, you
+can expect speeds of around:
+- Write: 2-8ms
+- Read: 0.2-0.6ms
+
+Of course, this will vary based on hardware specs, response size, and other factors.
+
+## Concurrency
+SQLite supports concurrent access, so it is safe to use from a multi-threaded and/or multi-process
+application. It supports unlimited concurrent reads. Writes, however, are queued and run in serial,
+so if you need to make large volumes of concurrent requests, you may want to consider a different
+backend that's specifically made for that kind of workload, like {py:class}`.RedisCache`.
+
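+To illustrate safe concurrent usage, a single session can be shared across a thread pool (a minimal
+sketch; the URLs and worker count are arbitrary):
+```python
+from concurrent.futures import ThreadPoolExecutor
+
+from requests_cache import CachedSession
+
+session = CachedSession('http_cache')
+
+def fetch(url: str) -> int:
+    return session.get(url).status_code
+
+urls = [f'https://httpbin.org/get?page={i}' for i in range(10)]
+with ThreadPoolExecutor(max_workers=4) as executor:
+    print(list(executor.map(fetch, urls)))
+```
+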
+## Hosting Services and Filesystem Compatibility
+There are some caveats to using SQLite with some hosting services, based on what kind of storage is
+available:
+
+- NFS:
+  - SQLite may be used on an NFS share, but is usually only safe to use from a single process at a time.
+    See the [SQLite FAQ](https://www.sqlite.org/faq.html#q5) for details.
+  - PythonAnywhere is one example of a host that uses NFS-backed storage. Using SQLite from a
+    multiprocess application will likely result in `sqlite3.OperationalError: database is locked`.
+- Ephemeral storage:
+  - Heroku [explicitly disables SQLite](https://devcenter.heroku.com/articles/sqlite3) on its dynos.
+  - AWS [EC2](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/InstanceStorage.html),
+    [Lambda (depending on configuration)](https://aws.amazon.com/blogs/compute/choosing-between-aws-lambda-data-storage-options-in-web-apps/),
+    and some other AWS services use ephemeral storage that only persists for the lifetime of the
+    instance. This is fine for short-term caching. For longer-term persistence, you can use an
+    [attached EBS volume](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ebs-attaching-volume.html).
diff --git a/docs/user_guide/compatibility.md b/docs/user_guide/compatibility.md
index de73b6e..3ecb809 100644
--- a/docs/user_guide/compatibility.md
+++ b/docs/user_guide/compatibility.md
@@ -1,6 +1,5 @@
-<!-- TODO: Fix relative links -->
 (compatibility)=
-# {fa}`plus-square` Usage with other requests-based libraries
+# {fas}`puzzle-piece` Compatibility with other libraries
 This library works by patching and/or extending {py:class}`requests.Session`. Many other libraries
 out there do the same thing, making it potentially difficult to combine them.
 
@@ -47,11 +46,11 @@ support wrapping an existing session object:
 >>> from requests_cache import CachedSession
 >>> from requests_futures.sessions import FuturesSession
 
->>> session = FutureSession(session=CachedSession())
+>>> session = FuturesSession(session=CachedSession())
 ```
 
-In this case, `FutureSession` must wrap `CachedSession` rather than the other way around, since
-`FutureSession` returns (as you might expect) futures rather than response objects.
+In this case, `FuturesSession` must wrap `CachedSession` rather than the other way around, since
+`FuturesSession` returns (as you might expect) futures rather than response objects.
 See [issue #135](https://github.com/requests-cache/requests-cache/issues/135) for more notes on this.
 
 ## Requests-OAuthlib
@@ -86,10 +85,11 @@ _after_ caching, you get the added benefit of not counting cache hits against yo
 >>> # Limit non-cached requests to 5 requests per second, with unlimited cached requests
 >>> # Optionally use Redis as both the bucket backend and the cache backend
 >>> session = CachedLimiterSession(
-...     rates=RequestRate(5, Duration.SECOND),
+...     per_second=5,
 ...     bucket_class=RedisBucket,
 ...     backend=RedisCache(),
 ... )
+
 ```
 
 ## Internet Archive
@@ -118,8 +118,11 @@ For example, if you are using {py:func}`.install_cache` in your application and
 requests-mock [pytest fixture](https://requests-mock.readthedocs.io/en/latest/pytest.html) in your
 tests, you could wrap it in another fixture that uses {py:func}`.uninstall_cache` or
 {py:func}`.disabled`:
-:::{admonition} Example: test_requests_mock_disable_cache.py
-:class: toggle
+:::{dropdown} Example
+:animate: fade-in-slide-down
+:color: primary
+:icon: file-code
+
 ```{literalinclude} ../../tests/compat/test_requests_mock_disable_cache.py
 ```
 :::
@@ -142,8 +145,12 @@ Or if you use a `CachedSession` object, you could replace it with a regular `Ses
 If you want both caching and mocking features at the same time, you can attach requests-mock's
 [adapter](https://requests-mock.readthedocs.io/en/latest/adapter.html) to a `CachedSession`:
 
-:::{admonition} Example: test_requests_mock_combine_cache.py
-:class: toggle
+:::{dropdown} Example
+:animate: fade-in-slide-down
+:color: primary
+:icon: file-code
+
+`test_requests_mock_combine_cache.py`
 ```{literalinclude} ../../tests/compat/test_requests_mock_combine_cache.py
 ```
 :::
@@ -158,8 +165,12 @@ This has the advantage of only using request-mock's behavior for
 ```
 
 To turn that into a complete example:
-:::{admonition} Example: test_requests_mock_load_cache.py
-:class: toggle
+:::{dropdown} Example
+:animate: fade-in-slide-down
+:color: primary
+:icon: file-code
+
+`test_requests_mock_load_cache.py`
 ```{literalinclude} ../../tests/compat/test_requests_mock_load_cache.py
 ```
 :::
@@ -168,8 +179,28 @@ To turn that into a complete example:
 Usage with the [responses](https://github.com/getsentry/responses) library is similar to the
 requests-mock examples above.
 
-:::{admonition} Example: test_responses_load_cache.py
-:class: toggle
+:::{dropdown} Example
+:animate: fade-in-slide-down
+:color: primary
+:icon: file-code
+
+`test_responses_load_cache.py`
 ```{literalinclude} ../../tests/compat/test_responses_load_cache.py
 ```
 :::
+
+## VCR
+If you would like to reuse your cached response data for unit tests, one option is to convert your
+cache into a format compatible with VCR-based libraries like
+[vcrpy](https://github.com/kevin1024/vcrpy) and [betamax](https://github.com/betamaxpy/betamax).
+
+:::{dropdown} Example
+:animate: fade-in-slide-down
+:color: primary
+:icon: file-code
+
+`vcr.py`
+```{literalinclude} ../../examples/vcr.py
+:lines: 7-
+```
+:::
diff --git a/docs/user_guide/expiration.md b/docs/user_guide/expiration.md
index 8d1c95a..9b6b50b 100644
--- a/docs/user_guide/expiration.md
+++ b/docs/user_guide/expiration.md
@@ -1,5 +1,5 @@
 (expiration)=
-# {fa}`clock` Expiration
+# {fas}`clock` Expiration
 By default, cached responses will be stored indefinitely. There are a number of options for
 specifying how long to store responses, either with a single expiration value, glob patterns,
 or {ref}`cache headers <headers>`.
@@ -20,52 +20,73 @@ request, the following order of precedence is used:
 2. Cache-Control request headers
 3. Per-request expiration (`expire_after` argument for {py:meth}`.CachedSession.request`)
 4. Per-URL expiration (`urls_expire_after` argument for {py:class}`.CachedSession`)
-5. Per-session expiration (`expire_after` argument for {py:class}`.CacheBackend`)
+5. Per-session expiration (`expire_after` argument for {py:class}`.CachedSession`)
 
 ## Expiration Values
-`expire_after` can be any of the following:
-- `-1` (to never expire)
-- `0` (to "expire immediately," e.g. bypass the cache)
+`expire_after` can be any of the following time values:
 - A positive number (in seconds)
 - A {py:class}`~datetime.timedelta`
 - A {py:class}`~datetime.datetime`
 
+Or one of the following special values:
+- `DO_NOT_CACHE`: Skip both reading from and writing to the cache
+- `EXPIRE_IMMEDIATELY`: Consider the response already expired, but potentially usable
+- `NEVER_EXPIRE`: Store responses indefinitely
+
+```{note}
+A value of 0 or `EXPIRE_IMMEDIATELY` will behave the same as
+[`Cache-Control: max-age=0`](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Cache-Control#response_directives).
+Depending on other settings and headers, an expired response may either be cached and require
+revalidation for each use, or not be cached at all. See {ref}`conditional-requests` for more details.
+```
+
 Examples:
 ```python
->>> # To specify a unit of time other than seconds, use a timedelta
 >>> from datetime import timedelta
+>>> from requests_cache import DO_NOT_CACHE, NEVER_EXPIRE, EXPIRE_IMMEDIATELY, CachedSession
+
+>>> # Specify a simple expiration value in seconds
+>>> session = CachedSession(expire_after=60)
+
+>>> # To specify a unit of time other than seconds, use a timedelta
 >>> session = CachedSession(expire_after=timedelta(days=30))
 
->>> # Update an existing session to disable expiration (i.e., store indefinitely)
->>> session.expire_after = -1
+>>> # Or expire on a specific date and time
+>>> session = CachedSession(expire_after=datetime(2023, 1, 1, 0, 0))
+
+>>> # Update an existing session to store new responses indefinitely
+>>> session.settings.expire_after = NEVER_EXPIRE
 
 >>> # Disable caching by default, unless enabled by other settings
->>> session = CachedSession(expire_after=0)
+>>> session = CachedSession(expire_after=DO_NOT_CACHE)
+
+>>> # Override for a single request: cache the response if it can be revalidated
+>>> session.request(expire_after=EXPIRE_IMMEDIATELY)
 ```
 
 (url-patterns)=
-## Expiration With URL Patterns
-You can use `urls_expire_after` to set different expiration values based on URL glob patterns.
-This allows you to customize caching based on what you know about the resources you're requesting
-or how you intend to use them. For example, you might request one resource that gets updated
-frequently, another that changes infrequently, and another that never changes. Example:
+## Expiration With URL Patterns
+You can use `urls_expire_after` to set different expiration values based on URL glob or regular
+expression patterns:
 ```python
 >>> urls_expire_after = {
 ...     '*.site_1.com': 30,
 ...     'site_2.com/resource_1': 60 * 2,
 ...     'site_2.com/resource_2': 60 * 60 * 24,
-...     'site_2.com/static': -1,
+...     re.compile(r'site_2.com/resource_\d'): 60 * 60 * 24 * 7,
+...     'site_2.com/resource_*': 60 * 60,
+...     'site_2.com/static': NEVER_EXPIRE,
 ... }
 >>> session = CachedSession(urls_expire_after=urls_expire_after)
 ```
 
 **Notes:**
-- `urls_expire_after` should be a dict in the format `{'pattern': expire_after}`
-- `expire_after` accepts the same types as `CachedSession.expire_after`
-- Patterns will match request **base URLs without the protocol**, so the pattern `site.com/resource/`
-  is equivalent to `http*://site.com/resource/**`
+- `urls_expire_after` should be a dict in the format `{'pattern': expire_after}`
+- `expire_after` accepts the same types as `CachedSession.settings.expire_after`
+- **Glob patterns** will match request **base URLs without the protocol**, so the pattern `site.com/resource/`
+  is equivalent to `http*://site.com/resource/**`.
+  For **regex patterns**, the **whole URL** will be matched, so you _can_ put restrictions on the protocol, e.g.
+  `re.compile(r'https://site.com/.*')`.
 - If there is more than one match, the first match will be used in the order they are defined
-- If no patterns match a request, `CachedSession.expire_after` will be used as a default
+- If no patterns match a request, `CachedSession.settings.expire_after` will be used as a default
+- See {ref}`url-filtering` for an example of using `urls_expire_after` as an allowlist
 
 (request-errors)=
 ## Expiration and Error Handling
@@ -89,32 +110,166 @@ you get a 500. You will then get the expired cache data instead:
 True, True
 ```
 
+Similar to the header `Cache-Control: stale-if-error`, you may also pass a time value representing the
+maximum staleness you are willing to accept:
+```python
+# If there is an error on refresh, use a cached response if it expired 5 minutes ago or less
+session = CachedSession(stale_if_error=timedelta(minutes=5))
+```
+
 In addition to HTTP error codes, `stale_if_error` also applies to python exceptions (typically a
 {py:exc}`~requests.RequestException`). See `requests` documentation on
 [Errors and Exceptions](https://2.python-requests.org/en/master/user/quickstart/#errors-and-exceptions)
 for more details on request errors in general.
 
-## Removing Expired Responses
-For better read performance, expired responses won't be removed immediately, but will be removed
-(or replaced) the next time they are requested.
-:::{tip}
-Implementing one or more cache eviction algorithms is being considered. If this is something you are
-interested in, please provide feedback via [issues](https://github.com/requests-cache/requests-cache/issues)!
-:::
+(stale-while-revalidate)=
+## Asynchronous Revalidation
+You can use the `stale_while_revalidate` option to improve performance when refreshing responses.
+This will cause an expired cached response to be returned initially, while a non-blocking request is
+sent to refresh the response for the next time it's requested.
+
+```{note}
+While the corresponding response header `Cache-Control: stale-while-revalidate` only applies to
+{ref}`conditional-requests`, requests-cache extends this behavior to other refresh requests as well
+(even if a validator is not available).
+```
+
+You may either set this to `True`, to use stale responses regardless of their age:
+```python
+session = CachedSession(stale_while_revalidate=True)
+```
+
+Or specify a maximum staleness value you are willing to accept:
+```python
+# Use a cached response while revalidating, if it expired 5 minutes ago or less
+session = CachedSession(stale_while_revalidate=timedelta(minutes=5))
+```
+
+## Removing Responses
+For better read performance, expired responses won't be removed immediately by default.
+Instead, they will be replaced the next time they are requested.
+
+You can manually delete responses according to various conditions, and some backends support
+automatic removal.
+
+(manual_removal)=
+### Manual Removal
+
+To delete **all** cached responses, use {py:meth}`.BaseCache.clear`:
+```python
+>>> session.cache.clear()
+```
+
+To delete expired responses, use {py:meth}`.BaseCache.delete`:
+```python
+>>> session.cache.delete(expired=True)
+```
 
-To manually clear all expired responses, use
-{py:meth}`.CachedSession.remove_expired_responses`:
+Or, if you have patched ``requests`` using {py:func}`.install_cache`:
 ```python
->>> session.remove_expired_responses()
+>>> import requests_cache
+>>> requests_cache.delete(expired=True)
 ```
 
-Or, when using patching:
+You can also remove responses older than a certain time:
 ```python
->>> requests_cache.remove_expired_responses()
+# Remove responses older than 7 days
+session.cache.delete(older_than=timedelta(days=7))
 ```
 
-You can also apply a different `expire_after` to previously cached responses, which will
-revalidate the cache with the new expiration time:
+Or apply a new expiration value to previously cached responses:
 ```python
->>> session.remove_expired_responses(expire_after=timedelta(days=30))
+# Reset expiration for all responses to 30 days from now
+>>> session.cache.reset_expiration(timedelta(days=30))
+```
+
+Finally, you can delete individual responses matching specific requests or
+{ref}`cache keys <custom-matching>`:
+```python
+>>> from requests import Request
+
+# Delete a simple GET request by URL
+>>> session.cache.delete(urls=['https://httpbin.org/json'])
+
+# Delete by additional request values
+>>> request_1 = Request('GET', 'https://httpbin.org/get', params={'key': 'value'})
+>>> request_2 = Request('GET', 'https://httpbin.org/get', headers={'header': 'value'})
+>>> session.cache.delete(requests=[request_1, request_2])
+
+# Delete by cache key
+>>> session.cache.delete('e25f7e6326966e82')
+```
+
+(ttl)=
+### Automatic Removal
+The following backends have native TTL support, which can be used to automatically remove expired
+responses:
+* {py:mod}`DynamoDB <requests_cache.backends.dynamodb>`
+* {py:mod}`MongoDB <requests_cache.backends.mongodb>`
+* {py:mod}`Redis <requests_cache.backends.redis>`
+
+## Request Options
+In addition to the base arguments for {py:func}`requests.request`, requests-cache adds some extra
+cache-related arguments. These apply to {py:meth}`.CachedSession.request`,
+{py:meth}`.CachedSession.send`, and all HTTP method-specific functions (`get()`, `post()`, etc.).
+
+### Per-Request Expiration
+The `expire_after` argument can be used to override the session's expiration for a single request.
+```python
+>>> session = CachedSession(expire_after=300)
+>>> # This request will be cached for 60 seconds, not 300
+>>> session.get('https://httpbin.org/get', expire_after=60)
+```
+
+### Manual Refresh
+If you want to manually refresh a response before it expires, you can use the `refresh` argument.
+
+* This is equivalent to **F5** in most browsers.
+* The response will be saved with a new expiration time, according to the normal expiration rules
+  described above.
+* If possible, this will {ref}`revalidate <conditional-requests>` with the server to potentially
+  avoid re-downloading an unchanged response.
+* To force a refresh (e.g., skip revalidation and always send a new request), use the
+  `force_refresh` argument. This is equivalent to **Ctrl-F5** in most browsers.
+
+Example:
+```python
+>>> response_1 = session.get('https://httpbin.org/get')
+>>> response_2 = session.get('https://httpbin.org/get', refresh=True)
+>>> assert response_2.from_cache is False
+```
+
+### Validation-Only Requests
+If you want to always send a conditional request before using a cached response, you can use the
+session setting `always_revalidate`:
+```python
+>>> session = CachedSession(always_revalidate=True)
+```
+
+Unlike the `refresh` option, this only affects cached responses with a validator.
+
+### Cache-Only Requests
+If you want to only use cached responses without making any real requests, you can use the
+`only_if_cached` option. This essentially uses your cache in "offline mode". If a response isn't
+cached or is expired, you will get a `504 Not Cached` response instead.
+```python
+>>> session = CachedSession()
+>>> session.cache.clear()
+>>> response = session.get('https://httpbin.org/get', only_if_cached=True)
+>>> print(response.status_code)
+504
+>>> response.raise_for_status()
+HTTPError: 504 Server Error: Not Cached for url: https://httpbin.org/get
+```
+
+You can also combine this with `stale_if_error` to return cached responses even if they are expired.
+```python
+>>> session = CachedSession(expire_after=1, stale_if_error=True)
+>>> session.get('https://httpbin.org/get')
+>>> time.sleep(1)
+
+>>> # The response will be cached but expired by this point
+>>> response = session.get('https://httpbin.org/get', only_if_cached=True)
+>>> print(response.status_code)
+200
 ```
diff --git a/docs/user_guide/files.md b/docs/user_guide/files.md
index 36cb5fa..94ced46 100644
--- a/docs/user_guide/files.md
+++ b/docs/user_guide/files.md
@@ -1,5 +1,5 @@
 (files)=
-# {fa}`folder-open` Cache Files
+# {fas}`folder-open` Cache Files
 ```{note}
 This section only applies to the {py:mod}`SQLite <requests_cache.backends.sqlite>` and
 {py:mod}`Filesystem <requests_cache.backends.filesystem>` backends.
@@ -38,21 +38,25 @@ If you don't know exactly where you want to put your cache files, your system's
 or **cache directory** is a good choice. Some options are available as shortcuts for these locations.
 
 Use the default temp directory with the `use_temp` option:
-:::{tab} Linux
+::::{tab-set}
+:::{tab-item} Linux
+:sync: linux
 ```python
 >>> session = CachedSession('http_cache', backend='sqlite', use_temp=True)
 >>> print(session.cache.db_path)
 '/tmp/http_cache.sqlite'
 ```
 :::
-:::{tab} macOS
+:::{tab-item} macOS
+:sync: macos
 ```python
 >>> session = CachedSession('http_cache', backend='sqlite', use_temp=True)
 >>> print(session.cache.db_path)
 '/var/folders/xx/http_cache.sqlite'
 ```
 :::
-:::{tab} Windows
+:::{tab-item} Windows
+:sync: windows
 ```python
 >>> session = CachedSession('http_cache', backend='sqlite', use_temp=True)
 >>> print(session.cache.db_path)
@@ -60,28 +64,35 @@ Use the default temp directory with the `use_temp` option:
 ```
 :::
 
+::::
+
 Or use the default cache directory with the `use_cache_dir` option:
-:::{tab} Linux
+::::{tab-set}
+:::{tab-item} Linux
+:sync: linux
 ```python
 >>> session = CachedSession('http_cache', backend='filesystem', use_cache_dir=True)
 >>> print(session.cache.cache_dir)
 '/home/user/.cache/http_cache/'
 ```
 :::
-:::{tab} macOS
+:::{tab-item} macOS
+:sync: macos
 ```python
 >>> session = CachedSession('http_cache', backend='filesystem', use_cache_dir=True)
 >>> print(session.cache.cache_dir)
 '/Users/user/Library/Caches/http_cache/'
 ```
 :::
-:::{tab} Windows
+:::{tab-item} Windows
+:sync: windows
 ```python
 >>> session = CachedSession('http_cache', backend='filesystem', use_cache_dir=True)
 >>> print(session.cache.cache_dir)
 'C:\\Users\\user\\AppData\\Local\\http_cache\\'
 ```
 :::
+::::
 
 ```{note}
 If the cache name is an absolute path, the `use_temp` and `use_cache_dir` options will be ignored.
@@ -89,5 +100,5 @@ If it's a relative path, it will be relative to the temp or cache directory, res
 ```
 
 There are a number of other system default locations that might be appropriate for a cache file. See
-the [appdirs](https://github.com/ActiveState/appdirs) library for an easy cross-platform way to get
-the most commonly used ones.
+the [platformdirs](https://github.com/platformdirs/platformdirs) library for an easy cross-platform
+way to get the most commonly used ones.
diff --git a/docs/user_guide/filtering.md b/docs/user_guide/filtering.md
index 68e65f0..e7a5c06 100644
--- a/docs/user_guide/filtering.md
+++ b/docs/user_guide/filtering.md
@@ -1,5 +1,5 @@
 (filtering)=
-# {fa}`filter` Cache Filtering
+# {fas}`filter` Cache Filtering
 In many cases you will want to choose what you want to cache instead of just caching everything. By
 default, all **read-only** (`GET` and `HEAD`) **requests with a 200 response code** are cached. A
 few options are available to modify this behavior.
@@ -10,35 +10,34 @@ with a regular {py:class}`requests.Session` object, or wrapper functions like
 {py:func}`requests.get`, etc.
 ```
 
-(http-methods)=
-## Cached HTTP Methods
+(http-method-filtering)=
+## Filter by HTTP Methods
 To cache additional HTTP methods, specify them with `allowable_methods`:
 ```python
 >>> session = CachedSession(allowable_methods=('GET', 'POST'))
->>> session.post('http://httpbin.org/post', json={'param': 'value'})
+>>> session.post('https://httpbin.org/post', json={'param': 'value'})
 ```
 
 For example, some APIs use the `POST` method to request data via a JSON-formatted request body, for
 requests that may exceed the max size of a `GET` request. You may also want to cache `POST` requests
 to ensure you don't send the exact same data multiple times.
 
-## Cached Status Codes
+## Filter by Status Codes
 To cache additional status codes, specify them with `allowable_codes`
 ```python
 >>> session = CachedSession(allowable_codes=(200, 418))
->>> session.get('http://httpbin.org/teapot')
+>>> session.get('https://httpbin.org/teapot')
 ```
 
-(selective-caching)=
-## Cached URLs
+(url-filtering)=
+## Filter by URLs
 You can use {ref}`URL patterns <url-patterns>` to define an allowlist for selective caching, by
-using a expiration value of `0` (or `requests_cache.DO_NOT_CACHE`, to be more explicit) for
-non-matching request URLs:
+using a expiration value of `requests_cache.DO_NOT_CACHE` for non-matching request URLs:
 ```python
->>> from requests_cache import DO_NOT_CACHE, CachedSession
+>>> from requests_cache import DO_NOT_CACHE, NEVER_EXPIRE, CachedSession
 >>> urls_expire_after = {
 ...     '*.site_1.com': 30,
-...     'site_2.com/static': -1,
+...     'site_2.com/static': NEVER_EXPIRE,
 ...     '*': DO_NOT_CACHE,
 ... }
 >>> session = CachedSession(urls_expire_after=urls_expire_after)
@@ -51,6 +50,7 @@ expiration to `0`:
 >>> session = CachedSession(urls_expire_after=urls_expire_after, expire_after=0)
 ```
 
+(custom-filtering)=
 ## Custom Cache Filtering
 If you need more advanced behavior for choosing what to cache, you can provide a custom filtering
 function via the `filter_fn` param. This can by any function that takes a
@@ -58,8 +58,6 @@ function via the `filter_fn` param. This can by any function that takes a
 should be cached. It will be applied to both new responses (on write) and previously cached
 responses (on read):
 
-:::{admonition} Example code
-:class: toggle
 ```python
 >>> from sys import getsizeof
 >>> from requests_cache import CachedSession
@@ -70,7 +68,6 @@ responses (on read):
 
 >>> session = CachedSession(filter_fn=filter_by_size)
 ```
-:::
 
 ```{note}
 `filter_fn()` will be used **in addition to** other filtering options.
diff --git a/docs/user_guide/general.md b/docs/user_guide/general.md
index 2932a25..3c8a20c 100644
--- a/docs/user_guide/general.md
+++ b/docs/user_guide/general.md
@@ -1,5 +1,5 @@
 (general)=
-# {fa}`play-circle` General Usage
+# {fas}`play-circle` General Usage
 There are two main ways of using requests-cache:
 - **Sessions:** (recommended) Use {py:class}`.CachedSession` to send your requests
 - **Patching:** Globally patch `requests` using {py:func}`.install_cache()`
@@ -9,23 +9,23 @@ There are two main ways of using requests-cache:
 Basic usage looks like this:
 ```python
 >>> from requests_cache import CachedSession
->>>
+
 >>> session = CachedSession()
->>> session.get('http://httpbin.org/get')
+>>> session.get('https://httpbin.org/get')
 ```
 
-Any {py:class}`requests.Session` method can be used (but see {ref}`http-methods` section for
+Any {py:class}`requests.Session` method can be used (but see {ref}`http-method-filtering` section for
 options):
 ```python
->>> session.request('GET', 'http://httpbin.org/get')
->>> session.head('http://httpbin.org/get')
+>>> session.request('GET', 'https://httpbin.org/get')
+>>> session.head('https://httpbin.org/get')
 ```
 
 Caching can be temporarily disabled for the session with
 {py:meth}`.CachedSession.cache_disabled`:
 ```python
 >>> with session.cache_disabled():
-...     session.get('http://httpbin.org/get')
+...     session.get('https://httpbin.org/get')
 ```
 
 The best way to clean up your cache is through {ref}`expiration` settings, but you can also
@@ -37,19 +37,20 @@ clear out everything at once with {py:meth}`.BaseCache.clear`:
 (patching)=
 ## Patching
 In some situations, it may not be possible or convenient to manage your own session object. In those
-cases, you can use {py:func}`.install_cache` to add caching to all `requests` functions:
+cases, you can use {py:func}`.install_cache`. This adds fully transparent caching to all `requests`
+functions, without the need to modify any existing code:
 ```python
 >>> import requests
 >>> import requests_cache
->>>
+
 >>> requests_cache.install_cache()
->>> requests.get('http://httpbin.org/get')
+>>> requests.get('https://httpbin.org/get')
 ```
 
 As well as session methods:
 ```python
 >>> session = requests.Session()
->>> session.get('http://httpbin.org/get')
+>>> session.get('https://httpbin.org/get')
 ```
 
 {py:func}`.install_cache` accepts all the same parameters as {py:class}`.CachedSession`:
@@ -60,20 +61,20 @@ As well as session methods:
 It can be temporarily {py:func}`.enabled`:
 ```python
 >>> with requests_cache.enabled():
-...     requests.get('http://httpbin.org/get')  # Will be cached
+...     requests.get('https://httpbin.org/get')  # Will be cached
 ```
 
 Or temporarily {py:func}`.disabled`:
 ```python
 >>> requests_cache.install_cache()
 >>> with requests_cache.disabled():
-...     requests.get('http://httpbin.org/get')  # Will not be cached
+...     requests.get('https://httpbin.org/get')  # Will not be cached
 ```
 
 Or completely removed with {py:func}`.uninstall_cache`:
 ```python
 >>> requests_cache.uninstall_cache()
->>> requests.get('http://httpbin.org/get')
+>>> requests.get('https://httpbin.org/get')
 ```
 
 You can also clear out all responses in the cache with {py:func}`.clear`, and check if
@@ -81,13 +82,31 @@ requests-cache is currently installed with {py:func}`.is_installed`.
 
 (monkeypatch-issues)=
 ### Patching Limitations & Potential Issues
-Like any other utility that uses monkey-patching, there are some scenarios where you won't want to
-use {py:func}`.install_cache`:
+There are some scenarios where patching `requests` with {py:func}`.install_cache` is not ideal:
 - When using other libraries that patch {py:class}`requests.Session`
 - In a multi-threaded or multiprocess application
 - In a library that will be imported by other libraries or applications
 - In a larger application that makes requests in several different modules, where it may not be
   obvious what is and isn't being cached
 
-In any of these cases, consider using {py:class}`.CachedSession`, the {py:func}`.enabled`
-contextmanager, or {ref}`selective-caching`.
+In these cases, consider using {py:class}`.CachedSession` instead.
+
+(settings)=
+## Settings
+There are a number of settings that affect cache behavior, which are covered in more detail in the following sections:
+* {ref}`expiration`
+* {ref}`filtering`
+* {ref}`matching`
+
+These can all be passed as keyword arguments to {py:class}`.CachedSession` or
+{py:func}`.install_cache`. When using a session object, these can also be safely modified at any
+time via {py:attr}`.CachedSession.settings`. For example:
+```python
+>>> from requests_cache import CachedSession
+
+>>> session = CachedSession()
+>>> session.settings.expire_after = 360
+>>> session.settings.stale_if_error = True
+```
+
+Note that this does **not** include backend and serializer settings, which cannot be changed after initialization.
diff --git a/docs/user_guide/headers.md b/docs/user_guide/headers.md
index 6ad0220..792b1d2 100644
--- a/docs/user_guide/headers.md
+++ b/docs/user_guide/headers.md
@@ -1,16 +1,11 @@
 (headers)=
-# {fa}`file-code` Cache Headers
-Most common request and response headers related to caching are supported, including
-[Cache-Control](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Cache-Control)
-and [ETags](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/ETag).
-
-```{note}
-requests-cache is not (yet) intended to be strict implementation of HTTP caching according to
-[RFC 2616](https://datatracker.ietf.org/doc/html/rfc2616),
-[RFC 7234](https://datatracker.ietf.org/doc/html/rfc7234), etc. If there is additional behavior you
-would like to see, please create an issue to request it.
-```
+# {fas}`file-code` Cache Headers
+Requests-cache supports most common HTTP caching headers, including
+[ETags](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/ETag),
+[Cache-Control](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Cache-Control),
+and several extensions.
 
+(conditional-requests)=
 ## Conditional Requests
 [Conditional requests](https://developer.mozilla.org/en-US/docs/Web/HTTP/Conditional_requests) are
 automatically sent for any servers that support them. Once a cached response expires, it will only
@@ -21,19 +16,25 @@ requests-cache repo:
 ```python
 >>> # Cache a response that will expire immediately
 >>> url = 'https://api.github.com/repos/requests-cache/requests-cache'
->>> session = CachedSession(expire_after=0.0001)
+>>> session = CachedSession(expire_after=1)
 >>> session.get(url)
->>> time.sleep(0.0001)
+>>> time.sleep(1)
 
 >>> # The cached response will still be used until the remote content actually changes
 >>> response = session.get(url)
->>> print(response.from_cache, response.is_expired)
-True, True
+>>> print(response.from_cache)
+True
 ```
 
+```{note}
+Also see {ref}`stale-while-revalidate` for a variation of this behavior.
+```
+
+(cache-control)=
 ## Cache-Control
 `Cache-Control` **request** headers will always be used if present. This is mainly useful if you are
-adding requests-cache to an existing application or library that already uses caching request headers.
+adding requests-cache to an existing application or library that already sends requests with cache
+headers.
 
 `Cache-Control` **response** headers are an opt-in feature. If enabled, these will take priority over
 any other `expire_after` values. See {ref}`precedence` for the full order of precedence.
@@ -43,19 +44,37 @@ To enable this behavior, use the `cache_control` option:
 ```
 
 ## Supported Headers
+Requests-cache implements the majority of private cache behaviors specified by the following RFCs,
+with some minor variations:
+* [RFC 2616](https://datatracker.ietf.org/doc/html/rfc2616)
+* [RFC 5861](https://datatracker.ietf.org/doc/html/rfc5861)
+* [RFC 7234](https://datatracker.ietf.org/doc/html/rfc7234)
+* [RFC 8246](https://datatracker.ietf.org/doc/html/rfc8246)
+
 The following headers are currently supported:
 
 **Request headers:**
 - `Cache-Control: max-age`: Used as the expiration time in seconds
-- `Cache-Control: no-cache`: Skip reading from the cache
+- `Cache-Control: max-stale`: Accept responses that have been expired for up to this many seconds
+- `Cache-Control: min-fresh`: Don't accept responses if they will expire within this many seconds
+- `Cache-Control: no-cache`: Revalidate with the server before using a cached response
 - `Cache-Control: no-store`: Skip reading from and writing to the cache
-- `If-None-Match`: Automatically added if an `ETag` is available
-- `If-Modified-Since`: Automatically added if `Last-Modified` is available
+- `Cache-Control: only-if-cached`: Only return results from the cache. If not cached, return a 504
+  response instead of sending a new request. Note that this may return a stale response.
+- `Cache-Control: stale-if-error`: If an error occurs while refreshing a cached response, use it
+  if it expired by no more than this many seconds ago
+- `If-None-Match`: Automatically added for revalidation, if an `ETag` is available
+- `If-Modified-Since`: Automatically added for revalidation, if `Last-Modified` is available
 
 **Response headers:**
+- `Cache-Control: immutable`: Cache the response with no expiration
 - `Cache-Control: max-age`: Used as the expiration time in seconds
+- `Cache-Control: must-revalidate`: When used in combination with `max-age=0`, revalidate immediately.
+- `Cache-Control: no-cache`: Revalidate with the server before using a cached response
 - `Cache-Control: no-store` Skip writing to the cache
-- `Cache-Control: immutable`: Cache the response with no expiration
-- `Expires`: Used as an absolute expiration time
-- `ETag`: Return expired cache data if the remote content has not changed (`304 Not Modified` response)
-- `Last-Modified`: Return expired cache data if the remote content has not changed (`304 Not Modified` response)
+- `Cache-Control: stale-if-error`: Same behavior as request header
+- `Cache-Control: stale-while-revalidate`: If expired by less than this many seconds, return the stale response immediately and send an asynchronous revalidation request
+- `Expires`: Used as an absolute expiration datetime
+- `ETag`: Validator used for conditional requests
+- `Last-Modified`: Validator used for conditional requests
+- `Vary`: Used to indicate which request headers to match. See {ref}`matching-headers` for details.
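+
+For example, a minimal sketch of controlling cache behavior per-request with some of the request
+headers above:
+```python
+from requests_cache import CachedSession
+
+session = CachedSession()
+
+# Cache this response for 60 seconds
+session.get('https://httpbin.org/cache/60', headers={'Cache-Control': 'max-age=60'})
+
+# Skip reading from and writing to the cache for this request
+session.get('https://httpbin.org/cache/60', headers={'Cache-Control': 'no-store'})
+```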
diff --git a/docs/user_guide/inspection.md b/docs/user_guide/inspection.md
index 9160c4e..339afba 100644
--- a/docs/user_guide/inspection.md
+++ b/docs/user_guide/inspection.md
@@ -1,6 +1,6 @@
 <!-- TODO: This could use some more details and examples -->
 (inspection)=
-# {fa}`search` Cache Inspection
+# {fas}`search` Cache Inspection
 Here are some ways to get additional information out of the cache session, backend, and responses:
 
 ## Response Details
@@ -13,20 +13,22 @@ The following attributes are available on responses:
   {ref}`expiration` for details)
 - `is_expired`: indicates if the cached response is expired (if, for example, an old response was returned due to a request error)
 
-Examples:
-:::{admonition} Example code
-:class: toggle
+:::{dropdown} Examples
+:animate: fade-in-slide-down
+:color: primary
+:icon: file-code
+
 ```python
 >>> from requests_cache import CachedSession
 >>> session = CachedSession(expire_after=timedelta(days=1))
 
->>> # Placeholders are added for non-cached responses
->>> response = session.get('http://httpbin.org/get')
+>>> # Placeholder attributes are added for non-cached responses
+>>> response = session.get('https://httpbin.org/get')
 >>> print(response.from_cache, response.created_at, response.expires, response.is_expired)
 False None None None
 
->>> # Values will be populated for cached responses
->>> response = session.get('http://httpbin.org/get')
+>>> # These attributes will be populated for cached responses
+>>> response = session.get('https://httpbin.org/get')
 >>> print(response.from_cache, response.created_at, response.expires, response.is_expired)
 True 2021-01-01 18:00:00 2021-01-02 18:00:00 False
 
@@ -37,28 +39,28 @@ True 2021-01-01 18:00:00 2021-01-02 18:00:00 False
 :::
 
 ## Cache Contents
-You can use `CachedSession.cache.urls` to see all URLs currently in the cache:
-```python
->>> session = CachedSession()
->>> print(session.cache.urls)
-['https://httpbin.org/get', 'https://httpbin.org/stream/100']
-```
 
-If needed, you can get more details on cached responses via `CachedSession.cache.responses`, which
-is a dict-like interface to the cache backend. See {py:class}`.CachedResponse` for a full list of
-attributes available.
+### Checking for responses
+Use {py:meth}`.BaseCache.contains` to check if a given request is cached.
 
-For example, if you wanted to to see all URLs requested with a specific method:
+Check if a specific URL is cached:
 ```python
->>> post_urls = [
-...     response.url for response in session.cache.responses.values()
-...     if response.request.method == 'POST'
-... ]
+>>> print(session.cache.contains(url='https://httpbin.org/get'))
 ```
 
-You can also inspect `CachedSession.cache.redirects`, which maps redirect URLs to keys of the
-responses they redirect to.
+To match additional request values (parameters, headers, etc), you can pass a
+{py:class}`~requests.models.Request` object instead:
+```python
+>>> from requests import Request
 
+>>> request = Request('GET', 'https://httpbin.org/get', params={'k': 'v'})
+>>> print(session.cache.contains(request=request))
+```
+
+You can also check for a specific cache key:
+```python
+>>> print(session.cache.contains('d1e666e9fdfb3f86'))
+```
 
 ### Filtering responses
 Use {py:meth}`.BaseCache.filter` to get responses with optional filters. By default, it returns all
@@ -79,3 +81,29 @@ Get keys for **only** expired responses:
 >>> expired_responses = session.cache.filter(valid=False, expired=True)
 >>> keys = [response.cache_key for response in expired_responses]
 ```
+
+### Deleting responses
+Use {py:meth}`.BaseCache.delete` to manually delete responses. See {ref}`manual_removal` for
+examples.
+
+### Response URLs
+You can use {py:meth}`.BaseCache.urls` to see all URLs currently in the cache:
+```python
+>>> session = CachedSession()
+>>> print(session.cache.urls())
+['https://httpbin.org/get', 'https://httpbin.org/stream/100']
+```
+
+### Other response details
+If needed, you can access all responses via `CachedSession.cache.responses`, which is a dict-like
+interface to the cache backend, where:
+* Keys are cache keys (a hash of matched request information)
+* Values are {py:class}`.CachedResponse` objects
+
+For example, if you wanted to see URLs only for `POST` requests:
+```python
+>>> post_urls = [
+...     response.url for response in session.cache.responses.values()
+...     if response.request.method == 'POST'
+... ]
+```
diff --git a/docs/user_guide/installation.md b/docs/user_guide/installation.md
index 174baf3..8abff9b 100644
--- a/docs/user_guide/installation.md
+++ b/docs/user_guide/installation.md
@@ -1,27 +1,29 @@
-# {fa}`download` Installation
+# {fas}`download` Installation
 Installation instructions:
 
-:::{tab} Pip
+::::{tab-set}
+:::{tab-item} Pip
 Install the latest stable version from [PyPI](https://pypi.org/project/requests-cache/):
 ```
 pip install requests-cache
 ```
 :::
-:::{tab} Conda
+:::{tab-item} Conda
 Or install from [conda-forge](https://anaconda.org/conda-forge/requests-cache), if you prefer:
 ```
 conda install -c conda-forge requests-cache
 ```
 :::
-:::{tab} Pre-release
+:::{tab-item} Pre-release
 If you would like to use the latest development (pre-release) version:
 ```
 pip install --pre requests-cache
 ```
 :::
-:::{tab} Local development
+:::{tab-item} Local development
 See {ref}`contributing` for setup steps for local development
 :::
+::::
 
 (requirements)=
 ## Requirements
@@ -39,4 +41,4 @@ of python, here are the latest compatible versions and their documentation pages
 * **python 2.7:** [requests-cache 0.5.2](https://requests-cache.readthedocs.io/en/v0.5.0)
 * **python 3.4:** [requests-cache 0.5.2](https://requests-cache.readthedocs.io/en/v0.5.0)
 * **python 3.5:** [requests-cache 0.5.2](https://requests-cache.readthedocs.io/en/v0.5.0)
-* **python 3.6:** [requests-cache 0.7.4](https://requests-cache.readthedocs.io/en/v0.7.4)
+* **python 3.6:** [requests-cache 0.7.5](https://requests-cache.readthedocs.io/en/v0.7.5)
diff --git a/docs/user_guide/matching.md b/docs/user_guide/matching.md
index 9c0ee08..0cd0fbd 100644
--- a/docs/user_guide/matching.md
+++ b/docs/user_guide/matching.md
@@ -1,30 +1,22 @@
 (matching)=
-# {fa}`equals,style=fas` Request Matching
+# {fas}`equals` Request Matching
 Requests are matched according to the request method, URL, parameters and body. All of these values
 are normalized to account for any variations that do not modify response content.
 
 There are some additional options to configure how you want requests to be matched.
 
-## Matching Request Headers
-In some cases, different headers may result in different response data, so you may want to cache
-them separately. To enable this, use `match_headers`:
-```python
->>> session = CachedSession(match_headers=True)
->>> # Both of these requests will be sent and cached separately
->>> session.get('http://httpbin.org/headers', {'Accept': 'text/plain'})
->>> session.get('http://httpbin.org/headers', {'Accept': 'application/json'})
-```
-
-If you only want to match specific headers and not others, you can provide them as a list:
-```python
->>> session = CachedSession(match_headers=['Accept', 'Accept-Language'])
-```
-
 (filter-params)=
 ## Selective Parameter Matching
 By default, all normalized request parameters are matched. In some cases, there may be request
-parameters that don't affect the response data, for example authentication tokens or credentials.
-If you want to ignore specific parameters, specify them with the `ignored_parameters` option.
+parameters that you don't want to match. For example, an authentication token will change frequently
+but not change response content.
+
+Use the `ignored_parameters` option if you want to ignore specific parameters.
+
+```{note}
+Many common authentication parameters are already ignored by default.
+See {ref}`default-filter-params` for details.
+```
 
 **Request Parameters:**
 
@@ -32,8 +24,8 @@ In this example, only the first request will be sent, and the second request wil
 due to the ignored parameters:
 ```python
 >>> session = CachedSession(ignored_parameters=['auth-token'])
->>> session.get('http://httpbin.org/get', params={'auth-token': '2F63E5DF4F44'})
->>> r = session.get('http://httpbin.org/get', params={'auth-token': 'D9FAEB3449D3'})
+>>> session.get('https://httpbin.org/get', params={'auth-token': '2F63E5DF4F44'})
+>>> r = session.get('https://httpbin.org/get', params={'auth-token': 'D9FAEB3449D3'})
 >>> assert r.from_cache is True
 ```
 
@@ -42,28 +34,52 @@ due to the ignored parameters:
 This also applies to parameters in a JSON-formatted request body:
 ```python
 >>> session = CachedSession(allowable_methods=('GET', 'POST'), ignored_parameters=['auth-token'])
->>> session.post('http://httpbin.org/post', json={'auth-token': '2F63E5DF4F44'})
->>> r = session.post('http://httpbin.org/post', json={'auth-token': 'D9FAEB3449D3'})
+>>> session.post('https://httpbin.org/post', json={'auth-token': '2F63E5DF4F44'})
+>>> r = session.post('https://httpbin.org/post', json={'auth-token': 'D9FAEB3449D3'})
 >>> assert r.from_cache is True
 ```
 
 **Request Headers:**
 
-As well as headers, if `match_headers` is also used:
+As well as headers, if `match_headers=True` is used:
 ```python
 >>> session = CachedSession(ignored_parameters=['auth-token'], match_headers=True)
->>> session.get('http://httpbin.org/get', headers={'auth-token': '2F63E5DF4F44'})
->>> r = session.get('http://httpbin.org/get', headers={'auth-token': 'D9FAEB3449D3'})
+>>> session.get('https://httpbin.org/get', headers={'auth-token': '2F63E5DF4F44'})
+>>> r = session.get('https://httpbin.org/get', headers={'auth-token': 'D9FAEB3449D3'})
 >>> assert r.from_cache is True
 ```
 ```{note}
 Since `ignored_parameters` is most often used for sensitive info like credentials, these values will also be removed from the cached request parameters, body, and headers.
 ```
 
+(matching-headers)=
+## Matching Request Headers
+```{note}
+In some cases, request header values can affect response content. For example, sites that support
+i18n and [content negotiation](https://developer.mozilla.org/en-US/docs/Web/HTTP/Content_negotiation) may use the `Accept-Language` header to determine which language to serve content in.
+
+The server will ideally also send a `Vary` header in the response, which informs caches about
+which request headers to match. By default, requests-cache respects this, so in many cases it
+will already do what you want without extra configuration. Not all servers send `Vary`, however.
+```
+
+Use the `match_headers` option to specify which headers to match when `Vary`
+isn't available:
+```python
+>>> session = CachedSession(match_headers=['Accept'])
+>>> # These two requests will be sent and cached separately
+>>> session.get('https://httpbin.org/headers', headers={'Accept': 'text/plain'})
+>>> session.get('https://httpbin.org/headers', headers={'Accept': 'application/json'})
+```
+
+If you want to match _all_ request headers, you can use `match_headers=True`.
+
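+For example (a minimal sketch, using an arbitrary custom header for illustration):
+```python
+>>> session = CachedSession(match_headers=True)
+>>> # Cached separately, since any header difference results in a different cache key
+>>> session.get('https://httpbin.org/headers', headers={'X-Custom': '1'})
+>>> session.get('https://httpbin.org/headers', headers={'X-Custom': '2'})
+```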
+
 (custom-matching)=
 ## Custom Request Matching
 If you need more advanced behavior, you can implement your own custom request matching.
 
+### Cache Keys
 Request matching is accomplished using a **cache key**, which uniquely identifies a response in the
 cache based on request info. For example, the option `ignored_parameters=['foo']` works by excluding
 the `foo` request parameter from the cache key, meaning these three requests will all use the same
@@ -76,35 +92,90 @@ cached response:
 >>> assert response_1.cache_key == response_2.cache_key == response_3.cache_key
 ```
 
+### Recreating Cache Keys
+There are some situations where request matching behavior may change, which causes previously cached
+responses to become obsolete:
+* You start using a custom cache key, or change other settings that affect request matching
+* A new version of requests-cache is released that includes new or changed request matching behavior
+  (typically, most non-patch releases)
+
+In these cases, if you want to keep using your existing cache data, you can use the
+`recreate_keys` method:
+```python
+>>> session = CachedSession()
+>>> session.cache.recreate_keys()
+```
+
+### Cache Key Functions
 If you want to implement your own request matching, you can provide a cache key function which will
-take a {py:class}`~requests.PreparedRequest` plus optional keyword args, and return a string:
+take a {py:class}`~requests.PreparedRequest` plus optional keyword args for
+{py:func}`~requests.request`, and return a string:
 ```python
 def create_key(request: requests.PreparedRequest, **kwargs) -> str:
     """Generate a custom cache key for the given request"""
 ```
 
-`**kwargs` includes relevant {py:class}`.BaseCache` settings and any other keyword args passed to
-{py:meth}`.CachedSession.send()`. See {py:func}`.create_key` for the reference implementation, and
-see the rest of the {py:mod}`.cache_keys` module for some potentially useful helper functions.
-
 You can then pass this function via the `key_fn` param:
 ```python
 session = CachedSession(key_fn=create_key)
 ```
 
-```{note}
-`key_fn()` will be used **instead of** any other {ref}`matching` options and default matching behavior.
+`**kwargs` includes relevant {py:class}`.BaseCache` settings and any other keyword args passed to
+{py:meth}`.CachedSession.send()`. If you want to use a custom matching function _and_ the existing
+options `ignored_parameters` and `match_headers`, you can implement them in `key_fn`:
+```python
+def create_key(
+    request: requests.PreparedRequest,
+    ignored_parameters: List[str] = None,
+    match_headers: List[str] = None,
+    **kwargs,
+) -> str:
+    """Generate a custom cache key for the given request"""
 ```
+
+See {py:func}`.create_key` for the reference implementation, and see the rest of the
+{py:mod}`.cache_keys` module for some potentially useful helper functions.
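+
+For example, here is a minimal sketch of a key function that handles `ignored_parameters` itself,
+by dropping them from the query string before delegating to the default implementation (the
+filtering logic here is illustrative, and not necessarily identical to the library's own):
+```python
+from typing import List, Optional
+from urllib.parse import parse_qsl, urlencode, urlparse, urlunparse
+
+import requests
+from requests_cache import CachedSession, create_key
+
+
+def filtered_key(
+    request: requests.PreparedRequest,
+    ignored_parameters: Optional[List[str]] = None,
+    **kwargs,
+) -> str:
+    """Drop ignored query parameters, then defer to the default key function"""
+    if ignored_parameters:
+        url = urlparse(request.url)
+        params = [(k, v) for k, v in parse_qsl(url.query) if k not in ignored_parameters]
+        # Rebuild the URL on a copy, so the outgoing request is not modified
+        request = request.copy()
+        request.url = urlunparse(url._replace(query=urlencode(params)))
+    return create_key(request, **kwargs)
+
+
+session = CachedSession(key_fn=filtered_key, ignored_parameters=['auth-token'])
+```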
+
+
 ```{tip}
-See {ref}`Examples<custom_keys>` page for a complete example for custom request matching.
+See {ref}`Examples<custom_keys>` for a complete example of custom request matching.
 ```
 ```{tip}
-As a general rule, if you include less info in your cache keys, you will have more cache hits and
-use less storage space, but risk getting incorrect response data back. For example, if you exclude
-all request parameters, you will get the same cached response back for any combination of request
-parameters.
+As a general rule, if you include less information in your cache keys, you will have more cache hits
+and use less storage space, but risk getting incorrect response data back.
 ```
 ```{warning}
 If you provide a custom key function for a non-empty cache, any responses previously cached with a
-different key function will likely be unused.
+different key function will be unused, so it's recommended to clear the cache first.
+```
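+
+For example, to start fresh before switching key functions (a one-line sketch using
+{py:meth}`~.BaseCache.clear`):
+```python
+>>> session.cache.clear()
+```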
+
+### Custom Header Normalization
+When matching request headers (using `match_headers` or `Vary`), requests-cache will normalize minor
+header variations like order, casing, whitespace, etc. In some cases, you may be able to further
+optimize your requests with some additional header normalization.
+
+For example, let's say you're working with a site that supports content negotiation using the
+`Accept-Encoding` header, and the only variation you care about is whether you requested gzip
+encoding. This example will increase cache hits by ignoring variations you don't care about:
+```python
+from requests import PreparedRequest
+from requests_cache import CachedSession, create_key
+
+
+def create_custom_key(request: PreparedRequest, **kwargs) -> str:
+    # Don't modify the original request that's about to be sent
+    request = request.copy()
+
+    # Simplify values like `Accept-Encoding: gzip, compress, br` to just `Accept-Encoding: gzip`
+    if 'gzip' in request.headers.get('Accept-Encoding', ''):
+        request.headers['Accept-Encoding'] = 'gzip'
+    else:
+        request.headers['Accept-Encoding'] = None
+
+    # Use the default key function to do the rest of the work
+    return create_key(request, **kwargs)
+
+
+# Provide your custom request matcher when creating the session
+session = CachedSession(key_fn=create_custom_key)
 ```
diff --git a/docs/user_guide/security.md b/docs/user_guide/security.md
index cad4d3f..68d049d 100644
--- a/docs/user_guide/security.md
+++ b/docs/user_guide/security.md
@@ -1,5 +1,5 @@
 (security)=
-# {fa}`lock` Security
+# {fas}`lock` Security
 
 ## Pickle Vulnerabilities
 :::{warning}
@@ -66,6 +66,14 @@ Then, if you try to get that cached response again (*with* your key), you will g
 BadSignature: Signature b'iFNmzdUOSw5vqrR9Cb_wfI1EoZ8' does not match
 ```
 
+(default-filter-params)=
 ## Removing Sensitive Info
 The {ref}`ignored_parameters <filter-params>` option can be used to prevent credentials and other
 sensitive info from being saved to the cache. It applies to request parameters, body, and headers.
+
+Some common authentication parameters are ignored by default, including:
+* `Authorization` header (most authentication systems)
+* `access_token` request param (used by OAuth)
+* `access_token` in POST body (used by OAuth)
+* `X-API-KEY` header (used by OpenAPI spec)
+* `api_key` request param (used by OpenAPI spec)
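+
+For example, a quick sanity check (a minimal sketch) to verify that the `Authorization` header
+isn't stored with cached requests:
+```python
+>>> session = CachedSession()
+>>> session.get('https://httpbin.org/get', headers={'Authorization': 'Bearer <token>'})
+>>> for response in session.cache.responses.values():
+...     assert 'Authorization' not in response.request.headers
+```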
diff --git a/docs/user_guide/serializers.md b/docs/user_guide/serializers.md
index b2bae92..bbc85f3 100644
--- a/docs/user_guide/serializers.md
+++ b/docs/user_guide/serializers.md
@@ -1,23 +1,23 @@
 (serializers)=
-# {fa}`barcode` Serializers
+# {fas}`barcode` Serializers
 ![](../_static/file-pickle_32px.png)
 ![](../_static/file-json_32px.png)
 ![](../_static/file-yaml_32px.png)
 ![](../_static/file-toml_32px.png)
 
-By default, responses are serialized using {py:mod}`pickle`, but some alternative serializers are
-also included. These are mainly intended for use with {py:class}`.FileCache`, but are compatible
-with the other backends as well.
+Some alternative serializers are included, mainly intended for use with {py:class}`.FileCache`.
 
 :::{note}
-Some serializers require additional dependencies
+Some of these serializers require additional dependencies, listed in the sections below.
 :::
 
 ## Specifying a Serializer
 Similar to {ref}`backends`, you can specify which serializer to use with the `serializer` parameter
 for either {py:class}`.CachedSession` or {py:func}`.install_cache`.
 
-## JSON Serializer
+## Built-in Serializers
+
+### JSON Serializer
 Storing responses as JSON gives you the benefit of making them human-readable and editable, in
 exchange for a minor reduction in read and write speeds.
 
@@ -26,9 +26,21 @@ Usage:
 >>> session = CachedSession('my_cache', serializer='json')
 ```
 
-:::{admonition} Example JSON-serialized Response
-:class: toggle
-```{literalinclude} ../sample_data/sample_response.json
+:::{dropdown} Example JSON-serialized Response (with decoded JSON content)
+:animate: fade-in-slide-down
+:color: primary
+:icon: file-code
+
+```{literalinclude} ../sample_data/sample_response_json.json
+:language: JSON
+```
+:::
+:::{dropdown} Example JSON-serialized Response (with binary content)
+:animate: fade-in-slide-down
+:color: primary
+:icon: file-code
+
+```{literalinclude} ../sample_data/sample_response_binary.json
 :language: JSON
 ```
 :::
@@ -39,7 +51,7 @@ This will use [ultrajson](https://github.com/ultrajson/ultrajson) if installed,
 pip install requests-cache[json]
 ```
 
-## YAML Serializer
+### YAML Serializer
 YAML is another option if you need a human-readable/editable format, with the same tradeoffs as JSON.
 
 Usage:
@@ -47,9 +59,21 @@ Usage:
 >>> session = CachedSession('my_cache', serializer='yaml')
 ```
 
-:::{admonition} Example YAML-serialized Response
-:class: toggle
-```{literalinclude} ../sample_data/sample_response.yaml
+:::{dropdown} Example YAML-serialized Response (with decoded JSON content)
+:animate: fade-in-slide-down
+:color: primary
+:icon: file-code
+
+```{literalinclude} ../sample_data/sample_response_json.yaml
+:language: YAML
+```
+:::
+:::{dropdown} Example YAML-serialized Response (with binary content)
+:animate: fade-in-slide-down
+:color: primary
+:icon: file-code
+
+```{literalinclude} ../sample_data/sample_response_binary.yaml
 :language: YAML
 ```
 :::
@@ -59,7 +83,7 @@ You can install the extra dependencies for this serializer with:
 pip install requests-cache[yaml]
 ```
 
-## BSON Serializer
+### BSON Serializer
 [BSON](https://www.mongodb.com/json-and-bson) is a serialization format originally created for
 MongoDB, but it can also be used independently. Compared to JSON, it has better performance
 (although still not as fast as `pickle`), and adds support for additional data types. It is not
@@ -82,6 +106,14 @@ MongoDB dependencies:
 pip install requests-cache[bson]
 ```
 
+## Response Content Format
+By default, any JSON or text response body will be decoded, so the response is fully
+human-readable/editable. Other content types will be saved as binary data. To save _all_ content as binary, set `decode_content=False`:
+```python
+>>> backend = FileCache(decode_content=False)
+>>> session = CachedSession('http_cache', backend=backend)
+```
+
 ## Serializer Security
 See {ref}`security` for recommended setup steps for more secure cache serialization, particularly
 when using {py:mod}`pickle`.
@@ -108,8 +140,11 @@ similar methods with different names (e.g. `compress` / `decompress`), those can
 {py:class}`.Stage`.
 
 For example, a compressed pickle serializer can be built as:
-:::{admonition} Example code
-:class: toggle
+:::{dropdown} Example
+:animate: fade-in-slide-down
+:color: primary
+:icon: file-code
+
 ```python
 >>> import gzip
 >>> from requests_cache import CachedSession, SerializerPipeline, Stage, pickle_serializer
@@ -131,8 +166,11 @@ the majority of the work here, and some pre-configured converters are included f
 formats in the {py:mod}`.preconf` module.
 
 For example, a compressed JSON pipeline could be built as follows:
-:::{admonition} Example code
-:class: toggle
+:::{dropdown} Example
+:animate: fade-in-slide-down
+:color: primary
+:icon: file-code
+
 ```python
 >>> import json, gzip
 >>> from requests_cache import CachedSession, SerializerPipeline, Stage, json_serializer, utf8_encoder
diff --git a/docs/user_guide/troubleshooting.md b/docs/user_guide/troubleshooting.md
index fc5154f..899a282 100644
--- a/docs/user_guide/troubleshooting.md
+++ b/docs/user_guide/troubleshooting.md
@@ -1,5 +1,5 @@
 (debug)=
-# {fa}`exclamation-circle` Troubleshooting
+# {fas}`exclamation-circle` Troubleshooting
 Here are a few tips for avoiding and debugging some common problems.
 
 ## General Tips
@@ -31,8 +31,8 @@ logging.basicConfig(
 )
 ```
 
-If you have other libraries installed with verbose debug logging, you can configure only the loggers
-you want with `logger.setLevel()`:
+If you have other libraries installed that have verbose debug logging, you can configure only the
+loggers you want with `logger.setLevel()`:
 ```python
 import logging
 
@@ -65,17 +65,17 @@ Here are some error messages you may see either in the logs or (more rarely) in
 
 * **`Unable to deserialize response with key {cache key}`:** This
   usually means that a response was previously cached in a format that isn't compatible with the
-  current version of requests-cache or one of its dependencies. It could also be the result of switching {ref}`serializers`.
+  current version of requests-cache or one of its dependencies.
   * This message is to help with debugging and can generally be ignored. If you prefer, you can
-    either {py:meth}`~.BaseCache.clear` the cache or {py:meth}`~.BaseCache.remove_expired_responses`
-    to get rid of the invalid responses.
+    either {py:meth}`~.BaseCache.remove` the invalid responses or {py:meth}`~.BaseCache.clear` the
+    entire cache.
 * **`Request for URL {url} failed; using cached response`:** This is just a notification that the
-  {ref}`stale_if_error <request-errors>` option is working as intended
+  {ref}`stale_if_error <request-errors>` option is working as intended.
 * **{py:exc}`~requests.RequestException`:** These are general request errors not specific to
   requests-cache. See `requests` documentation on
   [Errors and Exceptions](https://2.python-requests.org/en/master/user/quickstart/#errors-and-exceptions)
   for more details.
-* **{py:exc}`ModuleNotFoundError`**: `No module named 'requests_cache.core'`: This module was deprecated in `v0.6` and removed in `v0.8`. Just import from `requests_cache` instead of `requests_cache.core`.
+* **{py:exc}`ModuleNotFoundError`**: `No module named 'requests_cache.core'`: This module was deprecated in `v0.6` and removed in `v0.8`. Please import from `requests_cache` instead of `requests_cache.core`.
 * **{py:exc}`ImportError`:** Indicates a missing required or optional dependency.
   * If you see this at **import time**, it means that one or more **required** dependencies are not
     installed
diff --git a/examples/README.md b/examples/README.md
index 09b35e7..3c0c1f4 100644
--- a/examples/README.md
+++ b/examples/README.md
@@ -1,3 +1,4 @@
 # Requests-Cache Examples
 This folder contains some complete examples that demonstrate the main features of requests-cache.
-These are also viewable on [readthedocs](https://requests-cache.readthedocs.io/en/stable/examples.html).
+See the [Examples](https://requests-cache.readthedocs.io/en/stable/examples.html) section of the
+docs for a summary.
diff --git a/examples/basic_patching.py b/examples/basic_patching.py
index 0e94b23..d7ef0d3 100755
--- a/examples/basic_patching.py
+++ b/examples/basic_patching.py
@@ -16,20 +16,20 @@ requests_cache.install_cache('example_cache', backend='sqlite')
 def main():
     # The real request will only be made once; afterward, the cached response is used
     for i in range(5):
-        response = requests.get('http://httpbin.org/get')
+        response = requests.get('https://httpbin.org/get')
 
     # This is more obvious when calling a slow endpoint
     for i in range(5):
-        response = requests.get('http://httpbin.org/delay/2')
+        response = requests.get('https://httpbin.org/delay/2')
 
     # Caching can be disabled if we want to get a fresh page and not cache it
     with requests_cache.disabled():
-        print(requests.get('http://httpbin.org/ip').text)
+        print(requests.get('https://httpbin.org/ip').text)
 
     # Get some debugging info about the cache
     print(requests_cache.get_cache())
     print('Cached URLS:')
-    print('\n'.join(requests_cache.get_cache().urls))
+    print('\n'.join(requests_cache.get_cache().urls()))
 
     # Uninstall to remove caching from all requests functions
     requests_cache.uninstall_cache()
diff --git a/examples/basic_sessions.py b/examples/basic_sessions.py
index 07e14bf..7c448b4 100755
--- a/examples/basic_sessions.py
+++ b/examples/basic_sessions.py
@@ -13,23 +13,23 @@ def main():
 
     # The real request will only be made once; afterward, the cached response is used
     for i in range(5):
-        response = session.get('http://httpbin.org/get')
+        response = session.get('https://httpbin.org/get')
 
     # This is more obvious when calling a slow endpoint
     for i in range(5):
-        response = session.get('http://httpbin.org/delay/2')
+        response = session.get('https://httpbin.org/delay/2')
 
     # Caching can be disabled if we want to get a fresh page and not cache it
     with session.cache_disabled():
-        print(session.get('http://httpbin.org/ip').text)
+        print(session.get('https://httpbin.org/ip').text)
 
     # Get some debugging info about the cache
     print(session.cache)
     print('Cached URLS:')
-    print('\n'.join(session.cache.urls))
+    print('\n'.join(session.cache.urls()))
 
 
-if __name__ == "__main__":
+if __name__ == '__main__':
     t = time.time()
     main()
     print('Elapsed: %.3f seconds' % (time.time() - t))
diff --git a/examples/cloudformation.yml b/examples/cloudformation.yml
new file mode 100644
index 0000000..fa83091
--- /dev/null
+++ b/examples/cloudformation.yml
@@ -0,0 +1,34 @@
+AWSTemplateFormatVersion: "2010-09-09"
+Description: An example of creating a DynamoDB table to use as a requests-cache backend
+
+Parameters:
+  CacheTableName:
+    Type: String
+    Default: http_cache
+    Description: >
+      An alternate DynamoDB table name to use. If provided, this must match the
+      table_name parameter for DynamoDbCache.
+
+Resources:
+  DynamoDBRequestCache:
+    Type: AWS::DynamoDB::Table
+    Properties:
+      TableName: !Ref CacheTableName
+      AttributeDefinitions:
+        - AttributeName: key
+          AttributeType: S
+      KeySchema:
+        - AttributeName: key
+          KeyType: HASH
+      # Optional: Use on-demand billing instead of provisioned throughput
+      # BillingMode: PAY_PER_REQUEST
+
+      BillingMode: PROVISIONED
+      ProvisionedThroughput:
+          WriteCapacityUnits: 2
+          ReadCapacityUnits: 2
+
+      # Optional: Enable DynamoDB's TTL feature
+      TimeToLiveSpecification:
+        AttributeName: ttl
+        Enabled: true
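+
+# Example usage (a sketch; assumes this stack has been deployed with the default table name):
+#   from requests_cache import CachedSession
+#   session = CachedSession('http_cache', backend='dynamodb')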
diff --git a/examples/convert_cache.py b/examples/convert_cache.py
index ed2a0a6..0ee2965 100755
--- a/examples/convert_cache.py
+++ b/examples/convert_cache.py
@@ -43,4 +43,4 @@ def convert_cache(*args, **kwargs):
 
 # Example: convert a cache named 'demo_cache.sqlite' in the current directory
 if __name__ == '__main__':
-    convert_cache('demo_cache', backend='sqlite')
+    convert_cache('demo_cache.sqlite')
diff --git a/examples/expiration.py b/examples/expiration.py
index 942b198..6ca4c5b 100755
--- a/examples/expiration.py
+++ b/examples/expiration.py
@@ -38,7 +38,7 @@ def main():
     assert not response.from_cache
 
 
-if __name__ == "__main__":
+if __name__ == '__main__':
     t = time.perf_counter()
     main()
     print('Elapsed: %.3f seconds' % (time.perf_counter() - t))
diff --git a/examples/external_config.py b/examples/external_config.py
new file mode 100755
index 0000000..0ab6508
--- /dev/null
+++ b/examples/external_config.py
@@ -0,0 +1,29 @@
+#!/usr/bin/env python
+"""
+An example of loading CachedSession settings from an external config file.
+
+Limitations:
+* Does not include backend or serializer settings
+* Does not include settings specified as python expressions, for example `timedelta` objects or
+  callback functions
+"""
+from pathlib import Path
+
+import yaml
+
+from requests_cache import CachedSession, CacheSettings
+
+CONFIG_FILE = Path(__file__).parent / 'external_config.yml'
+
+
+def load_settings() -> CacheSettings:
+    """Load settings from a YAML config file"""
+    with open(CONFIG_FILE) as f:
+        settings = yaml.safe_load(f)
+    return CacheSettings(**settings['cache_settings'])
+
+
+if __name__ == '__main__':
+    session = CachedSession()
+    session.settings = load_settings()
+    print('Loaded settings:\n', session.settings)
diff --git a/examples/external_config.yml b/examples/external_config.yml
new file mode 100644
index 0000000..ce26e29
--- /dev/null
+++ b/examples/external_config.yml
@@ -0,0 +1,16 @@
+# See external_config.py for usage example
+cache_settings:
+  expire_after: 360
+  cache_control: True
+  stale_if_error: True
+  allowable_methods:
+    - GET
+    - HEAD
+    - POST
+  allowable_codes:
+    - 200
+    - 400
+  ignored_parameters:
+    - api_key
+  match_headers:
+    - Accept-Language
diff --git a/examples/generate_test_db.py b/examples/generate_test_db.py
index 397a3d9..5950688 100755
--- a/examples/generate_test_db.py
+++ b/examples/generate_test_db.py
@@ -16,9 +16,12 @@ from requests_cache import ALL_METHODS, CachedResponse, CachedSession
 from requests_cache.models.response import format_file_size
 from tests.conftest import HTTPBIN_FORMATS, HTTPBIN_METHODS
 
-# TODO: If others would find it useful, these settings could be turned into CLI args
 BACKEND = 'sqlite'
 CACHE_NAME = 'rubbish_bin'
+MAX_EXPIRE_AFTER = 30  # In seconds; set to -1 to disable expiration
+MAX_RESPONSE_SIZE = 10000  # In bytes
+N_RESPONSES = 100000
+N_INVALID_RESPONSES = 10
 
 BASE_RESPONSE = requests.get('https://httpbin.org/get')
 HTTPBIN_EXTRA_ENDPOINTS = [
@@ -28,11 +31,6 @@ HTTPBIN_EXTRA_ENDPOINTS = [
     'redirect/5',
     'stream-bytes/1024',
 ]
-MAX_EXPIRE_AFTER = 30  # In seconds; set to -1 to disable expiration
-MAX_RESPONSE_SIZE = 10000  # In bytes
-N_RESPONSES = 100000
-N_INVALID_RESPONSES = 10
-
 logging.basicConfig(level='INFO')
 logger = logging.getLogger('requests_cache')
 
@@ -88,13 +86,13 @@ def get_randomized_response(i=0):
     return new_response
 
 
-def remove_expired_responses(expire_after=None):
+def remove_expired_responses():
     logger.setLevel('DEBUG')
     session = CachedSession(CACHE_NAME)
     total_responses = len(session.cache.responses)
 
     start = time()
-    session.remove_expired_responses(expire_after=expire_after)
+    session.cache.delete(expired=True)
     elapsed = time() - start
     n_removed = total_responses - len(session.cache.responses)
     logger.info(
@@ -123,6 +121,3 @@ if __name__ == '__main__':
 
     # Remove some responses (with randomized expiration)
     # remove_expired_responses()
-
-    # Expire and remove all responses
-    # remove_expired_responses(expire_after=1)
diff --git a/examples/github_actions.yml b/examples/github_actions.yml
new file mode 100644
index 0000000..dffd045
--- /dev/null
+++ b/examples/github_actions.yml
@@ -0,0 +1,27 @@
+name: Test requests-cache with GitHub Actions
+
+on:
+  push:
+    branches: [main]
+
+jobs:
+  test:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: '3.10'
+
+      # Persist the SQLite file created by requests-cache across workflow runs
+      - id: cache
+        uses: actions/cache@v3
+        with:
+          path: example_cache.sqlite
+          key: none
+
+      # Install and run basic requests-cache example
+      - run: pip install '.'
+      - run: python examples/basic_sessions.py
+      - run: python examples/basic_sessions.py
+      - run: test -f example_cache.sqlite && echo 'Cache file created' || echo 'Cache file missing'
diff --git a/examples/log_requests.py b/examples/log_requests.py
index 035cc4e..266ffd0 100755
--- a/examples/log_requests.py
+++ b/examples/log_requests.py
@@ -20,7 +20,7 @@ logger = getLogger('requests_cache.examples')
 @contextmanager
 def log_requests():
     """Context manager that mocks and logs all non-cached requests"""
-    real_response = set_response_defaults(requests.get('http://httpbin.org/get'))
+    real_response = set_response_defaults(requests.get('https://httpbin.org/get'))
     with patch.object(OriginalSession, 'send', return_value=real_response) as mock_send:
         session = CachedSession('cache-test', backend='sqlite')
         session.cache.clear()
@@ -38,7 +38,7 @@ def main():
     """Example usage; replace with any other requests you want to test"""
     with log_requests() as session:
         for i in range(10):
-            response = session.get('http://httpbin.org/get')
+            response = session.get('https://httpbin.org/get')
             logger.debug(f'Response {i}: {type(response).__name__}')
 
 
diff --git a/examples/pygithub.py b/examples/pygithub.py
new file mode 100755
index 0000000..cf3622a
--- /dev/null
+++ b/examples/pygithub.py
@@ -0,0 +1,118 @@
+#!/usr/bin/env python
+"""
+An example of caching [GitHub API](https://docs.github.com/en/rest) requests with
+[PyGithub](https://github.com/PyGithub/PyGithub).
+
+This example demonstrates the following features:
+* {ref}`patching`: PyGithub uses `requests`, but the session it uses is not easily accessible.
+  In this case, using {py:func}`.install_cache` is the easiest approach.
+* {ref}`URL Patterns <url-filtering>`: Since we're using patching, this example adds an optional
+  safety measure to avoid unintentionally caching any non-GitHub requests elsewhere in your code.
+* {ref}`cache-control`: The GitHub API provides `Cache-Control` headers, so we can use those to set
+  expiration.
+* {ref}`conditional-requests`: The GitHub API also supports conditional requests. Even after
+  responses expire, we can still make use of the cache until the remote content actually changes.
+* [Rate limiting](https://docs.github.com/en/rest/overview/resources-in-the-rest-api#rate-limiting):
+  The GitHub API is rate-limited at 5000 requests per hour if authenticated, or only 60 requests per
+  hour otherwise. This makes caching especially useful, because cache hits and `304 Not Modified`
+  responses (from conditional requests) are not counted against the rate limit.
+* {ref}`inspection`: After calling some PyGithub functions, we can take a look at the cache contents
+  to see the actual API requests that were sent.
+* {ref}`Security <default-filter-params>`: If you use a
+  [personal access token](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token),
+  it will be sent to the GitHub API via the `Authorization` header. This is not something you want
+  to store in the cache if your storage backend is unsecured, so `Authorization` and other common
+  auth headers/params are redacted by default. This example shows how to verify this.
+"""
+from time import time
+
+import requests
+from github import Github
+
+from requests_cache import DO_NOT_CACHE, get_cache, install_cache
+
+# (Optional) Add an access token here, if you want higher rate limits and access to private repos
+ACCESS_TOKEN = None
+
+# Or add your own username here (if not using an access token)
+MY_USERNAME = 'test-user'
+
+
+install_cache(
+    cache_control=True,
+    urls_expire_after={
+        '*.github.com': 360,  # Placeholder expiration; should be overridden by Cache-Control
+        '*': DO_NOT_CACHE,  # Don't cache anything other than GitHub requests
+    },
+)
+
+
+def get_user_info():
+    """Display some info about your own resources on GitHub"""
+    gh = Github(ACCESS_TOKEN)
+    my_user = gh.get_user() if ACCESS_TOKEN else gh.get_user(MY_USERNAME)
+
+    # Get links to all of your own repositories
+    print('My repos:')
+    for repo in my_user.get_repos():
+        print(repo.html_url)
+
+    # Get links to all of your own gists
+    print('\nMy gists:')
+    for gist in my_user.get_gists():
+        print(gist.html_url)
+
+    # Get organizations you belong to
+    print('\nMy organizations:')
+    for org in my_user.get_orgs():
+        print(org.html_url)
+
+    # Check how internet-famous you are
+    print('\nMy followers:')
+    for user in my_user.get_followers():
+        print(user.login)
+
+    # Check your API rate limit usage
+    print(f'\nRate limit: {gh.rate_limiting}')
+
+
+def test_non_github_requests():
+    """Test that URL patterns are working, and that non-GitHub requests are not cached"""
+    response = requests.get('https://httpbin.org/json')
+    response = requests.get('https://httpbin.org/json')
+    from_cache = getattr(response, 'from_cache', False)
+    print(f'Non-GitHub requests cached: {from_cache}')
+    assert not from_cache
+
+
+def check_cache():
+    """Check some information on cached requests"""
+    # Show all the GitHub API URLs that PyGithub called
+    print('\nCached URLs:')
+    print('\n'.join(get_cache().urls()))
+
+    # Make sure credentials were redacted from all responses in the cache
+    response = requests.get('https://api.github.com/user/repos')
+    print('\nExample cached request headers:')
+    print(response.request.headers)
+    for response in get_cache().responses.values():
+        assert 'Authorization' not in response.request.headers
+
+
+def main():
+    # Send initial requests
+    start = time()
+    get_user_info()
+    print(f'Elapsed: {time() - start:.2f} seconds')
+
+    # Repeat the same requests and verify that your rate limit usage is unchanged
+    start = time()
+    get_user_info()
+    print(f'Elapsed: {time() - start:.2f} seconds')
+
+    test_non_github_requests()
+    check_cache()
+
+
+if __name__ == '__main__':
+    main()
diff --git a/examples/rps_graph.png b/examples/rps_graph.png
new file mode 100644
index 0000000..ff8b633
Binary files /dev/null and b/examples/rps_graph.png differ
diff --git a/examples/rps_graph.py b/examples/rps_graph.py
new file mode 100644
index 0000000..172ef78
--- /dev/null
+++ b/examples/rps_graph.py
@@ -0,0 +1,123 @@
+#!/usr/bin/env python
+"""
+This example displays a graph of request rates over time. Requests are continuously sent to URLs
+randomly picked from a fixed number of possible URLs. This demonstrates how average request rate
+increases as the proportion of cached requests increases.
+
+Try running this example with different cache settings and URLs to see how the graph changes.
+"""
+from random import randint
+from time import time
+
+from rich.live import Live
+from rich.progress import BarColumn, MofNCompleteColumn, Progress
+from rich.table import Table
+
+from requests_cache import CachedSession
+
+N_UNIQUE_REQUESTS = 200
+
+
+class RPSProgress(Progress):
+    """Display a bar chart of requests per second"""
+
+    def __init__(self, interval: int = 1, scale: int = 500, **kwargs):
+        super().__init__(BarColumn(), '{task.completed}', **kwargs)
+        self.current_task = None
+        self.interval = interval
+        self.interval_start = None
+        self.scale = scale
+        self.total_requests = 0
+        self.next_interval()
+
+    def next_interval(self):
+        """Create a new task to draw the next line on the bar chart"""
+        self.current_task = self.add_task('barchart_line', total=self.scale)
+        self.interval_start = time()
+
+    def count_request(self):
+        if time() - self.interval_start >= self.interval:
+            self.next_interval()
+        self.advance(self.current_task)
+        self.total_requests += 1
+
+
+class CacheRPSProgress:
+    """Track requests per second plus cache size in a single live view"""
+
+    def __init__(self, n_unique_requests: int = 100):
+        self.rps_progress = RPSProgress()
+        self.cache_progress = Progress(
+            BarColumn(complete_style='blue'),
+            '[cyan]Requests cached:',
+            MofNCompleteColumn(),
+        )
+        header = Progress(BarColumn(), '[cyan]Requests per second')
+        header.add_task('')
+        self.cache_task = self.cache_progress.add_task('', total=n_unique_requests)
+        self.n_unique_requests = n_unique_requests
+        self.start_time = time()
+
+        self.table = Table.grid()
+        self.table.add_row(header)
+        self.table.add_row(self.rps_progress)
+        self.table.add_row(self.cache_progress)
+        self.live = Live(self.table, refresh_per_second=10)
+
+    def __enter__(self):
+        """Start live view on ctx enter"""
+        self.live.__enter__()
+        self.log(
+            '[cyan]Measuring request rate with '
+            f'[white]{self.n_unique_requests}[cyan] total unique requests'
+        )
+        self.log('[cyan]Press [white]Ctrl+C[cyan] to exit')
+        return self
+
+    def __exit__(self, *args):
+        """Show stats on ctx exit"""
+        self.live.__exit__(*args)
+        elapsed = time() - self.start_time
+        self.log(
+            f'[cyan]Sent a total of [white]{self.total_requests}[cyan] '
+            f'requests in [white]{elapsed:.2f}[cyan] seconds '
+        )
+
+        self.log(f'[cyan]Average: [white]{int(self.total_requests/elapsed)}[cyan] requests/second')
+
+    @property
+    def total_requests(self):
+        return self.rps_progress.total_requests
+
+    def count_request(self):
+        self.rps_progress.count_request()
+
+    def update_cache_size(self, size: int):
+        self.cache_progress.update(self.cache_task, completed=size)
+
+    def log(self, msg: str):
+        self.cache_progress.log(msg)
+
+
+def test_rps(session):
+    session.cache.clear()
+
+    # Send a request to one of a fixed number of unique URLs
+    def random_request():
+        request_number = randint(1, N_UNIQUE_REQUESTS)
+        session.get(f'https://httpbin.org/get?page={request_number}')
+
+    # Show request rate over time and total cached (unexpired) requests
+    with CacheRPSProgress(N_UNIQUE_REQUESTS) as progress:
+        while True:
+            try:
+                random_request()
+                progress.count_request()
+                progress.update_cache_size(session.cache.responses.count(expired=False))
+            except KeyboardInterrupt:
+                break
+
+
+if __name__ == '__main__':
+    session = CachedSession(use_temp=True, expire_after=30)
+    test_rps(session)
diff --git a/examples/time_machine_backtesting.py b/examples/time_machine_backtesting.py
old mode 100644
new mode 100755
diff --git a/examples/url_patterns.py b/examples/url_patterns.py
index 0fddb86..e4f0f31 100755
--- a/examples/url_patterns.py
+++ b/examples/url_patterns.py
@@ -5,23 +5,23 @@ An example of {ref}`url-patterns`
 """
 from datetime import timedelta
 
-from requests_cache import CachedSession
+from requests_cache import DO_NOT_CACHE, NEVER_EXPIRE, CachedSession
 
 default_expire_after = 60 * 60               # By default, cached responses expire in an hour
 urls_expire_after = {
     'httpbin.org/image': timedelta(days=7),  # Requests for this base URL will expire in a week
-    '*.fillmurray.com': -1,                  # Requests matching this pattern will never expire
-    '*.placeholder.com/*': 0,                # Requests matching this pattern will not be cached
+    '*.fillmurray.com': NEVER_EXPIRE,        # Requests matching this pattern will never expire
+    '*.placeholder.com/*': DO_NOT_CACHE,     # Requests matching this pattern will not be cached
 }
 urls = [
     'https://httpbin.org/get',               # Will expire in an hour
     'https://httpbin.org/image/jpeg',        # Will expire in a week
-    'http://www.fillmurray.com/460/300',     # Will never expire
+    'https://www.fillmurray.com/460/300',    # Will never expire
     'https://via.placeholder.com/350x150',   # Will not be cached
 ]
 
 
-def main():
+def send_requests():
     session = CachedSession(
         cache_name='example_cache',
         expire_after=default_expire_after,
@@ -39,11 +39,15 @@ def _expires_str(response):
         return response.expires.isoformat()
 
 
-if __name__ == "__main__":
-    original_responses = main()
-    cached_responses = main()
+def main():
+    send_requests()
+    cached_responses = send_requests()
     for response in cached_responses:
         print(
             f'{response.url:40} From cache: {response.from_cache:}'
             f'\tExpires: {_expires_str(response)}'
         )
+
+
+if __name__ == '__main__':
+    main()
diff --git a/examples/vcr.py b/examples/vcr.py
new file mode 100755
index 0000000..94e4fdb
--- /dev/null
+++ b/examples/vcr.py
@@ -0,0 +1,99 @@
+#!/usr/bin/env python
+"""
+Example utilities to export responses to a format compatible with VCR-based libraries, including:
+* [vcrpy](https://github.com/kevin1024/vcrpy)
+* [betamax](https://github.com/betamaxpy/betamax)
+"""
+from os import makedirs
+from os.path import abspath, dirname, expanduser, join
+from typing import Any, Dict, Iterable
+from urllib.parse import urlparse
+
+import yaml
+
+from requests_cache import BaseCache, CachedResponse, CachedSession, __version__
+from requests_cache.serializers.preconf import yaml_preconf_stage
+
+
+def to_vcr_cassette(cache: BaseCache, path: str):
+    """Export cached responses to a VCR-compatible YAML file (cassette)
+
+    Args:
+        cache: Cache instance containing response data to export
+        path: Path for new cassette file
+    """
+
+    responses = cache.responses.values()
+    write_cassette(to_vcr_cassette_dict(responses), path)
+
+
+def to_vcr_cassettes_by_host(cache: BaseCache, cassette_dir: str = '.'):
+    """Export cached responses as VCR-compatible YAML files (cassettes), split into separate files
+    based on request host
+
+    Args:
+        cache: Cache instance containing response data to export
+        cassette_dir: Base directory for cassette library
+    """
+    responses = cache.responses.values()
+    for host, cassette in to_vcr_cassette_dicts_by_host(responses).items():
+        write_cassette(cassette, join(cassette_dir, f'{host}.yml'))
+
+
+def to_vcr_cassette_dict(responses: Iterable[CachedResponse]) -> Dict:
+    """Convert responses to a VCR cassette dict"""
+    return {
+        'http_interactions': [to_vcr_episode(r) for r in responses],
+        'recorded_with': f'requests-cache {__version__}',
+    }
+
+
+def to_vcr_episode(response: CachedResponse) -> Dict:
+    """Convert a single response to a VCR-compatible response ("episode") dict"""
+    # Do most of the work with cattrs + default YAML conversions
+    response_dict = yaml_preconf_stage.dumps(response)
+
+    def _to_multidict(d):
+        return {k: [v] for k, v in d.items()}
+
+    # Translate requests.Response structure into VCR format
+    return {
+        'request': {
+            'body': response_dict['request']['body'],
+            'headers': _to_multidict(response_dict['request']['headers']),
+            'method': response_dict['request']['method'],
+            'uri': response_dict['request']['url'],
+        },
+        'response': {
+            'body': {'string': response_dict['_content'], 'encoding': response_dict['encoding']},
+            'headers': _to_multidict(response_dict['headers']),
+            'status': {'code': response_dict['status_code'], 'message': response_dict['reason']},
+            'url': response_dict['url'],
+        },
+        'recorded_at': response_dict['created_at'],
+    }
+
+
+def to_vcr_cassette_dicts_by_host(responses: Iterable[CachedResponse]) -> Dict[str, Dict]:
+    responses_by_host: Dict[str, Any] = {}
+    for response in responses:
+        host = urlparse(response.request.url).netloc
+        responses_by_host.setdefault(host, [])
+        responses_by_host[host].append(response)
+    return {host: to_vcr_cassette_dict(responses) for host, responses in responses_by_host.items()}
+
+
+def write_cassette(cassette, path):
+    path = abspath(expanduser(path))
+    makedirs(dirname(path), exist_ok=True)
+    with open(path, 'w') as f:
+        f.write(yaml.safe_dump(cassette))
+
+
+# Create an example cache and export it to a cassette
+if __name__ == '__main__':
+    cache_dir = 'example_cache'
+    session = CachedSession(join(cache_dir, 'http_cache.sqlite'))
+    session.get('https://httpbin.org/get')
+    session.get('https://httpbin.org/json')
+    to_vcr_cassette(session.cache, join(cache_dir, 'http_cache.yaml'))
diff --git a/noxfile.py b/noxfile.py
index ddc94e0..997aa32 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -1,9 +1,14 @@
-"""Notes:
-* 'test-<python version>' commands: nox will use poetry.lock to determine dependency versions
+"""Runner script for tools used in local development and CI.
+
+Notes:
+* 'test' and 'test-<python version>' commands: nox will create separate virtualenvs per python
+  version, and use `poetry.lock` to determine dependency versions
 * 'lint' command: tools and environments are managed by pre-commit
 * All other commands: the current environment will be used instead of creating new ones
 * Run `nox -l` to see all available commands
 """
+import platform
+from os import getenv
 from os.path import join
 from shutil import rmtree
 
@@ -18,25 +23,35 @@ LIVE_DOCS_IGNORE = ['*.pyc', '*.tmp', join('**', 'modules', '*')]
 LIVE_DOCS_WATCH = ['requests_cache', 'examples']
 CLEAN_DIRS = ['dist', 'build', join('docs', '_build'), join('docs', 'modules')]
 
+PYTHON_VERSIONS = ['3.7', '3.8', '3.9', '3.10', '3.11', 'pypy3.9']
 UNIT_TESTS = join('tests', 'unit')
 INTEGRATION_TESTS = join('tests', 'integration')
 STRESS_TEST_MULTIPLIER = 10
-COVERAGE_ARGS = (
-    '--cov --cov-report=term --cov-report=html'  # Generate HTML + stdout coverage report
-)
-XDIST_ARGS = '--numprocesses=auto --dist=loadfile'  # Run tests in parallel, grouped by test module
+DEFAULT_COVERAGE_FORMATS = ['html', 'term']
+# Run tests in parallel, grouped by test module
+XDIST_ARGS = '--numprocesses=auto --dist=loadfile'
+
+IS_PYPY = platform.python_implementation() == 'PyPy'
 
 
-@session(python=['3.7', '3.8', '3.9', '3.10'])
+@session(python=PYTHON_VERSIONS)
 def test(session):
-    """Run tests for a specific python version"""
-    test_paths = session.posargs or [UNIT_TESTS]
-    session.install('.', 'pytest', 'pytest-xdist', 'requests-mock', 'timeout-decorator')
+    """Run tests in a separate virtualenv per python version"""
+    test_paths = session.posargs or [UNIT_TESTS, INTEGRATION_TESTS]
+    session.install('.', 'pytest', 'pytest-xdist', 'requests-mock', 'rich', 'timeout-decorator')
 
     cmd = f'pytest -rs {XDIST_ARGS}'
     session.run(*cmd.split(' '), *test_paths)
 
 
+@session(python=False, name='test-current')
+def test_current(session):
+    """Run tests using the current virtualenv"""
+    test_paths = session.posargs or [UNIT_TESTS, INTEGRATION_TESTS]
+    cmd = f'pytest -rs {XDIST_ARGS}'
+    session.run(*cmd.split(' '), *test_paths)
+
+
 @session(python=False)
 def clean(session):
     """Clean up temporary build + documentation files"""
@@ -48,19 +63,29 @@ def clean(session):
 @session(python=False, name='cov')
 def coverage(session):
     """Run tests and generate coverage report"""
-    cmd_1 = f'pytest {UNIT_TESTS} -rs {XDIST_ARGS} {COVERAGE_ARGS}'
-    cmd_2 = f'pytest {INTEGRATION_TESTS} -rs {XDIST_ARGS} {COVERAGE_ARGS} --cov-append'
-    session.run(*cmd_1.split(' '))
-    session.run(*cmd_2.split(' '))
+    cmd = f'pytest {UNIT_TESTS} {INTEGRATION_TESTS} -rs --cov'.split(' ')
+    if not IS_PYPY:
+        cmd += XDIST_ARGS.split(' ')
+
+    # Add coverage formats
+    cov_formats = session.posargs or DEFAULT_COVERAGE_FORMATS
+    cmd += [f'--cov-report={f}' for f in cov_formats]
+
+    # Add verbose flag, if set by environment
+    if getenv('PYTEST_VERBOSE'):
+        cmd += ['--verbose']
+    session.run(*cmd)
 
 
 @session(python=False, name='stress')
 def stress_test(session):
     """Run concurrency tests with a higher stress test multiplier"""
     cmd = f'pytest {INTEGRATION_TESTS} -rs -k concurrency'
+    multiplier = session.posargs[0] if session.posargs else STRESS_TEST_MULTIPLIER
+
     session.run(
         *cmd.split(' '),
-        env={'STRESS_TEST_MULTIPLIER': str(STRESS_TEST_MULTIPLIER)},
+        env={'STRESS_TEST_MULTIPLIER': str(multiplier)},
     )
 
 
diff --git a/poetry.lock b/poetry.lock
index c4c9b2c..fe298cf 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,32 +1,25 @@
 [[package]]
 name = "alabaster"
-version = "0.7.12"
+version = "0.7.13"
 description = "A configurable sidebar-enabled Sphinx theme"
 category = "main"
 optional = false
-python-versions = "*"
-
-[[package]]
-name = "appdirs"
-version = "1.4.4"
-description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
-category = "main"
-optional = false
-python-versions = "*"
+python-versions = ">=3.6"
 
 [[package]]
 name = "argcomplete"
-version = "1.12.3"
+version = "2.1.2"
 description = "Bash tab completion for argparse"
 category = "dev"
 optional = false
-python-versions = "*"
+python-versions = ">=3.6"
 
 [package.dependencies]
-importlib-metadata = {version = ">=0.23,<5", markers = "python_version == \"3.7\""}
+importlib-metadata = {version = ">=0.23,<6", markers = "python_version == \"3.7\""}
 
 [package.extras]
-test = ["coverage", "flake8", "pexpect", "wheel"]
+lint = ["flake8", "mypy"]
+test = ["coverage", "flake8", "mypy", "pexpect", "wheel"]
 
 [[package]]
 name = "async-timeout"
@@ -39,42 +32,35 @@ python-versions = ">=3.6"
 [package.dependencies]
 typing-extensions = {version = ">=3.6.5", markers = "python_version < \"3.8\""}
 
-[[package]]
-name = "atomicwrites"
-version = "1.4.1"
-description = "Atomic file writes."
-category = "dev"
-optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
-
 [[package]]
 name = "attrs"
-version = "21.4.0"
+version = "22.2.0"
 description = "Classes Without Boilerplate"
 category = "main"
 optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+python-versions = ">=3.6"
 
 [package.extras]
-dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six", "sphinx", "sphinx-notfound-page", "zope.interface"]
-docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"]
-tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six", "zope.interface"]
-tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six"]
+cov = ["attrs[tests]", "coverage-enable-subprocess", "coverage[toml] (>=5.3)"]
+dev = ["attrs[docs,tests]"]
+docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope.interface"]
+tests = ["attrs[tests-no-zope]", "zope.interface"]
+tests-no-zope = ["cloudpickle", "cloudpickle", "hypothesis", "hypothesis", "mypy (>=0.971,<0.990)", "mypy (>=0.971,<0.990)", "pympler", "pympler", "pytest (>=4.3.0)", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-mypy-plugins", "pytest-xdist[psutil]", "pytest-xdist[psutil]"]
 
 [[package]]
 name = "babel"
-version = "2.11.0"
+version = "2.12.1"
 description = "Internationalization utilities"
 category = "main"
 optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"
 
 [package.dependencies]
-pytz = ">=2015.7"
+pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""}
 
 [[package]]
 name = "beautifulsoup4"
-version = "4.11.1"
+version = "4.12.0"
 description = "Screen-scraping library"
 category = "main"
 optional = true
@@ -89,14 +75,14 @@ lxml = ["lxml"]
 
 [[package]]
 name = "boto3"
-version = "1.26.29"
+version = "1.26.99"
 description = "The AWS SDK for Python"
 category = "main"
 optional = true
 python-versions = ">= 3.7"
 
 [package.dependencies]
-botocore = ">=1.29.29,<1.30.0"
+botocore = ">=1.29.99,<1.30.0"
 jmespath = ">=0.7.1,<2.0.0"
 s3transfer = ">=0.6.0,<0.7.0"
 
@@ -105,7 +91,7 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]
 
 [[package]]
 name = "botocore"
-version = "1.29.29"
+version = "1.29.99"
 description = "Low-level, data-driven core of boto 3."
 category = "main"
 optional = true
@@ -117,7 +103,7 @@ python-dateutil = ">=2.1,<3.0.0"
 urllib3 = ">=1.25.4,<1.27"
 
 [package.extras]
-crt = ["awscrt (==0.15.3)"]
+crt = ["awscrt (==0.16.9)"]
 
 [[package]]
 name = "bson"
@@ -162,14 +148,11 @@ python-versions = ">=3.6.1"
 
 [[package]]
 name = "charset-normalizer"
-version = "2.1.1"
+version = "3.1.0"
 description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
 category = "main"
 optional = false
-python-versions = ">=3.6.0"
-
-[package.extras]
-unicode-backport = ["unicodedata2"]
+python-versions = ">=3.7.0"
 
 [[package]]
 name = "colorama"
@@ -193,20 +176,9 @@ colorama = {version = "*", markers = "sys_platform == \"win32\""}
 [package.extras]
 development = ["black", "flake8", "mypy", "pytest", "types-colorama"]
 
-[[package]]
-name = "commonmark"
-version = "0.9.1"
-description = "Python parser for the CommonMark Markdown spec"
-category = "dev"
-optional = false
-python-versions = "*"
-
-[package.extras]
-test = ["flake8 (==3.7.8)", "hypothesis (==3.55.3)"]
-
 [[package]]
 name = "coverage"
-version = "6.5.0"
+version = "7.2.2"
 description = "Code coverage measurement for Python"
 category = "dev"
 optional = false
@@ -218,6 +190,14 @@ tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.1
 [package.extras]
 toml = ["tomli"]
 
+[[package]]
+name = "cycler"
+version = "0.11.0"
+description = "Composable style cycles"
+category = "main"
+optional = true
+python-versions = ">=3.6"
+
 [[package]]
 name = "distlib"
 version = "0.3.6"
@@ -228,31 +208,32 @@ python-versions = "*"
 
 [[package]]
 name = "dnspython"
-version = "2.2.1"
+version = "2.3.0"
 description = "DNS toolkit"
 category = "main"
 optional = true
-python-versions = ">=3.6,<4.0"
+python-versions = ">=3.7,<4.0"
 
 [package.extras]
 curio = ["curio (>=1.2,<2.0)", "sniffio (>=1.1,<2.0)"]
-dnssec = ["cryptography (>=2.6,<37.0)"]
-doh = ["h2 (>=4.1.0)", "httpx (>=0.21.1)", "requests (>=2.23.0,<3.0.0)", "requests-toolbelt (>=0.9.1,<0.10.0)"]
+dnssec = ["cryptography (>=2.6,<40.0)"]
+doh = ["h2 (>=4.1.0)", "httpx (>=0.21.1)", "requests (>=2.23.0,<3.0.0)", "requests-toolbelt (>=0.9.1,<0.11.0)"]
+doq = ["aioquic (>=0.9.20)"]
 idna = ["idna (>=2.1,<4.0)"]
-trio = ["trio (>=0.14,<0.20)"]
+trio = ["trio (>=0.14,<0.23)"]
 wmi = ["wmi (>=1.5.1,<2.0.0)"]
 
 [[package]]
 name = "docutils"
-version = "0.17.1"
+version = "0.19"
 description = "Docutils -- Python Documentation Utilities"
 category = "main"
 optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+python-versions = ">=3.7"
 
 [[package]]
 name = "exceptiongroup"
-version = "1.0.4"
+version = "1.1.1"
 description = "Backport of PEP 654 (exception groups)"
 category = "main"
 optional = false
@@ -274,19 +255,41 @@ testing = ["pre-commit"]
 
 [[package]]
 name = "filelock"
-version = "3.8.2"
+version = "3.10.4"
 description = "A platform independent file lock."
 category = "dev"
 optional = false
 python-versions = ">=3.7"
 
 [package.extras]
-docs = ["furo (>=2022.9.29)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"]
-testing = ["covdefaults (>=2.2.2)", "coverage (>=6.5)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-timeout (>=2.1)"]
+docs = ["furo (>=2022.12.7)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)"]
+testing = ["covdefaults (>=2.3)", "coverage (>=7.2.2)", "pytest (>=7.2.2)", "pytest-cov (>=4)", "pytest-timeout (>=2.1)"]
+
+[[package]]
+name = "fonttools"
+version = "4.38.0"
+description = "Tools to manipulate font files"
+category = "main"
+optional = true
+python-versions = ">=3.7"
+
+[package.extras]
+all = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "fs (>=2.2.0,<3)", "lxml (>=4.0,<5)", "lz4 (>=1.7.4.2)", "matplotlib", "munkres", "scipy", "skia-pathops (>=0.5.0)", "sympy", "uharfbuzz (>=0.23.0)", "unicodedata2 (>=14.0.0)", "xattr", "zopfli (>=0.1.4)"]
+graphite = ["lz4 (>=1.7.4.2)"]
+interpolatable = ["munkres", "scipy"]
+lxml = ["lxml (>=4.0,<5)"]
+pathops = ["skia-pathops (>=0.5.0)"]
+plot = ["matplotlib"]
+repacker = ["uharfbuzz (>=0.23.0)"]
+symfont = ["sympy"]
+type1 = ["xattr"]
+ufo = ["fs (>=2.2.0,<3)"]
+unicode = ["unicodedata2 (>=14.0.0)"]
+woff = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "zopfli (>=0.1.4)"]
 
 [[package]]
 name = "furo"
-version = "2022.9.29"
+version = "2022.12.7"
 description = "A clean customisable Sphinx documentation theme."
 category = "main"
 optional = true
@@ -295,12 +298,12 @@ python-versions = ">=3.7"
 [package.dependencies]
 beautifulsoup4 = "*"
 pygments = ">=2.7"
-sphinx = ">=4.0,<6.0"
+sphinx = ">=5.0,<7.0"
 sphinx-basic-ng = "*"
 
 [[package]]
 name = "identify"
-version = "2.5.9"
+version = "2.5.22"
 description = "File identification library for Python"
 category = "dev"
 optional = false
@@ -327,7 +330,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
 
 [[package]]
 name = "importlib-metadata"
-version = "4.13.0"
+version = "5.2.0"
 description = "Read metadata from Python packages"
 category = "main"
 optional = false
@@ -338,17 +341,17 @@ typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""}
 zipp = ">=0.5"
 
 [package.extras]
-docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"]
+docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
 perf = ["ipython"]
 testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"]
 
 [[package]]
 name = "iniconfig"
-version = "1.1.1"
-description = "iniconfig: brain-dead simple config-ini parsing"
+version = "2.0.0"
+description = "brain-dead simple config-ini parsing"
 category = "dev"
 optional = false
-python-versions = "*"
+python-versions = ">=3.7"
 
 [[package]]
 name = "itsdangerous"
@@ -380,9 +383,20 @@ category = "main"
 optional = true
 python-versions = ">=3.7"
 
+[[package]]
+name = "kiwisolver"
+version = "1.4.4"
+description = "A fast implementation of the Cassowary constraint solver"
+category = "main"
+optional = true
+python-versions = ">=3.7"
+
+[package.dependencies]
+typing-extensions = {version = "*", markers = "python_version < \"3.8\""}
+
 [[package]]
 name = "linkify-it-py"
-version = "1.0.3"
+version = "2.0.0"
 description = "Links recognition library with FULL unicode support."
 category = "main"
 optional = true
@@ -411,69 +425,100 @@ tornado = {version = "*", markers = "python_version > \"2.7\""}
 
 [[package]]
 name = "markdown-it-py"
-version = "1.1.0"
+version = "2.2.0"
 description = "Python port of markdown-it. Markdown parsing, done right!"
 category = "main"
-optional = true
-python-versions = "~=3.6"
+optional = false
+python-versions = ">=3.7"
 
 [package.dependencies]
-attrs = ">=19,<22"
-typing-extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""}
+mdurl = ">=0.1,<1.0"
+typing_extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""}
 
 [package.extras]
-code-style = ["pre-commit (==2.6)"]
-compare = ["commonmark (>=0.9.1,<0.10.0)", "markdown (>=3.2.2,<3.3.0)", "mistletoe-ebp (>=0.10.0,<0.11.0)", "mistune (>=0.8.4,<0.9.0)", "panflute (>=1.12,<2.0)"]
-linkify = ["linkify-it-py (>=1.0,<2.0)"]
+benchmarking = ["psutil", "pytest", "pytest-benchmark"]
+code-style = ["pre-commit (>=3.0,<4.0)"]
+compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"]
+linkify = ["linkify-it-py (>=1,<3)"]
 plugins = ["mdit-py-plugins"]
-rtd = ["myst-nb (==0.13.0a1)", "pyyaml", "sphinx (>=2,<4)", "sphinx-book-theme", "sphinx-copybutton", "sphinx-panels (>=0.4.0,<0.5.0)"]
-testing = ["coverage", "psutil", "pytest (>=3.6,<4)", "pytest-benchmark (>=3.2,<4.0)", "pytest-cov", "pytest-regressions"]
+profiling = ["gprof2dot"]
+rtd = ["attrs", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"]
+testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"]
 
 [[package]]
 name = "markupsafe"
-version = "2.1.1"
+version = "2.1.2"
 description = "Safely add untrusted strings to HTML/XML markup."
 category = "main"
 optional = false
 python-versions = ">=3.7"
 
+[[package]]
+name = "matplotlib"
+version = "3.5.3"
+description = "Python plotting package"
+category = "main"
+optional = true
+python-versions = ">=3.7"
+
+[package.dependencies]
+cycler = ">=0.10"
+fonttools = ">=4.22.0"
+kiwisolver = ">=1.0.1"
+numpy = ">=1.17"
+packaging = ">=20.0"
+pillow = ">=6.2.0"
+pyparsing = ">=2.2.1"
+python-dateutil = ">=2.7"
+setuptools_scm = ">=4,<7"
+
 [[package]]
 name = "mdit-py-plugins"
-version = "0.2.8"
+version = "0.3.5"
 description = "Collection of plugins for markdown-it-py"
 category = "main"
 optional = true
-python-versions = "~=3.6"
+python-versions = ">=3.7"
 
 [package.dependencies]
-markdown-it-py = ">=1.0,<2.0"
+markdown-it-py = ">=1.0.0,<3.0.0"
 
 [package.extras]
-code-style = ["pre-commit (==2.6)"]
-rtd = ["myst-parser (==0.14.0a3)", "sphinx-book-theme (>=0.1.0,<0.2.0)"]
-testing = ["coverage", "pytest (>=3.6,<4)", "pytest-cov", "pytest-regressions"]
+code-style = ["pre-commit"]
+rtd = ["attrs", "myst-parser (>=0.16.1,<0.17.0)", "sphinx-book-theme (>=0.1.0,<0.2.0)"]
+testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"]
+
+[[package]]
+name = "mdurl"
+version = "0.1.2"
+description = "Markdown URL utilities"
+category = "main"
+optional = false
+python-versions = ">=3.7"
 
 [[package]]
 name = "myst-parser"
-version = "0.15.2"
-description = "An extended commonmark compliant parser, with bridges to docutils & sphinx."
+version = "1.0.0"
+description = "An extended [CommonMark](https://spec.commonmark.org/) compliant parser,"
 category = "main"
 optional = true
-python-versions = ">=3.6"
+python-versions = ">=3.7"
 
 [package.dependencies]
-docutils = ">=0.15,<0.18"
+docutils = ">=0.15,<0.20"
 jinja2 = "*"
-markdown-it-py = ">=1.0.0,<2.0.0"
-mdit-py-plugins = ">=0.2.8,<0.3.0"
+markdown-it-py = ">=1.0.0,<3.0.0"
+mdit-py-plugins = ">=0.3.4,<0.4.0"
 pyyaml = "*"
-sphinx = ">=3.1,<5"
+sphinx = ">=5,<7"
+typing-extensions = {version = "*", markers = "python_version < \"3.8\""}
 
 [package.extras]
-code-style = ["pre-commit (>=2.12,<3.0)"]
+code-style = ["pre-commit (>=3.0,<4.0)"]
 linkify = ["linkify-it-py (>=1.0,<2.0)"]
-rtd = ["ipython", "sphinx-book-theme (>=0.1.0,<0.2.0)", "sphinx-panels (>=0.5.2,<0.6.0)", "sphinxcontrib-bibtex (>=2.1,<3.0)", "sphinxcontrib.mermaid (>=0.6.3,<0.7.0)", "sphinxext-opengraph (>=0.4.2,<0.5.0)", "sphinxext-rediraffe (>=0.2,<1.0)"]
-testing = ["beautifulsoup4", "coverage", "docutils (>=0.17.0,<0.18.0)", "pytest (>=3.6,<4)", "pytest-cov", "pytest-regressions"]
+rtd = ["ipython", "pydata-sphinx-theme (==v0.13.0rc4)", "sphinx-autodoc2 (>=0.4.2,<0.5.0)", "sphinx-book-theme (==1.0.0rc2)", "sphinx-copybutton", "sphinx-design2", "sphinx-pyscript", "sphinx-tippy (>=0.3.1)", "sphinx-togglebutton", "sphinxext-opengraph (>=0.7.5,<0.8.0)", "sphinxext-rediraffe (>=0.2.7,<0.3.0)"]
+testing = ["beautifulsoup4", "coverage[toml]", "pytest (>=7,<8)", "pytest-cov", "pytest-param-files (>=0.3.4,<0.4.0)", "pytest-regressions", "sphinx-pytest"]
+testing-docutils = ["pygments", "pytest (>=7,<8)", "pytest-param-files (>=0.3.4,<0.4.0)"]
 
 [[package]]
 name = "nodeenv"
@@ -488,27 +533,26 @@ setuptools = "*"
 
 [[package]]
 name = "nox"
-version = "2021.10.1"
+version = "2022.11.21"
 description = "Flexible test automation."
 category = "dev"
 optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"
 
 [package.dependencies]
-argcomplete = ">=1.9.4,<2.0"
+argcomplete = ">=1.9.4,<3.0"
 colorlog = ">=2.6.1,<7.0.0"
 importlib-metadata = {version = "*", markers = "python_version < \"3.8\""}
 packaging = ">=20.9"
-py = ">=1.4.0,<2.0.0"
 typing-extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""}
-virtualenv = ">=14.0.0"
+virtualenv = ">=14"
 
 [package.extras]
 tox-to-nox = ["jinja2", "tox"]
 
 [[package]]
 name = "nox-poetry"
-version = "0.9.0"
+version = "1.0.2"
 description = "nox-poetry"
 category = "dev"
 optional = false
@@ -517,11 +561,19 @@ python-versions = ">=3.7,<4.0"
 [package.dependencies]
 nox = ">=2020.8.22"
 packaging = ">=20.9"
-tomlkit = ">=0.7.0,<0.8.0"
+tomlkit = ">=0.7"
+
+[[package]]
+name = "numpy"
+version = "1.21.1"
+description = "NumPy is the fundamental package for array computing with Python."
+category = "main"
+optional = true
+python-versions = ">=3.7"
 
 [[package]]
 name = "packaging"
-version = "22.0"
+version = "23.0"
 description = "Core utilities for Python packages"
 category = "main"
 optional = false
@@ -529,23 +581,38 @@ python-versions = ">=3.7"
 
 [[package]]
 name = "pbr"
-version = "5.11.0"
+version = "5.11.1"
 description = "Python Build Reasonableness"
 category = "main"
 optional = true
 python-versions = ">=2.6"
 
+[[package]]
+name = "pillow"
+version = "9.4.0"
+description = "Python Imaging Library (Fork)"
+category = "main"
+optional = true
+python-versions = ">=3.7"
+
+[package.extras]
+docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-issues (>=3.0.1)", "sphinx-removed-in", "sphinxext-opengraph"]
+tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"]
+
 [[package]]
 name = "platformdirs"
-version = "2.6.0"
+version = "3.1.1"
 description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
-category = "dev"
+category = "main"
 optional = false
 python-versions = ">=3.7"
 
+[package.dependencies]
+typing-extensions = {version = ">=4.4", markers = "python_version < \"3.8\""}
+
 [package.extras]
-docs = ["furo (>=2022.9.29)", "proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.4)"]
-test = ["appdirs (==1.4.4)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"]
+docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)"]
+test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest (>=7.2.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"]
 
 [[package]]
 name = "pluggy"
@@ -572,7 +639,7 @@ python-versions = "*"
 
 [[package]]
 name = "pre-commit"
-version = "2.20.0"
+version = "2.21.0"
 description = "A framework for managing and maintaining multi-language pre-commit hooks."
 category = "dev"
 optional = false
@@ -584,8 +651,7 @@ identify = ">=1.0.0"
 importlib-metadata = {version = "*", markers = "python_version < \"3.8\""}
 nodeenv = ">=0.11.1"
 pyyaml = ">=5.1"
-toml = "*"
-virtualenv = ">=20.0.8"
+virtualenv = ">=20.10.0"
 
 [[package]]
 name = "psutil"
@@ -598,17 +664,9 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
 [package.extras]
 test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"]
 
-[[package]]
-name = "py"
-version = "1.11.0"
-description = "library with cross-python path, ini-parsing, io, code, log facilities"
-category = "dev"
-optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
-
 [[package]]
 name = "pygments"
-version = "2.13.0"
+version = "2.14.0"
 description = "Pygments is a syntax highlighting package written in Python."
 category = "main"
 optional = false
@@ -636,27 +694,37 @@ ocsp = ["certifi", "pyopenssl (>=17.2.0)", "requests (<3.0.0)", "service-identit
 snappy = ["python-snappy"]
 zstd = ["zstandard"]
 
+[[package]]
+name = "pyparsing"
+version = "3.0.9"
+description = "pyparsing module - Classes and methods to define and execute parsing grammars"
+category = "main"
+optional = true
+python-versions = ">=3.6.8"
+
+[package.extras]
+diagrams = ["jinja2", "railroad-diagrams"]
+
 [[package]]
 name = "pytest"
-version = "6.2.5"
+version = "7.2.2"
 description = "pytest: simple powerful testing with Python"
 category = "dev"
 optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"
 
 [package.dependencies]
-atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""}
 attrs = ">=19.2.0"
 colorama = {version = "*", markers = "sys_platform == \"win32\""}
+exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""}
 importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""}
 iniconfig = "*"
 packaging = "*"
 pluggy = ">=0.12,<2.0"
-py = ">=1.8.2"
-toml = "*"
+tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""}
 
 [package.extras]
-testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"]
+testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"]
 
 [[package]]
 name = "pytest-clarity"
@@ -701,7 +769,7 @@ pytest = ">=5.3"
 
 [[package]]
 name = "pytest-xdist"
-version = "3.1.0"
+version = "3.2.1"
 description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs"
 category = "dev"
 optional = false
@@ -729,7 +797,7 @@ six = ">=1.5"
 
 [[package]]
 name = "pytz"
-version = "2022.6"
+version = "2022.7.1"
 description = "World timezone definitions, modern and historical"
 category = "main"
 optional = false
@@ -745,14 +813,14 @@ python-versions = ">=3.6"
 
 [[package]]
 name = "redis"
-version = "4.4.0"
+version = "4.5.3"
 description = "Python client for Redis database and key-value store"
 category = "main"
 optional = true
 python-versions = ">=3.7"
 
 [package.dependencies]
-async-timeout = ">=4.0.2"
+async-timeout = {version = ">=4.0.2", markers = "python_version < \"3.11\""}
 importlib-metadata = {version = ">=1.0", markers = "python_version < \"3.8\""}
 typing-extensions = {version = "*", markers = "python_version < \"3.8\""}
 
@@ -762,7 +830,7 @@ ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)"
 
 [[package]]
 name = "requests"
-version = "2.28.1"
+version = "2.28.2"
 description = "Python HTTP for Humans."
 category = "main"
 optional = false
@@ -770,7 +838,7 @@ python-versions = ">=3.7, <4"
 
 [package.dependencies]
 certifi = ">=2017.4.17"
-charset-normalizer = ">=2,<3"
+charset-normalizer = ">=2,<4"
 idna = ">=2.5,<4"
 urllib3 = ">=1.21.1,<1.27"
 
@@ -796,35 +864,34 @@ test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "tes
 
 [[package]]
 name = "responses"
-version = "0.16.0"
+version = "0.19.0"
 description = "A utility library for mocking out the `requests` Python library."
 category = "dev"
 optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+python-versions = ">=3.7"
 
 [package.dependencies]
-requests = ">=2.0"
-six = "*"
+requests = ">=2.0,<3.0"
 urllib3 = ">=1.25.10"
 
 [package.extras]
-tests = ["coverage (>=3.7.1,<6.0.0)", "flake8", "mypy", "pytest (>=4.6)", "pytest (>=4.6,<5.0)", "pytest-cov", "pytest-localserver", "types-mock", "types-requests", "types-six"]
+tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asyncio", "pytest-cov", "pytest-localserver", "types-mock", "types-requests"]
 
 [[package]]
 name = "rich"
-version = "12.6.0"
+version = "13.3.2"
 description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal"
 category = "dev"
 optional = false
-python-versions = ">=3.6.3,<4.0.0"
+python-versions = ">=3.7.0"
 
 [package.dependencies]
-commonmark = ">=0.9.0,<0.10.0"
-pygments = ">=2.6.0,<3.0.0"
+markdown-it-py = ">=2.2.0,<3.0.0"
+pygments = ">=2.13.0,<3.0.0"
 typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.9\""}
 
 [package.extras]
-jupyter = ["ipywidgets (>=7.5.1,<8.0.0)"]
+jupyter = ["ipywidgets (>=7.5.1,<9)"]
 
 [[package]]
 name = "s3transfer"
@@ -842,17 +909,34 @@ crt = ["botocore[crt] (>=1.20.29,<2.0a.0)"]
 
 [[package]]
 name = "setuptools"
-version = "65.6.3"
+version = "67.6.0"
 description = "Easily download, build, install, upgrade, and uninstall Python packages"
 category = "main"
 optional = false
 python-versions = ">=3.7"
 
 [package.extras]
-docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
+docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
 testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
 testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"]
 
+[[package]]
+name = "setuptools-scm"
+version = "6.4.2"
+description = "the blessed package to manage your versions by scm tags"
+category = "main"
+optional = true
+python-versions = ">=3.6"
+
+[package.dependencies]
+packaging = ">=20.0"
+setuptools = "*"
+tomli = ">=1.0.0"
+
+[package.extras]
+test = ["pytest (>=6.2)", "virtualenv (>20)"]
+toml = ["setuptools (>=42)"]
+
 [[package]]
 name = "six"
 version = "1.16.0"
@@ -871,15 +955,15 @@ python-versions = "*"
 
 [[package]]
 name = "soupsieve"
-version = "2.3.2.post1"
+version = "2.4"
 description = "A modern CSS selector implementation for Beautiful Soup."
 category = "main"
 optional = true
-python-versions = ">=3.6"
+python-versions = ">=3.7"
 
 [[package]]
 name = "sphinx"
-version = "4.3.0"
+version = "5.3.0"
 description = "Python documentation generator"
 category = "main"
 optional = false
@@ -887,16 +971,16 @@ python-versions = ">=3.6"
 
 [package.dependencies]
 alabaster = ">=0.7,<0.8"
-babel = ">=1.3"
-colorama = {version = ">=0.3.5", markers = "sys_platform == \"win32\""}
-docutils = ">=0.14,<0.18"
-imagesize = "*"
-Jinja2 = ">=2.3"
-packaging = "*"
-Pygments = ">=2.0"
+babel = ">=2.9"
+colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""}
+docutils = ">=0.14,<0.20"
+imagesize = ">=1.3"
+importlib-metadata = {version = ">=4.8", markers = "python_version < \"3.10\""}
+Jinja2 = ">=3.0"
+packaging = ">=21.0"
+Pygments = ">=2.12"
 requests = ">=2.5.0"
-setuptools = "*"
-snowballstemmer = ">=1.1"
+snowballstemmer = ">=2.0"
 sphinxcontrib-applehelp = "*"
 sphinxcontrib-devhelp = "*"
 sphinxcontrib-htmlhelp = ">=2.0.0"
@@ -906,8 +990,8 @@ sphinxcontrib-serializinghtml = ">=1.1.5"
 
 [package.extras]
 docs = ["sphinxcontrib-websupport"]
-lint = ["docutils-stubs", "flake8 (>=3.5.0)", "isort", "mypy (>=0.900)", "types-pkg-resources", "types-requests", "types-typed-ast"]
-test = ["cython", "html5lib", "pytest", "pytest-cov", "typed-ast"]
+lint = ["docutils-stubs", "flake8 (>=3.5.0)", "flake8-bugbear", "flake8-comprehensions", "flake8-simplify", "isort", "mypy (>=0.981)", "sphinx-lint", "types-requests", "types-typed-ast"]
+test = ["cython", "html5lib", "pytest (>=4.6)", "typed_ast"]
 
 [[package]]
 name = "sphinx-autobuild"
@@ -927,22 +1011,23 @@ test = ["pytest", "pytest-cov"]
 
 [[package]]
 name = "sphinx-autodoc-typehints"
-version = "1.17.1"
+version = "1.22"
 description = "Type hints (PEP 484) support for the Sphinx autodoc extension"
 category = "main"
 optional = true
 python-versions = ">=3.7"
 
 [package.dependencies]
-Sphinx = ">=4"
+sphinx = ">=5.3"
 
 [package.extras]
-testing = ["covdefaults (>=2)", "coverage (>=6)", "diff-cover (>=6.4)", "nptyping (>=1,<2)", "pytest (>=6)", "pytest-cov (>=3)", "sphobjinv (>=2)", "typing-extensions (>=3.5)"]
-type-comments = ["typed-ast (>=1.4.0)"]
+docs = ["furo (>=2022.12.7)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.21)"]
+testing = ["covdefaults (>=2.2.2)", "coverage (>=7.0.5)", "diff-cover (>=7.3)", "nptyping (>=2.4.1)", "pytest (>=7.2.1)", "pytest-cov (>=4)", "sphobjinv (>=2.3.1)", "typing-extensions (>=4.4)"]
+type-comment = ["typed-ast (>=1.5.4)"]
 
 [[package]]
 name = "sphinx-automodapi"
-version = "0.14.1"
+version = "0.15.0"
 description = "Sphinx extension for auto-generating API documentation for entire modules"
 category = "main"
 optional = true
@@ -970,33 +1055,38 @@ docs = ["furo", "ipython", "myst-parser", "sphinx-copybutton", "sphinx-inline-ta
 
 [[package]]
 name = "sphinx-copybutton"
-version = "0.4.0"
+version = "0.5.1"
 description = "Add a copy button to each of your code cells."
 category = "main"
 optional = true
-python-versions = ">=3.6"
+python-versions = ">=3.7"
 
 [package.dependencies]
 sphinx = ">=1.8"
 
 [package.extras]
 code-style = ["pre-commit (==2.12.1)"]
-rtd = ["ipython", "sphinx", "sphinx-book-theme"]
+rtd = ["ipython", "myst-nb", "sphinx", "sphinx-book-theme", "sphinx-examples"]
 
 [[package]]
-name = "sphinx-inline-tabs"
-version = "2022.1.2b11"
-description = "Add inline tabbed content to your Sphinx documentation."
+name = "sphinx-design"
+version = "0.3.0"
+description = "A sphinx extension for designing beautiful, view size responsive web components."
 category = "main"
 optional = true
-python-versions = ">=3.8"
+python-versions = ">=3.7"
 
 [package.dependencies]
-sphinx = ">=3"
+sphinx = ">=4,<6"
 
 [package.extras]
-doc = ["furo", "myst-parser"]
-test = ["pytest", "pytest-cov", "pytest-xdist"]
+code-style = ["pre-commit (>=2.12,<3.0)"]
+rtd = ["myst-parser (>=0.18.0,<0.19.0)"]
+testing = ["myst-parser (>=0.18.0,<0.19.0)", "pytest (>=7.1,<8.0)", "pytest-cov", "pytest-regressions"]
+theme-furo = ["furo (>=2022.06.04,<2022.07)"]
+theme-pydata = ["pydata-sphinx-theme (>=0.9.0,<0.10.0)"]
+theme-rtd = ["sphinx-rtd-theme (>=1.0,<2.0)"]
+theme-sbt = ["sphinx-book-theme (>=0.3.0,<0.4.0)"]
 
 [[package]]
 name = "sphinx-notfound-page"
@@ -1013,24 +1103,6 @@ sphinx = ">=1.8"
 doc = ["sphinx", "sphinx-autoapi", "sphinx-notfound-page", "sphinx-prompt", "sphinx-rtd-theme", "sphinx-tabs", "sphinxemoji"]
 test = ["tox"]
 
-[[package]]
-name = "sphinx-panels"
-version = "0.6.0"
-description = "A sphinx extension for creating panels in a grid layout."
-category = "main"
-optional = true
-python-versions = "*"
-
-[package.dependencies]
-docutils = "*"
-sphinx = ">=2,<5"
-
-[package.extras]
-code-style = ["pre-commit (>=2.7.0,<2.8.0)"]
-live-dev = ["sphinx-autobuild", "web-compile (>=0.2.0,<0.3.0)"]
-testing = ["pytest (>=6.0.1,<6.1.0)", "pytest-regressions (>=2.0.1,<2.1.0)"]
-themes = ["myst-parser (>=0.12.9,<0.13.0)", "pydata-sphinx-theme (>=0.4.0,<0.5.0)", "sphinx-book-theme (>=0.0.36,<0.1.0)", "sphinx-rtd-theme"]
-
 [[package]]
 name = "sphinxcontrib-apidoc"
 version = "0.3.0"
@@ -1115,36 +1187,51 @@ lint = ["docutils-stubs", "flake8", "mypy"]
 test = ["pytest"]
 
 [[package]]
-name = "timeout-decorator"
-version = "0.5.0"
-description = "Timeout decorator"
+name = "sphinxext-opengraph"
+version = "0.8.1"
+description = "Sphinx Extension to enable OGP support"
+category = "main"
+optional = true
+python-versions = ">=3.7"
+
+[package.dependencies]
+matplotlib = "*"
+sphinx = ">=4.0"
+
+[[package]]
+name = "tenacity"
+version = "8.2.2"
+description = "Retry code until it succeeds"
 category = "dev"
 optional = false
-python-versions = "*"
+python-versions = ">=3.6"
+
+[package.extras]
+doc = ["reno", "sphinx", "tornado (>=4.5)"]
 
 [[package]]
-name = "toml"
-version = "0.10.2"
-description = "Python Library for Tom's Obvious, Minimal Language"
+name = "timeout-decorator"
+version = "0.5.0"
+description = "Timeout decorator"
 category = "dev"
 optional = false
-python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
+python-versions = "*"
 
 [[package]]
 name = "tomli"
 version = "2.0.1"
 description = "A lil' TOML parser"
-category = "dev"
+category = "main"
 optional = false
 python-versions = ">=3.7"
 
 [[package]]
 name = "tomlkit"
-version = "0.7.2"
+version = "0.11.6"
 description = "Style preserving TOML library"
 category = "dev"
 optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+python-versions = ">=3.6"
 
 [[package]]
 name = "tornado"
@@ -1156,7 +1243,7 @@ python-versions = ">= 3.7"
 
 [[package]]
 name = "typing-extensions"
-version = "4.4.0"
+version = "4.5.0"
 description = "Backported and Experimental Type Hints for Python 3.7+"
 category = "main"
 optional = false
@@ -1175,7 +1262,7 @@ test = ["coverage", "pytest", "pytest-cov"]
 
 [[package]]
 name = "ujson"
-version = "5.6.0"
+version = "5.7.0"
 description = "Ultra fast JSON encoder and decoder for Python"
 category = "main"
 optional = true
@@ -1194,7 +1281,7 @@ six = "*"
 
 [[package]]
 name = "urllib3"
-version = "1.26.13"
+version = "1.26.15"
 description = "HTTP library with thread-safe connection pooling, file post, and more."
 category = "main"
 optional = false
@@ -1207,38 +1294,38 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
 
 [[package]]
 name = "virtualenv"
-version = "20.17.1"
+version = "20.21.0"
 description = "Virtual Python Environment builder"
 category = "dev"
 optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"
 
 [package.dependencies]
 distlib = ">=0.3.6,<1"
 filelock = ">=3.4.1,<4"
 importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.8\""}
-platformdirs = ">=2.4,<3"
+platformdirs = ">=2.4,<4"
 
 [package.extras]
-docs = ["proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-argparse (>=0.3.2)", "sphinx-rtd-theme (>=1)", "towncrier (>=22.8)"]
-testing = ["coverage (>=6.2)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=21.3)", "pytest (>=7.0.1)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.6.1)", "pytest-randomly (>=3.10.3)", "pytest-timeout (>=2.1)"]
+docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=22.12)"]
+test = ["covdefaults (>=2.2.2)", "coverage (>=7.1)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23)", "pytest (>=7.2.1)", "pytest-env (>=0.8.1)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.10)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)"]
 
 [[package]]
 name = "zipp"
-version = "3.11.0"
+version = "3.15.0"
 description = "Backport of pathlib-compatible object wrapper for zip files"
 category = "main"
 optional = false
 python-versions = ">=3.7"
 
 [package.extras]
-docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"]
-testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"]
+docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
+testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"]
 
 [extras]
 all = ["boto3", "botocore", "itsdangerous", "pymongo", "pyyaml", "redis", "ujson"]
 bson = ["bson"]
-docs = ["furo", "linkify-it-py", "myst-parser", "sphinx", "sphinx-autodoc-typehints", "sphinx-automodapi", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-panels", "sphinx-notfound-page", "sphinxcontrib-apidoc"]
+docs = ["furo", "linkify-it-py", "myst-parser", "sphinx", "sphinx-autodoc-typehints", "sphinx-automodapi", "sphinx-copybutton", "sphinx-design", "sphinx-notfound-page", "sphinxcontrib-apidoc", "sphinxext-opengraph"]
 dynamodb = ["boto3", "botocore"]
 json = ["ujson"]
 mongodb = ["pymongo"]
@@ -1249,47 +1336,40 @@ yaml = ["pyyaml"]
 [metadata]
 lock-version = "1.1"
 python-versions = "^3.7"     # requests requires python >=3.7, <4.0
-content-hash = "199552cde5c659e2b8bfeb1fa94c25ea3f6293f6480e0babfcaa0811e29e5cb2"
+content-hash = "67f15c23ae98e3778d06dae302e6569812dae27f51cf7726382dc4aab1e1c3e0"
 
 [metadata.files]
 alabaster = [
-    {file = "alabaster-0.7.12-py2.py3-none-any.whl", hash = "sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359"},
-    {file = "alabaster-0.7.12.tar.gz", hash = "sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02"},
-]
-appdirs = [
-    {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"},
-    {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"},
+    {file = "alabaster-0.7.13-py3-none-any.whl", hash = "sha256:1ee19aca801bbabb5ba3f5f258e4422dfa86f82f3e9cefb0859b283cdd7f62a3"},
+    {file = "alabaster-0.7.13.tar.gz", hash = "sha256:a27a4a084d5e690e16e01e03ad2b2e552c61a65469419b907243193de1a84ae2"},
 ]
 argcomplete = [
-    {file = "argcomplete-1.12.3-py2.py3-none-any.whl", hash = "sha256:291f0beca7fd49ce285d2f10e4c1c77e9460cf823eef2de54df0c0fec88b0d81"},
-    {file = "argcomplete-1.12.3.tar.gz", hash = "sha256:2c7dbffd8c045ea534921e63b0be6fe65e88599990d8dc408ac8c542b72a5445"},
+    {file = "argcomplete-2.1.2-py3-none-any.whl", hash = "sha256:4ba9cdaa28c361d251edce884cd50b4b1215d65cdc881bd204426cdde9f52731"},
+    {file = "argcomplete-2.1.2.tar.gz", hash = "sha256:fc82ef070c607b1559b5c720529d63b54d9dcf2dcfc2632b10e6372314a34457"},
 ]
 async-timeout = [
     {file = "async-timeout-4.0.2.tar.gz", hash = "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"},
     {file = "async_timeout-4.0.2-py3-none-any.whl", hash = "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"},
 ]
-atomicwrites = [
-    {file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"},
-]
 attrs = [
-    {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"},
-    {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"},
+    {file = "attrs-22.2.0-py3-none-any.whl", hash = "sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836"},
+    {file = "attrs-22.2.0.tar.gz", hash = "sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99"},
 ]
 babel = [
-    {file = "Babel-2.11.0-py3-none-any.whl", hash = "sha256:1ad3eca1c885218f6dce2ab67291178944f810a10a9b5f3cb8382a5a232b64fe"},
-    {file = "Babel-2.11.0.tar.gz", hash = "sha256:5ef4b3226b0180dedded4229651c8b0e1a3a6a2837d45a073272f313e4cf97f6"},
+    {file = "Babel-2.12.1-py3-none-any.whl", hash = "sha256:b4246fb7677d3b98f501a39d43396d3cafdc8eadb045f4a31be01863f655c610"},
+    {file = "Babel-2.12.1.tar.gz", hash = "sha256:cc2d99999cd01d44420ae725a21c9e3711b3aadc7976d6147f622d8581963455"},
 ]
 beautifulsoup4 = [
-    {file = "beautifulsoup4-4.11.1-py3-none-any.whl", hash = "sha256:58d5c3d29f5a36ffeb94f02f0d786cd53014cf9b3b3951d42e0080d8a9498d30"},
-    {file = "beautifulsoup4-4.11.1.tar.gz", hash = "sha256:ad9aa55b65ef2808eb405f46cf74df7fcb7044d5cbc26487f96eb2ef2e436693"},
+    {file = "beautifulsoup4-4.12.0-py3-none-any.whl", hash = "sha256:2130a5ad7f513200fae61a17abb5e338ca980fa28c439c0571014bc0217e9591"},
+    {file = "beautifulsoup4-4.12.0.tar.gz", hash = "sha256:c5fceeaec29d09c84970e47c65f2f0efe57872f7cff494c9691a26ec0ff13234"},
 ]
 boto3 = [
-    {file = "boto3-1.26.29-py3-none-any.whl", hash = "sha256:2e5e80daae3873185b046d1fabc13676aea519e891faf4f27ca71d287bc26039"},
-    {file = "boto3-1.26.29.tar.gz", hash = "sha256:4fb4a0ce2679e5dc1719441192b45687654201d5de76224f2c376b689c9ed4aa"},
+    {file = "boto3-1.26.99-py3-none-any.whl", hash = "sha256:536d9e7a074f4f16cc87b426f91b3079edd5c6927541a04f7e3fa28c53293532"},
+    {file = "boto3-1.26.99.tar.gz", hash = "sha256:d9fd57d6e98fd919cdbd613428f685e05b48c71477fda1aa7fbf51867262c7d1"},
 ]
 botocore = [
-    {file = "botocore-1.29.29-py3-none-any.whl", hash = "sha256:dca2daf108aae6c847d8ec99b7e918b46ae81713bf70b2199ab94627faf935a1"},
-    {file = "botocore-1.29.29.tar.gz", hash = "sha256:97a6d059e688ff9caa7c0a4e30cc58fa27be8bf3347578ce7c62fb808380fb55"},
+    {file = "botocore-1.29.99-py3-none-any.whl", hash = "sha256:15c205e4578253da1e8cc247b9d4755042f5f873f68ac6e5fed48f4bd6f008c6"},
+    {file = "botocore-1.29.99.tar.gz", hash = "sha256:d1770b4fe5531870af7a81e9897b2092d2f89e4ba8cb7abbbaf3ab952f6b8a6f"},
 ]
 bson = [
     {file = "bson-0.5.10.tar.gz", hash = "sha256:d6511b2ab051139a9123c184de1a04227262173ad593429d21e443d6462d6590"},
@@ -1307,8 +1387,81 @@ cfgv = [
     {file = "cfgv-3.3.1.tar.gz", hash = "sha256:f5a830efb9ce7a445376bb66ec94c638a9787422f96264c98edc6bdeed8ab736"},
 ]
 charset-normalizer = [
-    {file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"},
-    {file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"},
+    {file = "charset-normalizer-3.1.0.tar.gz", hash = "sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5"},
+    {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b"},
+    {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60"},
+    {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1"},
+    {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0"},
+    {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f"},
+    {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0"},
+    {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795"},
+    {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c"},
+    {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203"},
+    {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1"},
+    {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137"},
+    {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce"},
+    {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a"},
+    {file = "charset_normalizer-3.1.0-cp310-cp310-win32.whl", hash = "sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448"},
+    {file = "charset_normalizer-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8"},
+    {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19"},
+    {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017"},
+    {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df"},
+    {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a"},
+    {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41"},
+    {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1"},
+    {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62"},
+    {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6"},
+    {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5"},
+    {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be"},
+    {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb"},
+    {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac"},
+    {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324"},
+    {file = "charset_normalizer-3.1.0-cp311-cp311-win32.whl", hash = "sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909"},
+    {file = "charset_normalizer-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755"},
+    {file = "charset_normalizer-3.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373"},
+    {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab"},
+    {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9"},
+    {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f"},
+    {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28"},
+    {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d"},
+    {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d"},
+    {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d"},
+    {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6"},
+    {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84"},
+    {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c"},
+    {file = "charset_normalizer-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974"},
+    {file = "charset_normalizer-3.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23"},
+    {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531"},
+    {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c"},
+    {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14"},
+    {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb"},
+    {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1"},
+    {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b"},
+    {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0"},
+    {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649"},
+    {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326"},
+    {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11"},
+    {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b"},
+    {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd"},
+    {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8"},
+    {file = "charset_normalizer-3.1.0-cp38-cp38-win32.whl", hash = "sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0"},
+    {file = "charset_normalizer-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59"},
+    {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e"},
+    {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31"},
+    {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f"},
+    {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e"},
+    {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f"},
+    {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854"},
+    {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706"},
+    {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e"},
+    {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0"},
+    {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230"},
+    {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7"},
+    {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e"},
+    {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f"},
+    {file = "charset_normalizer-3.1.0-cp39-cp39-win32.whl", hash = "sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1"},
+    {file = "charset_normalizer-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b"},
+    {file = "charset_normalizer-3.1.0-py3-none-any.whl", hash = "sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d"},
 ]
 colorama = [
     {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
@@ -1318,93 +1471,98 @@ colorlog = [
     {file = "colorlog-6.7.0-py2.py3-none-any.whl", hash = "sha256:0d33ca236784a1ba3ff9c532d4964126d8a2c44f1f0cb1d2b0728196f512f662"},
     {file = "colorlog-6.7.0.tar.gz", hash = "sha256:bd94bd21c1e13fac7bd3153f4bc3a7dc0eb0974b8bc2fdf1a989e474f6e582e5"},
 ]
-commonmark = [
-    {file = "commonmark-0.9.1-py2.py3-none-any.whl", hash = "sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9"},
-    {file = "commonmark-0.9.1.tar.gz", hash = "sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60"},
-]
 coverage = [
-    {file = "coverage-6.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef8674b0ee8cc11e2d574e3e2998aea5df5ab242e012286824ea3c6970580e53"},
-    {file = "coverage-6.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:784f53ebc9f3fd0e2a3f6a78b2be1bd1f5575d7863e10c6e12504f240fd06660"},
-    {file = "coverage-6.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4a5be1748d538a710f87542f22c2cad22f80545a847ad91ce45e77417293eb4"},
-    {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83516205e254a0cb77d2d7bb3632ee019d93d9f4005de31dca0a8c3667d5bc04"},
-    {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af4fffaffc4067232253715065e30c5a7ec6faac36f8fc8d6f64263b15f74db0"},
-    {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:97117225cdd992a9c2a5515db1f66b59db634f59d0679ca1fa3fe8da32749cae"},
-    {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1170fa54185845505fbfa672f1c1ab175446c887cce8212c44149581cf2d466"},
-    {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:11b990d520ea75e7ee8dcab5bc908072aaada194a794db9f6d7d5cfd19661e5a"},
-    {file = "coverage-6.5.0-cp310-cp310-win32.whl", hash = "sha256:5dbec3b9095749390c09ab7c89d314727f18800060d8d24e87f01fb9cfb40b32"},
-    {file = "coverage-6.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:59f53f1dc5b656cafb1badd0feb428c1e7bc19b867479ff72f7a9dd9b479f10e"},
-    {file = "coverage-6.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4a5375e28c5191ac38cca59b38edd33ef4cc914732c916f2929029b4bfb50795"},
-    {file = "coverage-6.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4ed2820d919351f4167e52425e096af41bfabacb1857186c1ea32ff9983ed75"},
-    {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33a7da4376d5977fbf0a8ed91c4dffaaa8dbf0ddbf4c8eea500a2486d8bc4d7b"},
-    {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8fb6cf131ac4070c9c5a3e21de0f7dc5a0fbe8bc77c9456ced896c12fcdad91"},
-    {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a6b7d95969b8845250586f269e81e5dfdd8ff828ddeb8567a4a2eaa7313460c4"},
-    {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1ef221513e6f68b69ee9e159506d583d31aa3567e0ae84eaad9d6ec1107dddaa"},
-    {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cca4435eebea7962a52bdb216dec27215d0df64cf27fc1dd538415f5d2b9da6b"},
-    {file = "coverage-6.5.0-cp311-cp311-win32.whl", hash = "sha256:98e8a10b7a314f454d9eff4216a9a94d143a7ee65018dd12442e898ee2310578"},
-    {file = "coverage-6.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:bc8ef5e043a2af066fa8cbfc6e708d58017024dc4345a1f9757b329a249f041b"},
-    {file = "coverage-6.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4433b90fae13f86fafff0b326453dd42fc9a639a0d9e4eec4d366436d1a41b6d"},
-    {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4f05d88d9a80ad3cac6244d36dd89a3c00abc16371769f1340101d3cb899fc3"},
-    {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:94e2565443291bd778421856bc975d351738963071e9b8839ca1fc08b42d4bef"},
-    {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:027018943386e7b942fa832372ebc120155fd970837489896099f5cfa2890f79"},
-    {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:255758a1e3b61db372ec2736c8e2a1fdfaf563977eedbdf131de003ca5779b7d"},
-    {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:851cf4ff24062c6aec510a454b2584f6e998cada52d4cb58c5e233d07172e50c"},
-    {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:12adf310e4aafddc58afdb04d686795f33f4d7a6fa67a7a9d4ce7d6ae24d949f"},
-    {file = "coverage-6.5.0-cp37-cp37m-win32.whl", hash = "sha256:b5604380f3415ba69de87a289a2b56687faa4fe04dbee0754bfcae433489316b"},
-    {file = "coverage-6.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4a8dbc1f0fbb2ae3de73eb0bdbb914180c7abfbf258e90b311dcd4f585d44bd2"},
-    {file = "coverage-6.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d900bb429fdfd7f511f868cedd03a6bbb142f3f9118c09b99ef8dc9bf9643c3c"},
-    {file = "coverage-6.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2198ea6fc548de52adc826f62cb18554caedfb1d26548c1b7c88d8f7faa8f6ba"},
-    {file = "coverage-6.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c4459b3de97b75e3bd6b7d4b7f0db13f17f504f3d13e2a7c623786289dd670e"},
-    {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:20c8ac5386253717e5ccc827caad43ed66fea0efe255727b1053a8154d952398"},
-    {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b07130585d54fe8dff3d97b93b0e20290de974dc8177c320aeaf23459219c0b"},
-    {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dbdb91cd8c048c2b09eb17713b0c12a54fbd587d79adcebad543bc0cd9a3410b"},
-    {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:de3001a203182842a4630e7b8d1a2c7c07ec1b45d3084a83d5d227a3806f530f"},
-    {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e07f4a4a9b41583d6eabec04f8b68076ab3cd44c20bd29332c6572dda36f372e"},
-    {file = "coverage-6.5.0-cp38-cp38-win32.whl", hash = "sha256:6d4817234349a80dbf03640cec6109cd90cba068330703fa65ddf56b60223a6d"},
-    {file = "coverage-6.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:7ccf362abd726b0410bf8911c31fbf97f09f8f1061f8c1cf03dfc4b6372848f6"},
-    {file = "coverage-6.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:633713d70ad6bfc49b34ead4060531658dc6dfc9b3eb7d8a716d5873377ab745"},
-    {file = "coverage-6.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:95203854f974e07af96358c0b261f1048d8e1083f2de9b1c565e1be4a3a48cfc"},
-    {file = "coverage-6.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9023e237f4c02ff739581ef35969c3739445fb059b060ca51771e69101efffe"},
-    {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:265de0fa6778d07de30bcf4d9dc471c3dc4314a23a3c6603d356a3c9abc2dfcf"},
-    {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f830ed581b45b82451a40faabb89c84e1a998124ee4212d440e9c6cf70083e5"},
-    {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7b6be138d61e458e18d8e6ddcddd36dd96215edfe5f1168de0b1b32635839b62"},
-    {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:42eafe6778551cf006a7c43153af1211c3aaab658d4d66fa5fcc021613d02518"},
-    {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:723e8130d4ecc8f56e9a611e73b31219595baa3bb252d539206f7bbbab6ffc1f"},
-    {file = "coverage-6.5.0-cp39-cp39-win32.whl", hash = "sha256:d9ecf0829c6a62b9b573c7bb6d4dcd6ba8b6f80be9ba4fc7ed50bf4ac9aecd72"},
-    {file = "coverage-6.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc2af30ed0d5ae0b1abdb4ebdce598eafd5b35397d4d75deb341a614d333d987"},
-    {file = "coverage-6.5.0-pp36.pp37.pp38-none-any.whl", hash = "sha256:1431986dac3923c5945271f169f59c45b8802a114c8f548d611f2015133df77a"},
-    {file = "coverage-6.5.0.tar.gz", hash = "sha256:f642e90754ee3e06b0e7e51bce3379590e76b7f76b708e1a71ff043f87025c84"},
+    {file = "coverage-7.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c90e73bdecb7b0d1cea65a08cb41e9d672ac6d7995603d6465ed4914b98b9ad7"},
+    {file = "coverage-7.2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e2926b8abedf750c2ecf5035c07515770944acf02e1c46ab08f6348d24c5f94d"},
+    {file = "coverage-7.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57b77b9099f172804e695a40ebaa374f79e4fb8b92f3e167f66facbf92e8e7f5"},
+    {file = "coverage-7.2.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:efe1c0adad110bf0ad7fb59f833880e489a61e39d699d37249bdf42f80590169"},
+    {file = "coverage-7.2.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2199988e0bc8325d941b209f4fd1c6fa007024b1442c5576f1a32ca2e48941e6"},
+    {file = "coverage-7.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:81f63e0fb74effd5be736cfe07d710307cc0a3ccb8f4741f7f053c057615a137"},
+    {file = "coverage-7.2.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:186e0fc9cf497365036d51d4d2ab76113fb74f729bd25da0975daab2e107fd90"},
+    {file = "coverage-7.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:420f94a35e3e00a2b43ad5740f935358e24478354ce41c99407cddd283be00d2"},
+    {file = "coverage-7.2.2-cp310-cp310-win32.whl", hash = "sha256:38004671848b5745bb05d4d621526fca30cee164db42a1f185615f39dc997292"},
+    {file = "coverage-7.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:0ce383d5f56d0729d2dd40e53fe3afeb8f2237244b0975e1427bfb2cf0d32bab"},
+    {file = "coverage-7.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3eb55b7b26389dd4f8ae911ba9bc8c027411163839dea4c8b8be54c4ee9ae10b"},
+    {file = "coverage-7.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d2b96123a453a2d7f3995ddb9f28d01fd112319a7a4d5ca99796a7ff43f02af5"},
+    {file = "coverage-7.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:299bc75cb2a41e6741b5e470b8c9fb78d931edbd0cd009c58e5c84de57c06731"},
+    {file = "coverage-7.2.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5e1df45c23d4230e3d56d04414f9057eba501f78db60d4eeecfcb940501b08fd"},
+    {file = "coverage-7.2.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:006ed5582e9cbc8115d2e22d6d2144a0725db542f654d9d4fda86793832f873d"},
+    {file = "coverage-7.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d683d230b5774816e7d784d7ed8444f2a40e7a450e5720d58af593cb0b94a212"},
+    {file = "coverage-7.2.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8efb48fa743d1c1a65ee8787b5b552681610f06c40a40b7ef94a5b517d885c54"},
+    {file = "coverage-7.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4c752d5264053a7cf2fe81c9e14f8a4fb261370a7bb344c2a011836a96fb3f57"},
+    {file = "coverage-7.2.2-cp311-cp311-win32.whl", hash = "sha256:55272f33da9a5d7cccd3774aeca7a01e500a614eaea2a77091e9be000ecd401d"},
+    {file = "coverage-7.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:92ebc1619650409da324d001b3a36f14f63644c7f0a588e331f3b0f67491f512"},
+    {file = "coverage-7.2.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5afdad4cc4cc199fdf3e18088812edcf8f4c5a3c8e6cb69127513ad4cb7471a9"},
+    {file = "coverage-7.2.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0484d9dd1e6f481b24070c87561c8d7151bdd8b044c93ac99faafd01f695c78e"},
+    {file = "coverage-7.2.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d530191aa9c66ab4f190be8ac8cc7cfd8f4f3217da379606f3dd4e3d83feba69"},
+    {file = "coverage-7.2.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ac0f522c3b6109c4b764ffec71bf04ebc0523e926ca7cbe6c5ac88f84faced0"},
+    {file = "coverage-7.2.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ba279aae162b20444881fc3ed4e4f934c1cf8620f3dab3b531480cf602c76b7f"},
+    {file = "coverage-7.2.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:53d0fd4c17175aded9c633e319360d41a1f3c6e352ba94edcb0fa5167e2bad67"},
+    {file = "coverage-7.2.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c99cb7c26a3039a8a4ee3ca1efdde471e61b4837108847fb7d5be7789ed8fd9"},
+    {file = "coverage-7.2.2-cp37-cp37m-win32.whl", hash = "sha256:5cc0783844c84af2522e3a99b9b761a979a3ef10fb87fc4048d1ee174e18a7d8"},
+    {file = "coverage-7.2.2-cp37-cp37m-win_amd64.whl", hash = "sha256:817295f06eacdc8623dc4df7d8b49cea65925030d4e1e2a7c7218380c0072c25"},
+    {file = "coverage-7.2.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6146910231ece63facfc5984234ad1b06a36cecc9fd0c028e59ac7c9b18c38c6"},
+    {file = "coverage-7.2.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:387fb46cb8e53ba7304d80aadca5dca84a2fbf6fe3faf6951d8cf2d46485d1e5"},
+    {file = "coverage-7.2.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:046936ab032a2810dcaafd39cc4ef6dd295df1a7cbead08fe996d4765fca9fe4"},
+    {file = "coverage-7.2.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e627dee428a176ffb13697a2c4318d3f60b2ccdde3acdc9b3f304206ec130ccd"},
+    {file = "coverage-7.2.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4fa54fb483decc45f94011898727802309a109d89446a3c76387d016057d2c84"},
+    {file = "coverage-7.2.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3668291b50b69a0c1ef9f462c7df2c235da3c4073f49543b01e7eb1dee7dd540"},
+    {file = "coverage-7.2.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7c20b731211261dc9739bbe080c579a1835b0c2d9b274e5fcd903c3a7821cf88"},
+    {file = "coverage-7.2.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5764e1f7471cb8f64b8cda0554f3d4c4085ae4b417bfeab236799863703e5de2"},
+    {file = "coverage-7.2.2-cp38-cp38-win32.whl", hash = "sha256:4f01911c010122f49a3e9bdc730eccc66f9b72bd410a3a9d3cb8448bb50d65d3"},
+    {file = "coverage-7.2.2-cp38-cp38-win_amd64.whl", hash = "sha256:c448b5c9e3df5448a362208b8d4b9ed85305528313fca1b479f14f9fe0d873b8"},
+    {file = "coverage-7.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bfe7085783cda55e53510482fa7b5efc761fad1abe4d653b32710eb548ebdd2d"},
+    {file = "coverage-7.2.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9d22e94e6dc86de981b1b684b342bec5e331401599ce652900ec59db52940005"},
+    {file = "coverage-7.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:507e4720791977934bba016101579b8c500fb21c5fa3cd4cf256477331ddd988"},
+    {file = "coverage-7.2.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bc4803779f0e4b06a2361f666e76f5c2e3715e8e379889d02251ec911befd149"},
+    {file = "coverage-7.2.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db8c2c5ace167fd25ab5dd732714c51d4633f58bac21fb0ff63b0349f62755a8"},
+    {file = "coverage-7.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4f68ee32d7c4164f1e2c8797535a6d0a3733355f5861e0f667e37df2d4b07140"},
+    {file = "coverage-7.2.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d52f0a114b6a58305b11a5cdecd42b2e7f1ec77eb20e2b33969d702feafdd016"},
+    {file = "coverage-7.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:797aad79e7b6182cb49c08cc5d2f7aa7b2128133b0926060d0a8889ac43843be"},
+    {file = "coverage-7.2.2-cp39-cp39-win32.whl", hash = "sha256:db45eec1dfccdadb179b0f9ca616872c6f700d23945ecc8f21bb105d74b1c5fc"},
+    {file = "coverage-7.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:8dbe2647bf58d2c5a6c5bcc685f23b5f371909a5624e9f5cd51436d6a9f6c6ef"},
+    {file = "coverage-7.2.2-pp37.pp38.pp39-none-any.whl", hash = "sha256:872d6ce1f5be73f05bea4df498c140b9e7ee5418bfa2cc8204e7f9b817caa968"},
+    {file = "coverage-7.2.2.tar.gz", hash = "sha256:36dd42da34fe94ed98c39887b86db9d06777b1c8f860520e21126a75507024f2"},
+]
+cycler = [
+    {file = "cycler-0.11.0-py3-none-any.whl", hash = "sha256:3a27e95f763a428a739d2add979fa7494c912a32c17c4c38c4d5f082cad165a3"},
+    {file = "cycler-0.11.0.tar.gz", hash = "sha256:9c87405839a19696e837b3b818fed3f5f69f16f1eec1a1ad77e043dcea9c772f"},
 ]
 distlib = [
     {file = "distlib-0.3.6-py2.py3-none-any.whl", hash = "sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e"},
     {file = "distlib-0.3.6.tar.gz", hash = "sha256:14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46"},
 ]
 dnspython = [
-    {file = "dnspython-2.2.1-py3-none-any.whl", hash = "sha256:a851e51367fb93e9e1361732c1d60dab63eff98712e503ea7d92e6eccb109b4f"},
-    {file = "dnspython-2.2.1.tar.gz", hash = "sha256:0f7569a4a6ff151958b64304071d370daa3243d15941a7beedf0c9fe5105603e"},
+    {file = "dnspython-2.3.0-py3-none-any.whl", hash = "sha256:89141536394f909066cabd112e3e1a37e4e654db00a25308b0f130bc3152eb46"},
+    {file = "dnspython-2.3.0.tar.gz", hash = "sha256:224e32b03eb46be70e12ef6d64e0be123a64e621ab4c0822ff6d450d52a540b9"},
 ]
 docutils = [
-    {file = "docutils-0.17.1-py2.py3-none-any.whl", hash = "sha256:cf316c8370a737a022b72b56874f6602acf974a37a9fba42ec2876387549fc61"},
-    {file = "docutils-0.17.1.tar.gz", hash = "sha256:686577d2e4c32380bb50cbb22f575ed742d58168cee37e99117a854bcd88f125"},
+    {file = "docutils-0.19-py3-none-any.whl", hash = "sha256:5e1de4d849fee02c63b040a4a3fd567f4ab104defd8a5511fbbc24a8a017efbc"},
+    {file = "docutils-0.19.tar.gz", hash = "sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6"},
 ]
 exceptiongroup = [
-    {file = "exceptiongroup-1.0.4-py3-none-any.whl", hash = "sha256:542adf9dea4055530d6e1279602fa5cb11dab2395fa650b8674eaec35fc4a828"},
-    {file = "exceptiongroup-1.0.4.tar.gz", hash = "sha256:bd14967b79cd9bdb54d97323216f8fdf533e278df937aa2a90089e7d6e06e5ec"},
+    {file = "exceptiongroup-1.1.1-py3-none-any.whl", hash = "sha256:232c37c63e4f682982c8b6459f33a8981039e5fb8756b2074364e5055c498c9e"},
+    {file = "exceptiongroup-1.1.1.tar.gz", hash = "sha256:d484c3090ba2889ae2928419117447a14daf3c1231d5e30d0aae34f354f01785"},
 ]
 execnet = [
     {file = "execnet-1.9.0-py2.py3-none-any.whl", hash = "sha256:a295f7cc774947aac58dde7fdc85f4aa00c42adf5d8f5468fc630c1acf30a142"},
     {file = "execnet-1.9.0.tar.gz", hash = "sha256:8f694f3ba9cc92cab508b152dcfe322153975c29bda272e2fd7f3f00f36e47c5"},
 ]
 filelock = [
-    {file = "filelock-3.8.2-py3-none-any.whl", hash = "sha256:8df285554452285f79c035efb0c861eb33a4bcfa5b7a137016e32e6a90f9792c"},
-    {file = "filelock-3.8.2.tar.gz", hash = "sha256:7565f628ea56bfcd8e54e42bdc55da899c85c1abfe1b5bcfd147e9188cebb3b2"},
+    {file = "filelock-3.10.4-py3-none-any.whl", hash = "sha256:6d332dc5c896f18ba93a21d987155e97c434a96d3fe4042ca70d0b3b46e3b470"},
+    {file = "filelock-3.10.4.tar.gz", hash = "sha256:9fc1734dbddcdcd4aaa02c160dd94db5272b92dfa859b44ec8df28e160b751f0"},
+]
+fonttools = [
+    {file = "fonttools-4.38.0-py3-none-any.whl", hash = "sha256:820466f43c8be8c3009aef8b87e785014133508f0de64ec469e4efb643ae54fb"},
+    {file = "fonttools-4.38.0.zip", hash = "sha256:2bb244009f9bf3fa100fc3ead6aeb99febe5985fa20afbfbaa2f8946c2fbdaf1"},
 ]
 furo = [
-    {file = "furo-2022.9.29-py3-none-any.whl", hash = "sha256:559ee17999c0f52728481dcf6b1b0cf8c9743e68c5e3a18cb45a7992747869a9"},
-    {file = "furo-2022.9.29.tar.gz", hash = "sha256:d4238145629c623609c2deb5384f8d036e2a1ee2a101d64b67b4348112470dbd"},
+    {file = "furo-2022.12.7-py3-none-any.whl", hash = "sha256:7cb76c12a25ef65db85ab0743df907573d03027a33631f17d267e598ebb191f7"},
+    {file = "furo-2022.12.7.tar.gz", hash = "sha256:d8008f8efbe7587a97ba533c8b2df1f9c21ee9b3e5cad0d27f61193d38b1a986"},
 ]
 identify = [
-    {file = "identify-2.5.9-py2.py3-none-any.whl", hash = "sha256:a390fb696e164dbddb047a0db26e57972ae52fbd037ae68797e5ae2f4492485d"},
-    {file = "identify-2.5.9.tar.gz", hash = "sha256:906036344ca769539610436e40a684e170c3648b552194980bb7b617a8daeb9f"},
+    {file = "identify-2.5.22-py2.py3-none-any.whl", hash = "sha256:f0faad595a4687053669c112004178149f6c326db71ee999ae4636685753ad2f"},
+    {file = "identify-2.5.22.tar.gz", hash = "sha256:f7a93d6cf98e29bd07663c60728e7a4057615068d7a639d132dc883b2d54d31e"},
 ]
 idna = [
     {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"},
@@ -1415,12 +1573,12 @@ imagesize = [
     {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"},
 ]
 importlib-metadata = [
-    {file = "importlib_metadata-4.13.0-py3-none-any.whl", hash = "sha256:8a8a81bcf996e74fee46f0d16bd3eaa382a7eb20fd82445c3ad11f4090334116"},
-    {file = "importlib_metadata-4.13.0.tar.gz", hash = "sha256:dd0173e8f150d6815e098fd354f6414b0f079af4644ddfe90c71e2fc6174346d"},
+    {file = "importlib_metadata-5.2.0-py3-none-any.whl", hash = "sha256:0eafa39ba42bf225fc00e67f701d71f85aead9f878569caf13c3724f704b970f"},
+    {file = "importlib_metadata-5.2.0.tar.gz", hash = "sha256:404d48d62bba0b7a77ff9d405efd91501bef2e67ff4ace0bed40a0cf28c3c7cd"},
 ]
 iniconfig = [
-    {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"},
-    {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"},
+    {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"},
+    {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
 ]
 itsdangerous = [
     {file = "itsdangerous-2.1.2-py3-none-any.whl", hash = "sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44"},
@@ -1434,90 +1592,320 @@ jmespath = [
     {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"},
     {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"},
 ]
+kiwisolver = [
+    {file = "kiwisolver-1.4.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:2f5e60fabb7343a836360c4f0919b8cd0d6dbf08ad2ca6b9cf90bf0c76a3c4f6"},
+    {file = "kiwisolver-1.4.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:10ee06759482c78bdb864f4109886dff7b8a56529bc1609d4f1112b93fe6423c"},
+    {file = "kiwisolver-1.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c79ebe8f3676a4c6630fd3f777f3cfecf9289666c84e775a67d1d358578dc2e3"},
+    {file = "kiwisolver-1.4.4-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:abbe9fa13da955feb8202e215c4018f4bb57469b1b78c7a4c5c7b93001699938"},
+    {file = "kiwisolver-1.4.4-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7577c1987baa3adc4b3c62c33bd1118c3ef5c8ddef36f0f2c950ae0b199e100d"},
+    {file = "kiwisolver-1.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8ad8285b01b0d4695102546b342b493b3ccc6781fc28c8c6a1bb63e95d22f09"},
+    {file = "kiwisolver-1.4.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8ed58b8acf29798b036d347791141767ccf65eee7f26bde03a71c944449e53de"},
+    {file = "kiwisolver-1.4.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a68b62a02953b9841730db7797422f983935aeefceb1679f0fc85cbfbd311c32"},
+    {file = "kiwisolver-1.4.4-cp310-cp310-win32.whl", hash = "sha256:e92a513161077b53447160b9bd8f522edfbed4bd9759e4c18ab05d7ef7e49408"},
+    {file = "kiwisolver-1.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:3fe20f63c9ecee44560d0e7f116b3a747a5d7203376abeea292ab3152334d004"},
+    {file = "kiwisolver-1.4.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e0ea21f66820452a3f5d1655f8704a60d66ba1191359b96541eaf457710a5fc6"},
+    {file = "kiwisolver-1.4.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bc9db8a3efb3e403e4ecc6cd9489ea2bac94244f80c78e27c31dcc00d2790ac2"},
+    {file = "kiwisolver-1.4.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d5b61785a9ce44e5a4b880272baa7cf6c8f48a5180c3e81c59553ba0cb0821ca"},
+    {file = "kiwisolver-1.4.4-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c2dbb44c3f7e6c4d3487b31037b1bdbf424d97687c1747ce4ff2895795c9bf69"},
+    {file = "kiwisolver-1.4.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6295ecd49304dcf3bfbfa45d9a081c96509e95f4b9d0eb7ee4ec0530c4a96514"},
+    {file = "kiwisolver-1.4.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4bd472dbe5e136f96a4b18f295d159d7f26fd399136f5b17b08c4e5f498cd494"},
+    {file = "kiwisolver-1.4.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bf7d9fce9bcc4752ca4a1b80aabd38f6d19009ea5cbda0e0856983cf6d0023f5"},
+    {file = "kiwisolver-1.4.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78d6601aed50c74e0ef02f4204da1816147a6d3fbdc8b3872d263338a9052c51"},
+    {file = "kiwisolver-1.4.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:877272cf6b4b7e94c9614f9b10140e198d2186363728ed0f701c6eee1baec1da"},
+    {file = "kiwisolver-1.4.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:db608a6757adabb32f1cfe6066e39b3706d8c3aa69bbc353a5b61edad36a5cb4"},
+    {file = "kiwisolver-1.4.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:5853eb494c71e267912275e5586fe281444eb5e722de4e131cddf9d442615626"},
+    {file = "kiwisolver-1.4.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:f0a1dbdb5ecbef0d34eb77e56fcb3e95bbd7e50835d9782a45df81cc46949750"},
+    {file = "kiwisolver-1.4.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:283dffbf061a4ec60391d51e6155e372a1f7a4f5b15d59c8505339454f8989e4"},
+    {file = "kiwisolver-1.4.4-cp311-cp311-win32.whl", hash = "sha256:d06adcfa62a4431d404c31216f0f8ac97397d799cd53800e9d3efc2fbb3cf14e"},
+    {file = "kiwisolver-1.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:e7da3fec7408813a7cebc9e4ec55afed2d0fd65c4754bc376bf03498d4e92686"},
+    {file = "kiwisolver-1.4.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:62ac9cc684da4cf1778d07a89bf5f81b35834cb96ca523d3a7fb32509380cbf6"},
+    {file = "kiwisolver-1.4.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41dae968a94b1ef1897cb322b39360a0812661dba7c682aa45098eb8e193dbdf"},
+    {file = "kiwisolver-1.4.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:02f79693ec433cb4b5f51694e8477ae83b3205768a6fb48ffba60549080e295b"},
+    {file = "kiwisolver-1.4.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d0611a0a2a518464c05ddd5a3a1a0e856ccc10e67079bb17f265ad19ab3c7597"},
+    {file = "kiwisolver-1.4.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:db5283d90da4174865d520e7366801a93777201e91e79bacbac6e6927cbceede"},
+    {file = "kiwisolver-1.4.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1041feb4cda8708ce73bb4dcb9ce1ccf49d553bf87c3954bdfa46f0c3f77252c"},
+    {file = "kiwisolver-1.4.4-cp37-cp37m-win32.whl", hash = "sha256:a553dadda40fef6bfa1456dc4be49b113aa92c2a9a9e8711e955618cd69622e3"},
+    {file = "kiwisolver-1.4.4-cp37-cp37m-win_amd64.whl", hash = "sha256:03baab2d6b4a54ddbb43bba1a3a2d1627e82d205c5cf8f4c924dc49284b87166"},
+    {file = "kiwisolver-1.4.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:841293b17ad704d70c578f1f0013c890e219952169ce8a24ebc063eecf775454"},
+    {file = "kiwisolver-1.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f4f270de01dd3e129a72efad823da90cc4d6aafb64c410c9033aba70db9f1ff0"},
+    {file = "kiwisolver-1.4.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f9f39e2f049db33a908319cf46624a569b36983c7c78318e9726a4cb8923b26c"},
+    {file = "kiwisolver-1.4.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c97528e64cb9ebeff9701e7938653a9951922f2a38bd847787d4a8e498cc83ae"},
+    {file = "kiwisolver-1.4.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d1573129aa0fd901076e2bfb4275a35f5b7aa60fbfb984499d661ec950320b0"},
+    {file = "kiwisolver-1.4.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ad881edc7ccb9d65b0224f4e4d05a1e85cf62d73aab798943df6d48ab0cd79a1"},
+    {file = "kiwisolver-1.4.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b428ef021242344340460fa4c9185d0b1f66fbdbfecc6c63eff4b7c29fad429d"},
+    {file = "kiwisolver-1.4.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:2e407cb4bd5a13984a6c2c0fe1845e4e41e96f183e5e5cd4d77a857d9693494c"},
+    {file = "kiwisolver-1.4.4-cp38-cp38-win32.whl", hash = "sha256:75facbe9606748f43428fc91a43edb46c7ff68889b91fa31f53b58894503a191"},
+    {file = "kiwisolver-1.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:5bce61af018b0cb2055e0e72e7d65290d822d3feee430b7b8203d8a855e78766"},
+    {file = "kiwisolver-1.4.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8c808594c88a025d4e322d5bb549282c93c8e1ba71b790f539567932722d7bd8"},
+    {file = "kiwisolver-1.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f0a71d85ecdd570ded8ac3d1c0f480842f49a40beb423bb8014539a9f32a5897"},
+    {file = "kiwisolver-1.4.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b533558eae785e33e8c148a8d9921692a9fe5aa516efbdff8606e7d87b9d5824"},
+    {file = "kiwisolver-1.4.4-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:efda5fc8cc1c61e4f639b8067d118e742b812c930f708e6667a5ce0d13499e29"},
+    {file = "kiwisolver-1.4.4-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7c43e1e1206cd421cd92e6b3280d4385d41d7166b3ed577ac20444b6995a445f"},
+    {file = "kiwisolver-1.4.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc8d3bd6c72b2dd9decf16ce70e20abcb3274ba01b4e1c96031e0c4067d1e7cd"},
+    {file = "kiwisolver-1.4.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4ea39b0ccc4f5d803e3337dd46bcce60b702be4d86fd0b3d7531ef10fd99a1ac"},
+    {file = "kiwisolver-1.4.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:968f44fdbf6dd757d12920d63b566eeb4d5b395fd2d00d29d7ef00a00582aac9"},
+    {file = "kiwisolver-1.4.4-cp39-cp39-win32.whl", hash = "sha256:da7e547706e69e45d95e116e6939488d62174e033b763ab1496b4c29b76fabea"},
+    {file = "kiwisolver-1.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:ba59c92039ec0a66103b1d5fe588fa546373587a7d68f5c96f743c3396afc04b"},
+    {file = "kiwisolver-1.4.4-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:91672bacaa030f92fc2f43b620d7b337fd9a5af28b0d6ed3f77afc43c4a64b5a"},
+    {file = "kiwisolver-1.4.4-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:787518a6789009c159453da4d6b683f468ef7a65bbde796bcea803ccf191058d"},
+    {file = "kiwisolver-1.4.4-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da152d8cdcab0e56e4f45eb08b9aea6455845ec83172092f09b0e077ece2cf7a"},
+    {file = "kiwisolver-1.4.4-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:ecb1fa0db7bf4cff9dac752abb19505a233c7f16684c5826d1f11ebd9472b871"},
+    {file = "kiwisolver-1.4.4-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:28bc5b299f48150b5f822ce68624e445040595a4ac3d59251703779836eceff9"},
+    {file = "kiwisolver-1.4.4-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:81e38381b782cc7e1e46c4e14cd997ee6040768101aefc8fa3c24a4cc58e98f8"},
+    {file = "kiwisolver-1.4.4-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2a66fdfb34e05b705620dd567f5a03f239a088d5a3f321e7b6ac3239d22aa286"},
+    {file = "kiwisolver-1.4.4-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:872b8ca05c40d309ed13eb2e582cab0c5a05e81e987ab9c521bf05ad1d5cf5cb"},
+    {file = "kiwisolver-1.4.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:70e7c2e7b750585569564e2e5ca9845acfaa5da56ac46df68414f29fea97be9f"},
+    {file = "kiwisolver-1.4.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9f85003f5dfa867e86d53fac6f7e6f30c045673fa27b603c397753bebadc3008"},
+    {file = "kiwisolver-1.4.4-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e307eb9bd99801f82789b44bb45e9f541961831c7311521b13a6c85afc09767"},
+    {file = "kiwisolver-1.4.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1792d939ec70abe76f5054d3f36ed5656021dcad1322d1cc996d4e54165cef9"},
+    {file = "kiwisolver-1.4.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6cb459eea32a4e2cf18ba5fcece2dbdf496384413bc1bae15583f19e567f3b2"},
+    {file = "kiwisolver-1.4.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:36dafec3d6d6088d34e2de6b85f9d8e2324eb734162fba59d2ba9ed7a2043d5b"},
+    {file = "kiwisolver-1.4.4.tar.gz", hash = "sha256:d41997519fcba4a1e46eb4a2fe31bc12f0ff957b2b81bac28db24744f333e955"},
+]
 linkify-it-py = [
-    {file = "linkify-it-py-1.0.3.tar.gz", hash = "sha256:2b3f168d5ce75e3a425e34b341a6b73e116b5d9ed8dbbbf5dc7456843b7ce2ee"},
-    {file = "linkify_it_py-1.0.3-py3-none-any.whl", hash = "sha256:11e29f00150cddaa8f434153f103c14716e7e097a8fd372d9eb1ed06ed91524d"},
+    {file = "linkify-it-py-2.0.0.tar.gz", hash = "sha256:476464480906bed8b2fa3813bf55566282e55214ad7e41b7d1c2b564666caf2f"},
+    {file = "linkify_it_py-2.0.0-py3-none-any.whl", hash = "sha256:1bff43823e24e507a099e328fc54696124423dd6320c75a9da45b4b754b748ad"},
 ]
 livereload = [
     {file = "livereload-2.6.3.tar.gz", hash = "sha256:776f2f865e59fde56490a56bcc6773b6917366bce0c267c60ee8aaf1a0959869"},
 ]
 markdown-it-py = [
-    {file = "markdown-it-py-1.1.0.tar.gz", hash = "sha256:36be6bb3ad987bfdb839f5ba78ddf094552ca38ccbd784ae4f74a4e1419fc6e3"},
-    {file = "markdown_it_py-1.1.0-py3-none-any.whl", hash = "sha256:98080fc0bc34c4f2bcf0846a096a9429acbd9d5d8e67ed34026c03c61c464389"},
+    {file = "markdown-it-py-2.2.0.tar.gz", hash = "sha256:7c9a5e412688bc771c67432cbfebcdd686c93ce6484913dccf06cb5a0bea35a1"},
+    {file = "markdown_it_py-2.2.0-py3-none-any.whl", hash = "sha256:5a35f8d1870171d9acc47b99612dc146129b631baf04970128b568f190d0cc30"},
 ]
 markupsafe = [
-    {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812"},
-    {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a"},
-    {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e"},
-    {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5"},
-    {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4"},
-    {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f"},
-    {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e"},
-    {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933"},
-    {file = "MarkupSafe-2.1.1-cp310-cp310-win32.whl", hash = "sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6"},
-    {file = "MarkupSafe-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417"},
-    {file = "MarkupSafe-2.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02"},
-    {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a"},
-    {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37"},
-    {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980"},
-    {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a"},
-    {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3"},
-    {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a"},
-    {file = "MarkupSafe-2.1.1-cp37-cp37m-win32.whl", hash = "sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff"},
-    {file = "MarkupSafe-2.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a"},
-    {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452"},
-    {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003"},
-    {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1"},
-    {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601"},
-    {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925"},
-    {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f"},
-    {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88"},
-    {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63"},
-    {file = "MarkupSafe-2.1.1-cp38-cp38-win32.whl", hash = "sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1"},
-    {file = "MarkupSafe-2.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7"},
-    {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a"},
-    {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f"},
-    {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6"},
-    {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77"},
-    {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603"},
-    {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7"},
-    {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135"},
-    {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96"},
-    {file = "MarkupSafe-2.1.1-cp39-cp39-win32.whl", hash = "sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c"},
-    {file = "MarkupSafe-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247"},
-    {file = "MarkupSafe-2.1.1.tar.gz", hash = "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b"},
+    {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:665a36ae6f8f20a4676b53224e33d456a6f5a72657d9c83c2aa00765072f31f7"},
+    {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:340bea174e9761308703ae988e982005aedf427de816d1afe98147668cc03036"},
+    {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22152d00bf4a9c7c83960521fc558f55a1adbc0631fbb00a9471e097b19d72e1"},
+    {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28057e985dace2f478e042eaa15606c7efccb700797660629da387eb289b9323"},
+    {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca244fa73f50a800cf8c3ebf7fd93149ec37f5cb9596aa8873ae2c1d23498601"},
+    {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d9d971ec1e79906046aa3ca266de79eac42f1dbf3612a05dc9368125952bd1a1"},
+    {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7e007132af78ea9df29495dbf7b5824cb71648d7133cf7848a2a5dd00d36f9ff"},
+    {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7313ce6a199651c4ed9d7e4cfb4aa56fe923b1adf9af3b420ee14e6d9a73df65"},
+    {file = "MarkupSafe-2.1.2-cp310-cp310-win32.whl", hash = "sha256:c4a549890a45f57f1ebf99c067a4ad0cb423a05544accaf2b065246827ed9603"},
+    {file = "MarkupSafe-2.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:835fb5e38fd89328e9c81067fd642b3593c33e1e17e2fdbf77f5676abb14a156"},
+    {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2ec4f2d48ae59bbb9d1f9d7efb9236ab81429a764dedca114f5fdabbc3788013"},
+    {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:608e7073dfa9e38a85d38474c082d4281f4ce276ac0010224eaba11e929dd53a"},
+    {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65608c35bfb8a76763f37036547f7adfd09270fbdbf96608be2bead319728fcd"},
+    {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2bfb563d0211ce16b63c7cb9395d2c682a23187f54c3d79bfec33e6705473c6"},
+    {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da25303d91526aac3672ee6d49a2f3db2d9502a4a60b55519feb1a4c7714e07d"},
+    {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9cad97ab29dfc3f0249b483412c85c8ef4766d96cdf9dcf5a1e3caa3f3661cf1"},
+    {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc"},
+    {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1bea30e9bf331f3fef67e0a3877b2288593c98a21ccb2cf29b74c581a4eb3af0"},
+    {file = "MarkupSafe-2.1.2-cp311-cp311-win32.whl", hash = "sha256:7df70907e00c970c60b9ef2938d894a9381f38e6b9db73c5be35e59d92e06625"},
+    {file = "MarkupSafe-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:e55e40ff0cc8cc5c07996915ad367fa47da6b3fc091fdadca7f5403239c5fec3"},
+    {file = "MarkupSafe-2.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a6e40afa7f45939ca356f348c8e23048e02cb109ced1eb8420961b2f40fb373a"},
+    {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf877ab4ed6e302ec1d04952ca358b381a882fbd9d1b07cccbfd61783561f98a"},
+    {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63ba06c9941e46fa389d389644e2d8225e0e3e5ebcc4ff1ea8506dce646f8c8a"},
+    {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f1cd098434e83e656abf198f103a8207a8187c0fc110306691a2e94a78d0abb2"},
+    {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:55f44b440d491028addb3b88f72207d71eeebfb7b5dbf0643f7c023ae1fba619"},
+    {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a6f2fcca746e8d5910e18782f976489939d54a91f9411c32051b4aab2bd7c513"},
+    {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0b462104ba25f1ac006fdab8b6a01ebbfbce9ed37fd37fd4acd70c67c973e460"},
+    {file = "MarkupSafe-2.1.2-cp37-cp37m-win32.whl", hash = "sha256:7668b52e102d0ed87cb082380a7e2e1e78737ddecdde129acadb0eccc5423859"},
+    {file = "MarkupSafe-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6d6607f98fcf17e534162f0709aaad3ab7a96032723d8ac8750ffe17ae5a0666"},
+    {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a806db027852538d2ad7555b203300173dd1b77ba116de92da9afbc3a3be3eed"},
+    {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a4abaec6ca3ad8660690236d11bfe28dfd707778e2442b45addd2f086d6ef094"},
+    {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f03a532d7dee1bed20bc4884194a16160a2de9ffc6354b3878ec9682bb623c54"},
+    {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cf06cdc1dda95223e9d2d3c58d3b178aa5dacb35ee7e3bbac10e4e1faacb419"},
+    {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22731d79ed2eb25059ae3df1dfc9cb1546691cc41f4e3130fe6bfbc3ecbbecfa"},
+    {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f8ffb705ffcf5ddd0e80b65ddf7bed7ee4f5a441ea7d3419e861a12eaf41af58"},
+    {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8db032bf0ce9022a8e41a22598eefc802314e81b879ae093f36ce9ddf39ab1ba"},
+    {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2298c859cfc5463f1b64bd55cb3e602528db6fa0f3cfd568d3605c50678f8f03"},
+    {file = "MarkupSafe-2.1.2-cp38-cp38-win32.whl", hash = "sha256:50c42830a633fa0cf9e7d27664637532791bfc31c731a87b202d2d8ac40c3ea2"},
+    {file = "MarkupSafe-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:bb06feb762bade6bf3c8b844462274db0c76acc95c52abe8dbed28ae3d44a147"},
+    {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:99625a92da8229df6d44335e6fcc558a5037dd0a760e11d84be2260e6f37002f"},
+    {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8bca7e26c1dd751236cfb0c6c72d4ad61d986e9a41bbf76cb445f69488b2a2bd"},
+    {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40627dcf047dadb22cd25ea7ecfe9cbf3bbbad0482ee5920b582f3809c97654f"},
+    {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40dfd3fefbef579ee058f139733ac336312663c6706d1163b82b3003fb1925c4"},
+    {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2"},
+    {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2e7821bffe00aa6bd07a23913b7f4e01328c3d5cc0b40b36c0bd81d362faeb65"},
+    {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c0a33bc9f02c2b17c3ea382f91b4db0e6cde90b63b296422a939886a7a80de1c"},
+    {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b8526c6d437855442cdd3d87eede9c425c4445ea011ca38d937db299382e6fa3"},
+    {file = "MarkupSafe-2.1.2-cp39-cp39-win32.whl", hash = "sha256:137678c63c977754abe9086a3ec011e8fd985ab90631145dfb9294ad09c102a7"},
+    {file = "MarkupSafe-2.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed"},
+    {file = "MarkupSafe-2.1.2.tar.gz", hash = "sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d"},
+]
+matplotlib = [
+    {file = "matplotlib-3.5.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a206a1b762b39398efea838f528b3a6d60cdb26fe9d58b48265787e29cd1d693"},
+    {file = "matplotlib-3.5.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cd45a6f3e93a780185f70f05cf2a383daed13c3489233faad83e81720f7ede24"},
+    {file = "matplotlib-3.5.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d62880e1f60e5a30a2a8484432bcb3a5056969dc97258d7326ad465feb7ae069"},
+    {file = "matplotlib-3.5.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ab29589cef03bc88acfa3a1490359000c18186fc30374d8aa77d33cc4a51a4a"},
+    {file = "matplotlib-3.5.3-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2886cc009f40e2984c083687251821f305d811d38e3df8ded414265e4583f0c5"},
+    {file = "matplotlib-3.5.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c995f7d9568f18b5db131ab124c64e51b6820a92d10246d4f2b3f3a66698a15b"},
+    {file = "matplotlib-3.5.3-cp310-cp310-win32.whl", hash = "sha256:6bb93a0492d68461bd458eba878f52fdc8ac7bdb6c4acdfe43dba684787838c2"},
+    {file = "matplotlib-3.5.3-cp310-cp310-win_amd64.whl", hash = "sha256:2e6d184ebe291b9e8f7e78bbab7987d269c38ea3e062eace1fe7d898042ef804"},
+    {file = "matplotlib-3.5.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6ea6aef5c4338e58d8d376068e28f80a24f54e69f09479d1c90b7172bad9f25b"},
+    {file = "matplotlib-3.5.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:839d47b8ead7ad9669aaacdbc03f29656dc21f0d41a6fea2d473d856c39c8b1c"},
+    {file = "matplotlib-3.5.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3b4fa56159dc3c7f9250df88f653f085068bcd32dcd38e479bba58909254af7f"},
+    {file = "matplotlib-3.5.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:94ff86af56a3869a4ae26a9637a849effd7643858a1a04dd5ee50e9ab75069a7"},
+    {file = "matplotlib-3.5.3-cp37-cp37m-win32.whl", hash = "sha256:35a8ad4dddebd51f94c5d24bec689ec0ec66173bf614374a1244c6241c1595e0"},
+    {file = "matplotlib-3.5.3-cp37-cp37m-win_amd64.whl", hash = "sha256:43e9d3fa077bf0cc95ded13d331d2156f9973dce17c6f0c8b49ccd57af94dbd9"},
+    {file = "matplotlib-3.5.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:22227c976ad4dc8c5a5057540421f0d8708c6560744ad2ad638d48e2984e1dbc"},
+    {file = "matplotlib-3.5.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bf618a825deb6205f015df6dfe6167a5d9b351203b03fab82043ae1d30f16511"},
+    {file = "matplotlib-3.5.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9befa5954cdbc085e37d974ff6053da269474177921dd61facdad8023c4aeb51"},
+    {file = "matplotlib-3.5.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3840c280ebc87a48488a46f760ea1c0c0c83fcf7abbe2e6baf99d033fd35fd8"},
+    {file = "matplotlib-3.5.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dacddf5bfcec60e3f26ec5c0ae3d0274853a258b6c3fc5ef2f06a8eb23e042be"},
+    {file = "matplotlib-3.5.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:b428076a55fb1c084c76cb93e68006f27d247169f056412607c5c88828d08f88"},
+    {file = "matplotlib-3.5.3-cp38-cp38-win32.whl", hash = "sha256:874df7505ba820e0400e7091199decf3ff1fde0583652120c50cd60d5820ca9a"},
+    {file = "matplotlib-3.5.3-cp38-cp38-win_amd64.whl", hash = "sha256:b28de401d928890187c589036857a270a032961411934bdac4cf12dde3d43094"},
+    {file = "matplotlib-3.5.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3211ba82b9f1518d346f6309df137b50c3dc4421b4ed4815d1d7eadc617f45a1"},
+    {file = "matplotlib-3.5.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6fe807e8a22620b4cd95cfbc795ba310dc80151d43b037257250faf0bfcd82bc"},
+    {file = "matplotlib-3.5.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5c096363b206a3caf43773abebdbb5a23ea13faef71d701b21a9c27fdcef72f4"},
+    {file = "matplotlib-3.5.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bcdfcb0f976e1bac6721d7d457c17be23cf7501f977b6a38f9d38a3762841f7"},
+    {file = "matplotlib-3.5.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1e64ac9be9da6bfff0a732e62116484b93b02a0b4d4b19934fb4f8e7ad26ad6a"},
+    {file = "matplotlib-3.5.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:73dd93dc35c85dece610cca8358003bf0760d7986f70b223e2306b4ea6d1406b"},
+    {file = "matplotlib-3.5.3-cp39-cp39-win32.whl", hash = "sha256:879c7e5fce4939c6aa04581dfe08d57eb6102a71f2e202e3314d5fbc072fd5a0"},
+    {file = "matplotlib-3.5.3-cp39-cp39-win_amd64.whl", hash = "sha256:ab8d26f07fe64f6f6736d635cce7bfd7f625320490ed5bfc347f2cdb4fae0e56"},
+    {file = "matplotlib-3.5.3-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:99482b83ebf4eb6d5fc6813d7aacdefdd480f0d9c0b52dcf9f1cc3b2c4b3361a"},
+    {file = "matplotlib-3.5.3-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f814504e459c68118bf2246a530ed953ebd18213dc20e3da524174d84ed010b2"},
+    {file = "matplotlib-3.5.3-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:57f1b4e69f438a99bb64d7f2c340db1b096b41ebaa515cf61ea72624279220ce"},
+    {file = "matplotlib-3.5.3-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:d2484b350bf3d32cae43f85dcfc89b3ed7bd2bcd781ef351f93eb6fb2cc483f9"},
+    {file = "matplotlib-3.5.3.tar.gz", hash = "sha256:339cac48b80ddbc8bfd05daae0a3a73414651a8596904c2a881cfd1edb65f26c"},
 ]
 mdit-py-plugins = [
-    {file = "mdit-py-plugins-0.2.8.tar.gz", hash = "sha256:5991cef645502e80a5388ec4fc20885d2313d4871e8b8e320ca2de14ac0c015f"},
-    {file = "mdit_py_plugins-0.2.8-py3-none-any.whl", hash = "sha256:1833bf738e038e35d89cb3a07eb0d227ed647ce7dd357579b65343740c6d249c"},
+    {file = "mdit-py-plugins-0.3.5.tar.gz", hash = "sha256:eee0adc7195e5827e17e02d2a258a2ba159944a0748f59c5099a4a27f78fcf6a"},
+    {file = "mdit_py_plugins-0.3.5-py3-none-any.whl", hash = "sha256:ca9a0714ea59a24b2b044a1831f48d817dd0c817e84339f20e7889f392d77c4e"},
+]
+mdurl = [
+    {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"},
+    {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"},
 ]
 myst-parser = [
-    {file = "myst-parser-0.15.2.tar.gz", hash = "sha256:f7f3b2d62db7655cde658eb5d62b2ec2a4631308137bd8d10f296a40d57bbbeb"},
-    {file = "myst_parser-0.15.2-py3-none-any.whl", hash = "sha256:40124b6f27a4c42ac7f06b385e23a9dcd03d84801e9c7130b59b3729a554b1f9"},
+    {file = "myst-parser-1.0.0.tar.gz", hash = "sha256:502845659313099542bd38a2ae62f01360e7dd4b1310f025dd014dfc0439cdae"},
+    {file = "myst_parser-1.0.0-py3-none-any.whl", hash = "sha256:69fb40a586c6fa68995e6521ac0a525793935db7e724ca9bac1d33be51be9a4c"},
 ]
 nodeenv = [
     {file = "nodeenv-1.7.0-py2.py3-none-any.whl", hash = "sha256:27083a7b96a25f2f5e1d8cb4b6317ee8aeda3bdd121394e5ac54e498028a042e"},
     {file = "nodeenv-1.7.0.tar.gz", hash = "sha256:e0e7f7dfb85fc5394c6fe1e8fa98131a2473e04311a45afb6508f7cf1836fa2b"},
 ]
 nox = [
-    {file = "nox-2021.10.1-py3-none-any.whl", hash = "sha256:1bb224fb09c26c482932f0e3038ef01c27b4025d559066443a4da1f96daad01a"},
-    {file = "nox-2021.10.1.tar.gz", hash = "sha256:0a1c735d5e90fa234046b58a5ad61d08bc13ae77ab213da9b58d5cc2d25023ae"},
+    {file = "nox-2022.11.21-py3-none-any.whl", hash = "sha256:0e41a990e290e274cb205a976c4c97ee3c5234441a8132c8c3fd9ea3c22149eb"},
+    {file = "nox-2022.11.21.tar.gz", hash = "sha256:e21c31de0711d1274ca585a2c5fde36b1aa962005ba8e9322bf5eeed16dcd684"},
 ]
 nox-poetry = [
-    {file = "nox-poetry-0.9.0.tar.gz", hash = "sha256:ea48fa535cd048854da35af7c6c3e92046fbed9b9023bb81193fb4d2d3a47c92"},
-    {file = "nox_poetry-0.9.0-py3-none-any.whl", hash = "sha256:33423c855fb47e2901faf9e15937326bc20c6e356eef825903eed4f8bbda69d3"},
+    {file = "nox-poetry-1.0.2.tar.gz", hash = "sha256:22bc397979393a0283f5161af708a3a430e1c7e0cc2be274c7b27e9e46de0412"},
+    {file = "nox_poetry-1.0.2-py3-none-any.whl", hash = "sha256:a53c36eccbd67f15b5b83dd6562d077dd326c71fd4a942528d8b2299c417dbbe"},
+]
+numpy = [
+    {file = "numpy-1.21.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:38e8648f9449a549a7dfe8d8755a5979b45b3538520d1e735637ef28e8c2dc50"},
+    {file = "numpy-1.21.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:fd7d7409fa643a91d0a05c7554dd68aa9c9bb16e186f6ccfe40d6e003156e33a"},
+    {file = "numpy-1.21.1-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a75b4498b1e93d8b700282dc8e655b8bd559c0904b3910b144646dbbbc03e062"},
+    {file = "numpy-1.21.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1412aa0aec3e00bc23fbb8664d76552b4efde98fb71f60737c83efbac24112f1"},
+    {file = "numpy-1.21.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e46ceaff65609b5399163de5893d8f2a82d3c77d5e56d976c8b5fb01faa6b671"},
+    {file = "numpy-1.21.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:c6a2324085dd52f96498419ba95b5777e40b6bcbc20088fddb9e8cbb58885e8e"},
+    {file = "numpy-1.21.1-cp37-cp37m-win32.whl", hash = "sha256:73101b2a1fef16602696d133db402a7e7586654682244344b8329cdcbbb82172"},
+    {file = "numpy-1.21.1-cp37-cp37m-win_amd64.whl", hash = "sha256:7a708a79c9a9d26904d1cca8d383bf869edf6f8e7650d85dbc77b041e8c5a0f8"},
+    {file = "numpy-1.21.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:95b995d0c413f5d0428b3f880e8fe1660ff9396dcd1f9eedbc311f37b5652e16"},
+    {file = "numpy-1.21.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:635e6bd31c9fb3d475c8f44a089569070d10a9ef18ed13738b03049280281267"},
+    {file = "numpy-1.21.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4a3d5fb89bfe21be2ef47c0614b9c9c707b7362386c9a3ff1feae63e0267ccb6"},
+    {file = "numpy-1.21.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:8a326af80e86d0e9ce92bcc1e65c8ff88297de4fa14ee936cb2293d414c9ec63"},
+    {file = "numpy-1.21.1-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:791492091744b0fe390a6ce85cc1bf5149968ac7d5f0477288f78c89b385d9af"},
+    {file = "numpy-1.21.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0318c465786c1f63ac05d7c4dbcecd4d2d7e13f0959b01b534ea1e92202235c5"},
+    {file = "numpy-1.21.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9a513bd9c1551894ee3d31369f9b07460ef223694098cf27d399513415855b68"},
+    {file = "numpy-1.21.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:91c6f5fc58df1e0a3cc0c3a717bb3308ff850abdaa6d2d802573ee2b11f674a8"},
+    {file = "numpy-1.21.1-cp38-cp38-win32.whl", hash = "sha256:978010b68e17150db8765355d1ccdd450f9fc916824e8c4e35ee620590e234cd"},
+    {file = "numpy-1.21.1-cp38-cp38-win_amd64.whl", hash = "sha256:9749a40a5b22333467f02fe11edc98f022133ee1bfa8ab99bda5e5437b831214"},
+    {file = "numpy-1.21.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d7a4aeac3b94af92a9373d6e77b37691b86411f9745190d2c351f410ab3a791f"},
+    {file = "numpy-1.21.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d9e7912a56108aba9b31df688a4c4f5cb0d9d3787386b87d504762b6754fbb1b"},
+    {file = "numpy-1.21.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:25b40b98ebdd272bc3020935427a4530b7d60dfbe1ab9381a39147834e985eac"},
+    {file = "numpy-1.21.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:8a92c5aea763d14ba9d6475803fc7904bda7decc2a0a68153f587ad82941fec1"},
+    {file = "numpy-1.21.1-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:05a0f648eb28bae4bcb204e6fd14603de2908de982e761a2fc78efe0f19e96e1"},
+    {file = "numpy-1.21.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f01f28075a92eede918b965e86e8f0ba7b7797a95aa8d35e1cc8821f5fc3ad6a"},
+    {file = "numpy-1.21.1-cp39-cp39-win32.whl", hash = "sha256:88c0b89ad1cc24a5efbb99ff9ab5db0f9a86e9cc50240177a571fbe9c2860ac2"},
+    {file = "numpy-1.21.1-cp39-cp39-win_amd64.whl", hash = "sha256:01721eefe70544d548425a07c80be8377096a54118070b8a62476866d5208e33"},
+    {file = "numpy-1.21.1-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2d4d1de6e6fb3d28781c73fbde702ac97f03d79e4ffd6598b880b2d95d62ead4"},
+    {file = "numpy-1.21.1.zip", hash = "sha256:dff4af63638afcc57a3dfb9e4b26d434a7a602d225b42d746ea7fe2edf1342fd"},
 ]
 packaging = [
-    {file = "packaging-22.0-py3-none-any.whl", hash = "sha256:957e2148ba0e1a3b282772e791ef1d8083648bc131c8ab0c1feba110ce1146c3"},
-    {file = "packaging-22.0.tar.gz", hash = "sha256:2198ec20bd4c017b8f9717e00f0c8714076fc2fd93816750ab48e2c41de2cfd3"},
+    {file = "packaging-23.0-py3-none-any.whl", hash = "sha256:714ac14496c3e68c99c29b00845f7a2b85f3bb6f1078fd9f72fd20f0570002b2"},
+    {file = "packaging-23.0.tar.gz", hash = "sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97"},
 ]
 pbr = [
-    {file = "pbr-5.11.0-py2.py3-none-any.whl", hash = "sha256:db2317ff07c84c4c63648c9064a79fe9d9f5c7ce85a9099d4b6258b3db83225a"},
-    {file = "pbr-5.11.0.tar.gz", hash = "sha256:b97bc6695b2aff02144133c2e7399d5885223d42b7912ffaec2ca3898e673bfe"},
+    {file = "pbr-5.11.1-py2.py3-none-any.whl", hash = "sha256:567f09558bae2b3ab53cb3c1e2e33e726ff3338e7bae3db5dc954b3a44eef12b"},
+    {file = "pbr-5.11.1.tar.gz", hash = "sha256:aefc51675b0b533d56bb5fd1c8c6c0522fe31896679882e1c4c63d5e4a0fccb3"},
+]
+pillow = [
+    {file = "Pillow-9.4.0-1-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:1b4b4e9dda4f4e4c4e6896f93e84a8f0bcca3b059de9ddf67dac3c334b1195e1"},
+    {file = "Pillow-9.4.0-1-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:fb5c1ad6bad98c57482236a21bf985ab0ef42bd51f7ad4e4538e89a997624e12"},
+    {file = "Pillow-9.4.0-1-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:f0caf4a5dcf610d96c3bd32932bfac8aee61c96e60481c2a0ea58da435e25acd"},
+    {file = "Pillow-9.4.0-1-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:3f4cc516e0b264c8d4ccd6b6cbc69a07c6d582d8337df79be1e15a5056b258c9"},
+    {file = "Pillow-9.4.0-1-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:b8c2f6eb0df979ee99433d8b3f6d193d9590f735cf12274c108bd954e30ca858"},
+    {file = "Pillow-9.4.0-1-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:b70756ec9417c34e097f987b4d8c510975216ad26ba6e57ccb53bc758f490dab"},
+    {file = "Pillow-9.4.0-1-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:43521ce2c4b865d385e78579a082b6ad1166ebed2b1a2293c3be1d68dd7ca3b9"},
+    {file = "Pillow-9.4.0-2-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:9d9a62576b68cd90f7075876f4e8444487db5eeea0e4df3ba298ee38a8d067b0"},
+    {file = "Pillow-9.4.0-2-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:87708d78a14d56a990fbf4f9cb350b7d89ee8988705e58e39bdf4d82c149210f"},
+    {file = "Pillow-9.4.0-2-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:8a2b5874d17e72dfb80d917213abd55d7e1ed2479f38f001f264f7ce7bae757c"},
+    {file = "Pillow-9.4.0-2-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:83125753a60cfc8c412de5896d10a0a405e0bd88d0470ad82e0869ddf0cb3848"},
+    {file = "Pillow-9.4.0-2-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:9e5f94742033898bfe84c93c831a6f552bb629448d4072dd312306bab3bd96f1"},
+    {file = "Pillow-9.4.0-2-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:013016af6b3a12a2f40b704677f8b51f72cb007dac785a9933d5c86a72a7fe33"},
+    {file = "Pillow-9.4.0-2-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:99d92d148dd03fd19d16175b6d355cc1b01faf80dae93c6c3eb4163709edc0a9"},
+    {file = "Pillow-9.4.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:2968c58feca624bb6c8502f9564dd187d0e1389964898f5e9e1fbc8533169157"},
+    {file = "Pillow-9.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c5c1362c14aee73f50143d74389b2c158707b4abce2cb055b7ad37ce60738d47"},
+    {file = "Pillow-9.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd752c5ff1b4a870b7661234694f24b1d2b9076b8bf337321a814c612665f343"},
+    {file = "Pillow-9.4.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a3049a10261d7f2b6514d35bbb7a4dfc3ece4c4de14ef5876c4b7a23a0e566d"},
+    {file = "Pillow-9.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16a8df99701f9095bea8a6c4b3197da105df6f74e6176c5b410bc2df2fd29a57"},
+    {file = "Pillow-9.4.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:94cdff45173b1919350601f82d61365e792895e3c3a3443cf99819e6fbf717a5"},
+    {file = "Pillow-9.4.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:ed3e4b4e1e6de75fdc16d3259098de7c6571b1a6cc863b1a49e7d3d53e036070"},
+    {file = "Pillow-9.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d5b2f8a31bd43e0f18172d8ac82347c8f37ef3e0b414431157718aa234991b28"},
+    {file = "Pillow-9.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:09b89ddc95c248ee788328528e6a2996e09eaccddeeb82a5356e92645733be35"},
+    {file = "Pillow-9.4.0-cp310-cp310-win32.whl", hash = "sha256:f09598b416ba39a8f489c124447b007fe865f786a89dbfa48bb5cf395693132a"},
+    {file = "Pillow-9.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:f6e78171be3fb7941f9910ea15b4b14ec27725865a73c15277bc39f5ca4f8391"},
+    {file = "Pillow-9.4.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:3fa1284762aacca6dc97474ee9c16f83990b8eeb6697f2ba17140d54b453e133"},
+    {file = "Pillow-9.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:eaef5d2de3c7e9b21f1e762f289d17b726c2239a42b11e25446abf82b26ac132"},
+    {file = "Pillow-9.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a4dfdae195335abb4e89cc9762b2edc524f3c6e80d647a9a81bf81e17e3fb6f0"},
+    {file = "Pillow-9.4.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6abfb51a82e919e3933eb137e17c4ae9c0475a25508ea88993bb59faf82f3b35"},
+    {file = "Pillow-9.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:451f10ef963918e65b8869e17d67db5e2f4ab40e716ee6ce7129b0cde2876eab"},
+    {file = "Pillow-9.4.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:6663977496d616b618b6cfa43ec86e479ee62b942e1da76a2c3daa1c75933ef4"},
+    {file = "Pillow-9.4.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:60e7da3a3ad1812c128750fc1bc14a7ceeb8d29f77e0a2356a8fb2aa8925287d"},
+    {file = "Pillow-9.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:19005a8e58b7c1796bc0167862b1f54a64d3b44ee5d48152b06bb861458bc0f8"},
+    {file = "Pillow-9.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f715c32e774a60a337b2bb8ad9839b4abf75b267a0f18806f6f4f5f1688c4b5a"},
+    {file = "Pillow-9.4.0-cp311-cp311-win32.whl", hash = "sha256:b222090c455d6d1a64e6b7bb5f4035c4dff479e22455c9eaa1bdd4c75b52c80c"},
+    {file = "Pillow-9.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:ba6612b6548220ff5e9df85261bddc811a057b0b465a1226b39bfb8550616aee"},
+    {file = "Pillow-9.4.0-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:5f532a2ad4d174eb73494e7397988e22bf427f91acc8e6ebf5bb10597b49c493"},
+    {file = "Pillow-9.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5dd5a9c3091a0f414a963d427f920368e2b6a4c2f7527fdd82cde8ef0bc7a327"},
+    {file = "Pillow-9.4.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef21af928e807f10bf4141cad4746eee692a0dd3ff56cfb25fce076ec3cc8abe"},
+    {file = "Pillow-9.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:847b114580c5cc9ebaf216dd8c8dbc6b00a3b7ab0131e173d7120e6deade1f57"},
+    {file = "Pillow-9.4.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:653d7fb2df65efefbcbf81ef5fe5e5be931f1ee4332c2893ca638c9b11a409c4"},
+    {file = "Pillow-9.4.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:46f39cab8bbf4a384ba7cb0bc8bae7b7062b6a11cfac1ca4bc144dea90d4a9f5"},
+    {file = "Pillow-9.4.0-cp37-cp37m-win32.whl", hash = "sha256:7ac7594397698f77bce84382929747130765f66406dc2cd8b4ab4da68ade4c6e"},
+    {file = "Pillow-9.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:46c259e87199041583658457372a183636ae8cd56dbf3f0755e0f376a7f9d0e6"},
+    {file = "Pillow-9.4.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:0e51f608da093e5d9038c592b5b575cadc12fd748af1479b5e858045fff955a9"},
+    {file = "Pillow-9.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:765cb54c0b8724a7c12c55146ae4647e0274a839fb6de7bcba841e04298e1011"},
+    {file = "Pillow-9.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:519e14e2c49fcf7616d6d2cfc5c70adae95682ae20f0395e9280db85e8d6c4df"},
+    {file = "Pillow-9.4.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d197df5489004db87d90b918033edbeee0bd6df3848a204bca3ff0a903bef837"},
+    {file = "Pillow-9.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0845adc64fe9886db00f5ab68c4a8cd933ab749a87747555cec1c95acea64b0b"},
+    {file = "Pillow-9.4.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:e1339790c083c5a4de48f688b4841f18df839eb3c9584a770cbd818b33e26d5d"},
+    {file = "Pillow-9.4.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:a96e6e23f2b79433390273eaf8cc94fec9c6370842e577ab10dabdcc7ea0a66b"},
+    {file = "Pillow-9.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7cfc287da09f9d2a7ec146ee4d72d6ea1342e770d975e49a8621bf54eaa8f30f"},
+    {file = "Pillow-9.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d7081c084ceb58278dd3cf81f836bc818978c0ccc770cbbb202125ddabec6628"},
+    {file = "Pillow-9.4.0-cp38-cp38-win32.whl", hash = "sha256:df41112ccce5d47770a0c13651479fbcd8793f34232a2dd9faeccb75eb5d0d0d"},
+    {file = "Pillow-9.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:7a21222644ab69ddd9967cfe6f2bb420b460dae4289c9d40ff9a4896e7c35c9a"},
+    {file = "Pillow-9.4.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:0f3269304c1a7ce82f1759c12ce731ef9b6e95b6df829dccd9fe42912cc48569"},
+    {file = "Pillow-9.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cb362e3b0976dc994857391b776ddaa8c13c28a16f80ac6522c23d5257156bed"},
+    {file = "Pillow-9.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a2e0f87144fcbbe54297cae708c5e7f9da21a4646523456b00cc956bd4c65815"},
+    {file = "Pillow-9.4.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:28676836c7796805914b76b1837a40f76827ee0d5398f72f7dcc634bae7c6264"},
+    {file = "Pillow-9.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0884ba7b515163a1a05440a138adeb722b8a6ae2c2b33aea93ea3118dd3a899e"},
+    {file = "Pillow-9.4.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:53dcb50fbdc3fb2c55431a9b30caeb2f7027fcd2aeb501459464f0214200a503"},
+    {file = "Pillow-9.4.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:e8c5cf126889a4de385c02a2c3d3aba4b00f70234bfddae82a5eaa3ee6d5e3e6"},
+    {file = "Pillow-9.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6c6b1389ed66cdd174d040105123a5a1bc91d0aa7059c7261d20e583b6d8cbd2"},
+    {file = "Pillow-9.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0dd4c681b82214b36273c18ca7ee87065a50e013112eea7d78c7a1b89a739153"},
+    {file = "Pillow-9.4.0-cp39-cp39-win32.whl", hash = "sha256:6d9dfb9959a3b0039ee06c1a1a90dc23bac3b430842dcb97908ddde05870601c"},
+    {file = "Pillow-9.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:54614444887e0d3043557d9dbc697dbb16cfb5a35d672b7a0fcc1ed0cf1c600b"},
+    {file = "Pillow-9.4.0-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:b9b752ab91e78234941e44abdecc07f1f0d8f51fb62941d32995b8161f68cfe5"},
+    {file = "Pillow-9.4.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d3b56206244dc8711f7e8b7d6cad4663917cd5b2d950799425076681e8766286"},
+    {file = "Pillow-9.4.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aabdab8ec1e7ca7f1434d042bf8b1e92056245fb179790dc97ed040361f16bfd"},
+    {file = "Pillow-9.4.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:db74f5562c09953b2c5f8ec4b7dfd3f5421f31811e97d1dbc0a7c93d6e3a24df"},
+    {file = "Pillow-9.4.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e9d7747847c53a16a729b6ee5e737cf170f7a16611c143d95aa60a109a59c336"},
+    {file = "Pillow-9.4.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:b52ff4f4e002f828ea6483faf4c4e8deea8d743cf801b74910243c58acc6eda3"},
+    {file = "Pillow-9.4.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:575d8912dca808edd9acd6f7795199332696d3469665ef26163cd090fa1f8bfa"},
+    {file = "Pillow-9.4.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3c4ed2ff6760e98d262e0cc9c9a7f7b8a9f61aa4d47c58835cdaf7b0b8811bb"},
+    {file = "Pillow-9.4.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e621b0246192d3b9cb1dc62c78cfa4c6f6d2ddc0ec207d43c0dedecb914f152a"},
+    {file = "Pillow-9.4.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:8f127e7b028900421cad64f51f75c051b628db17fb00e099eb148761eed598c9"},
+    {file = "Pillow-9.4.0.tar.gz", hash = "sha256:a1c2d7780448eb93fbcc3789bf3916aa5720d942e37945f4056680317f1cd23e"},
 ]
 platformdirs = [
-    {file = "platformdirs-2.6.0-py3-none-any.whl", hash = "sha256:1a89a12377800c81983db6be069ec068eee989748799b946cce2a6e80dcc54ca"},
-    {file = "platformdirs-2.6.0.tar.gz", hash = "sha256:b46ffafa316e6b83b47489d240ce17173f123a9b9c83282141c3daf26ad9ac2e"},
+    {file = "platformdirs-3.1.1-py3-none-any.whl", hash = "sha256:e5986afb596e4bb5bde29a79ac9061aa955b94fca2399b7aaac4090860920dd8"},
+    {file = "platformdirs-3.1.1.tar.gz", hash = "sha256:024996549ee88ec1a9aa99ff7f8fc819bb59e2c3477b410d90a16d32d6e707aa"},
 ]
 pluggy = [
     {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"},
@@ -1528,8 +1916,8 @@ pprintpp = [
     {file = "pprintpp-0.4.0.tar.gz", hash = "sha256:ea826108e2c7f49dc6d66c752973c3fc9749142a798d6b254e1e301cfdbc6403"},
 ]
 pre-commit = [
-    {file = "pre_commit-2.20.0-py2.py3-none-any.whl", hash = "sha256:51a5ba7c480ae8072ecdb6933df22d2f812dc897d5fe848778116129a681aac7"},
-    {file = "pre_commit-2.20.0.tar.gz", hash = "sha256:a978dac7bc9ec0bcee55c18a277d553b0f419d259dadb4b9418ff2d00eb43959"},
+    {file = "pre_commit-2.21.0-py2.py3-none-any.whl", hash = "sha256:e2f91727039fc39a92f58a588a25b87f936de6567eed4f0e673e0507edc75bad"},
+    {file = "pre_commit-2.21.0.tar.gz", hash = "sha256:31ef31af7e474a8d8995027fefdfcf509b5c913ff31f2015b4ec4beb26a6f658"},
 ]
 psutil = [
     {file = "psutil-5.9.4-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c1ca331af862803a42677c120aff8a814a804e09832f166f226bfd22b56feee8"},
@@ -1547,13 +1935,9 @@ psutil = [
     {file = "psutil-5.9.4-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:6001c809253a29599bc0dfd5179d9f8a5779f9dffea1da0f13c53ee568115e1e"},
     {file = "psutil-5.9.4.tar.gz", hash = "sha256:3d7f9739eb435d4b1338944abe23f49584bde5395f27487d2ee25ad9a8774a62"},
 ]
-py = [
-    {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"},
-    {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"},
-]
 pygments = [
-    {file = "Pygments-2.13.0-py3-none-any.whl", hash = "sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42"},
-    {file = "Pygments-2.13.0.tar.gz", hash = "sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1"},
+    {file = "Pygments-2.14.0-py3-none-any.whl", hash = "sha256:fa7bd7bd2771287c0de303af8bfdfc731f51bd2c6a47ab69d117138893b82717"},
+    {file = "Pygments-2.14.0.tar.gz", hash = "sha256:b3ed06a9e8ac9a9aae5a6f5dbe78a8a58655d17b43b93c078f094ddc476ae297"},
 ]
 pymongo = [
     {file = "pymongo-4.3.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:74731c9e423c93cbe791f60c27030b6af6a948cef67deca079da6cd1bb583a8e"},
@@ -1631,9 +2015,13 @@ pymongo = [
     {file = "pymongo-4.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:fc28e8d85d392a06434e9a934908d97e2cf453d69488d2bcd0bfb881497fd975"},
     {file = "pymongo-4.3.3.tar.gz", hash = "sha256:34e95ffb0a68bffbc3b437f2d1f25fc916fef3df5cdeed0992da5f42fae9b807"},
 ]
+pyparsing = [
+    {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"},
+    {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"},
+]
 pytest = [
-    {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"},
-    {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"},
+    {file = "pytest-7.2.2-py3-none-any.whl", hash = "sha256:130328f552dcfac0b1cec75c12e3f005619dc5f874f0a06e8ff7263f0ee6225e"},
+    {file = "pytest-7.2.2.tar.gz", hash = "sha256:c99ab0c73aceb050f68929bc93af19ab6db0558791c6a0715723abe9d0ade9d4"},
 ]
 pytest-clarity = [
     {file = "pytest-clarity-1.0.1.tar.gz", hash = "sha256:505fe345fad4fe11c6a4187fe683f2c7c52c077caa1e135f3e483fe112db7772"},
@@ -1647,16 +2035,16 @@ pytest-rerunfailures = [
     {file = "pytest_rerunfailures-10.3-py3-none-any.whl", hash = "sha256:6be6f96510bf94b54198bf15bc5568fe2cdff88e83875912e22d29810acf65ff"},
 ]
 pytest-xdist = [
-    {file = "pytest-xdist-3.1.0.tar.gz", hash = "sha256:40fdb8f3544921c5dfcd486ac080ce22870e71d82ced6d2e78fa97c2addd480c"},
-    {file = "pytest_xdist-3.1.0-py3-none-any.whl", hash = "sha256:70a76f191d8a1d2d6be69fc440cdf85f3e4c03c08b520fd5dc5d338d6cf07d89"},
+    {file = "pytest-xdist-3.2.1.tar.gz", hash = "sha256:1849bd98d8b242b948e472db7478e090bf3361912a8fed87992ed94085f54727"},
+    {file = "pytest_xdist-3.2.1-py3-none-any.whl", hash = "sha256:37290d161638a20b672401deef1cba812d110ac27e35d213f091d15b8beb40c9"},
 ]
 python-dateutil = [
     {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"},
     {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"},
 ]
 pytz = [
-    {file = "pytz-2022.6-py2.py3-none-any.whl", hash = "sha256:222439474e9c98fced559f1709d89e6c9cbf8d79c794ff3eb9f8800064291427"},
-    {file = "pytz-2022.6.tar.gz", hash = "sha256:e89512406b793ca39f5971bc999cc538ce125c0e51c27941bef4568b460095e2"},
+    {file = "pytz-2022.7.1-py2.py3-none-any.whl", hash = "sha256:78f4f37d8198e0627c5f1143240bb0206b8691d8d7ac6d78fee88b78733f8c4a"},
+    {file = "pytz-2022.7.1.tar.gz", hash = "sha256:01a0681c4b9684a28304615eba55d1ab31ae00bf68ec157ec3708a8182dbbcd0"},
 ]
 pyyaml = [
     {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"},
@@ -1701,32 +2089,36 @@ pyyaml = [
     {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"},
 ]
 redis = [
-    {file = "redis-4.4.0-py3-none-any.whl", hash = "sha256:cae3ee5d1f57d8caf534cd8764edf3163c77e073bdd74b6f54a87ffafdc5e7d9"},
-    {file = "redis-4.4.0.tar.gz", hash = "sha256:7b8c87d19c45d3f1271b124858d2a5c13160c4e74d4835e28273400fa34d5228"},
+    {file = "redis-4.5.3-py3-none-any.whl", hash = "sha256:7df17a0a2b72a4c8895b462dd07616c51b1dcb48fdd7ecb7b6f4bf39ecb2e94e"},
+    {file = "redis-4.5.3.tar.gz", hash = "sha256:56732e156fe31801c4f43396bd3ca0c2a7f6f83d7936798531b9848d103381aa"},
 ]
 requests = [
-    {file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"},
-    {file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"},
+    {file = "requests-2.28.2-py3-none-any.whl", hash = "sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa"},
+    {file = "requests-2.28.2.tar.gz", hash = "sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf"},
 ]
 requests-mock = [
     {file = "requests-mock-1.10.0.tar.gz", hash = "sha256:59c9c32419a9fb1ae83ec242d98e889c45bd7d7a65d48375cc243ec08441658b"},
     {file = "requests_mock-1.10.0-py2.py3-none-any.whl", hash = "sha256:2fdbb637ad17ee15c06f33d31169e71bf9fe2bdb7bc9da26185be0dd8d842699"},
 ]
 responses = [
-    {file = "responses-0.16.0-py2.py3-none-any.whl", hash = "sha256:f358ef75e8bf431b0aa203cc62625c3a1c80a600dbe9de91b944bf4e9c600b92"},
-    {file = "responses-0.16.0.tar.gz", hash = "sha256:a2e3aca2a8277e61257cd3b1c154b1dd0d782b1ae3d38b7fa37cbe3feb531791"},
+    {file = "responses-0.19.0-py3-none-any.whl", hash = "sha256:53354b5de163aa2074312c71d8ebccb8bd1ab336cff7053abb75e84dc5637abe"},
+    {file = "responses-0.19.0.tar.gz", hash = "sha256:3fc29c3117e14136b833a0a6d4e7f1217c6301bf08b6086db468e12f1e3290e2"},
 ]
 rich = [
-    {file = "rich-12.6.0-py3-none-any.whl", hash = "sha256:a4eb26484f2c82589bd9a17c73d32a010b1e29d89f1604cd9bf3a2097b81bb5e"},
-    {file = "rich-12.6.0.tar.gz", hash = "sha256:ba3a3775974105c221d31141f2c116f4fd65c5ceb0698657a11e9f295ec93fd0"},
+    {file = "rich-13.3.2-py3-none-any.whl", hash = "sha256:a104f37270bf677148d8acb07d33be1569eeee87e2d1beb286a4e9113caf6f2f"},
+    {file = "rich-13.3.2.tar.gz", hash = "sha256:91954fe80cfb7985727a467ca98a7618e5dd15178cc2da10f553b36a93859001"},
 ]
 s3transfer = [
     {file = "s3transfer-0.6.0-py3-none-any.whl", hash = "sha256:06176b74f3a15f61f1b4f25a1fc29a4429040b7647133a463da8fa5bd28d5ecd"},
     {file = "s3transfer-0.6.0.tar.gz", hash = "sha256:2ed07d3866f523cc561bf4a00fc5535827981b117dd7876f036b0c1aca42c947"},
 ]
 setuptools = [
-    {file = "setuptools-65.6.3-py3-none-any.whl", hash = "sha256:57f6f22bde4e042978bcd50176fdb381d7c21a9efa4041202288d3737a0c6a54"},
-    {file = "setuptools-65.6.3.tar.gz", hash = "sha256:a7620757bf984b58deaf32fc8a4577a9bbc0850cf92c20e1ce41c38c19e5fb75"},
+    {file = "setuptools-67.6.0-py3-none-any.whl", hash = "sha256:b78aaa36f6b90a074c1fa651168723acbf45d14cb1196b6f02c0fd07f17623b2"},
+    {file = "setuptools-67.6.0.tar.gz", hash = "sha256:2ee892cd5f29f3373097f5a814697e397cf3ce313616df0af11231e2ad118077"},
+]
+setuptools-scm = [
+    {file = "setuptools_scm-6.4.2-py3-none-any.whl", hash = "sha256:acea13255093849de7ccb11af9e1fb8bde7067783450cee9ef7a93139bddf6d4"},
+    {file = "setuptools_scm-6.4.2.tar.gz", hash = "sha256:6833ac65c6ed9711a4d5d2266f8024cfa07c533a0e55f4c12f6eff280a5a9e30"},
 ]
 six = [
     {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
@@ -1737,45 +2129,41 @@ snowballstemmer = [
     {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"},
 ]
 soupsieve = [
-    {file = "soupsieve-2.3.2.post1-py3-none-any.whl", hash = "sha256:3b2503d3c7084a42b1ebd08116e5f81aadfaea95863628c80a3b774a11b7c759"},
-    {file = "soupsieve-2.3.2.post1.tar.gz", hash = "sha256:fc53893b3da2c33de295667a0e19f078c14bf86544af307354de5fcf12a3f30d"},
+    {file = "soupsieve-2.4-py3-none-any.whl", hash = "sha256:49e5368c2cda80ee7e84da9dbe3e110b70a4575f196efb74e51b94549d921955"},
+    {file = "soupsieve-2.4.tar.gz", hash = "sha256:e28dba9ca6c7c00173e34e4ba57448f0688bb681b7c5e8bf4971daafc093d69a"},
 ]
 sphinx = [
-    {file = "Sphinx-4.3.0-py3-none-any.whl", hash = "sha256:7e2b30da5f39170efcd95c6270f07669d623c276521fee27ad6c380f49d2bf5b"},
-    {file = "Sphinx-4.3.0.tar.gz", hash = "sha256:6d051ab6e0d06cba786c4656b0fe67ba259fe058410f49e95bee6e49c4052cbf"},
+    {file = "Sphinx-5.3.0.tar.gz", hash = "sha256:51026de0a9ff9fc13c05d74913ad66047e104f56a129ff73e174eb5c3ee794b5"},
+    {file = "sphinx-5.3.0-py3-none-any.whl", hash = "sha256:060ca5c9f7ba57a08a1219e547b269fadf125ae25b06b9fa7f66768efb652d6d"},
 ]
 sphinx-autobuild = [
     {file = "sphinx-autobuild-2021.3.14.tar.gz", hash = "sha256:de1ca3b66e271d2b5b5140c35034c89e47f263f2cd5db302c9217065f7443f05"},
     {file = "sphinx_autobuild-2021.3.14-py3-none-any.whl", hash = "sha256:8fe8cbfdb75db04475232f05187c776f46f6e9e04cacf1e49ce81bdac649ccac"},
 ]
 sphinx-autodoc-typehints = [
-    {file = "sphinx_autodoc_typehints-1.17.1-py3-none-any.whl", hash = "sha256:f16491cad05a13f4825ecdf9ee4ff02925d9a3b1cf103d4d02f2f81802cce653"},
-    {file = "sphinx_autodoc_typehints-1.17.1.tar.gz", hash = "sha256:844d7237d3f6280b0416f5375d9556cfd84df1945356fcc34b82e8aaacab40f3"},
+    {file = "sphinx_autodoc_typehints-1.22-py3-none-any.whl", hash = "sha256:ef4a8b9d52de66065aa7d3adfabf5a436feb8a2eff07c2ddc31625d8807f2b69"},
+    {file = "sphinx_autodoc_typehints-1.22.tar.gz", hash = "sha256:71fca2d5eee9b034204e4c686ab20b4d8f5eb9409396216bcae6c87c38e18ea6"},
 ]
 sphinx-automodapi = [
-    {file = "sphinx-automodapi-0.14.1.tar.gz", hash = "sha256:a2f9c0f9e2901875e6db75df6c01412875eb15f25e7db1206e1b69fedf75bbc9"},
-    {file = "sphinx_automodapi-0.14.1-py3-none-any.whl", hash = "sha256:4238e131d7abc47226449661bb3cfa2bb1b5b190184ffa69d9b924b984a22753"},
+    {file = "sphinx-automodapi-0.15.0.tar.gz", hash = "sha256:fd5871e054df7f3e299dde959afffa849f4d01c6eac274c366b06472afcb06aa"},
+    {file = "sphinx_automodapi-0.15.0-py3-none-any.whl", hash = "sha256:06848f261fb127b25d35f27c2c4fddb041e76498733da064504f8077cbd27bec"},
 ]
 sphinx-basic-ng = [
     {file = "sphinx_basic_ng-1.0.0b1-py3-none-any.whl", hash = "sha256:ade597a3029c7865b24ad0eda88318766bcc2f9f4cef60df7e28126fde94db2a"},
     {file = "sphinx_basic_ng-1.0.0b1.tar.gz", hash = "sha256:89374bd3ccd9452a301786781e28c8718e99960f2d4f411845ea75fc7bb5a9b0"},
 ]
 sphinx-copybutton = [
-    {file = "sphinx-copybutton-0.4.0.tar.gz", hash = "sha256:8daed13a87afd5013c3a9af3575cc4d5bec052075ccd3db243f895c07a689386"},
-    {file = "sphinx_copybutton-0.4.0-py3-none-any.whl", hash = "sha256:4340d33c169dac6dd82dce2c83333412aa786a42dd01a81a8decac3b130dc8b0"},
+    {file = "sphinx-copybutton-0.5.1.tar.gz", hash = "sha256:366251e28a6f6041514bfb5439425210418d6c750e98d3a695b73e56866a677a"},
+    {file = "sphinx_copybutton-0.5.1-py3-none-any.whl", hash = "sha256:0842851b5955087a7ec7fc870b622cb168618ad408dee42692e9a5c97d071da8"},
 ]
-sphinx-inline-tabs = [
-    {file = "sphinx_inline_tabs-2022.1.2b11-py3-none-any.whl", hash = "sha256:bb4e807769ef52301a186d0678da719120b978a1af4fd62a1e9453684e962dbc"},
-    {file = "sphinx_inline_tabs-2022.1.2b11.tar.gz", hash = "sha256:afb9142772ec05ccb07f05d8181b518188fc55631b26ee803c694e812b3fdd73"},
+sphinx-design = [
+    {file = "sphinx_design-0.3.0-py3-none-any.whl", hash = "sha256:823c1dd74f31efb3285ec2f1254caefed29d762a40cd676f58413a1e4ed5cc96"},
+    {file = "sphinx_design-0.3.0.tar.gz", hash = "sha256:7183fa1fae55b37ef01bda5125a21ee841f5bbcbf59a35382be598180c4cefba"},
 ]
 sphinx-notfound-page = [
     {file = "sphinx-notfound-page-0.8.3.tar.gz", hash = "sha256:f728403280026b84c234540bebbed7f710b9ea582e7348a35a5becefe4024332"},
     {file = "sphinx_notfound_page-0.8.3-py2.py3-none-any.whl", hash = "sha256:c4867b345afccef72de71fb410c412540dfbb5c2de0dc06bde70b331b8f30469"},
 ]
-sphinx-panels = [
-    {file = "sphinx-panels-0.6.0.tar.gz", hash = "sha256:d36dcd26358117e11888f7143db4ac2301ebe90873ac00627bf1fe526bf0f058"},
-    {file = "sphinx_panels-0.6.0-py3-none-any.whl", hash = "sha256:bd64afaf85c07f8096d21c8247fc6fd757e339d1be97832c8832d6ae5ed2e61d"},
-]
 sphinxcontrib-apidoc = [
     {file = "sphinxcontrib-apidoc-0.3.0.tar.gz", hash = "sha256:729bf592cf7b7dd57c4c05794f732dc026127275d785c2a5494521fdde773fb9"},
     {file = "sphinxcontrib_apidoc-0.3.0-py2.py3-none-any.whl", hash = "sha256:6671a46b2c6c5b0dca3d8a147849d159065e50443df79614f921b42fbd15cb09"},
@@ -1804,20 +2192,24 @@ sphinxcontrib-serializinghtml = [
     {file = "sphinxcontrib-serializinghtml-1.1.5.tar.gz", hash = "sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952"},
     {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"},
 ]
+sphinxext-opengraph = [
+    {file = "sphinxext-opengraph-0.8.1.tar.gz", hash = "sha256:4e698b907ef9582cd0106bd50807106677fdab4dc5c31040be17c9afb6e17880"},
+    {file = "sphinxext_opengraph-0.8.1-py3-none-any.whl", hash = "sha256:64fe993d4974c65202d1c8f1c986abb559154a814a6378f9d3aaf8c7c9bd62bc"},
+]
+tenacity = [
+    {file = "tenacity-8.2.2-py3-none-any.whl", hash = "sha256:2f277afb21b851637e8f52e6a613ff08734c347dc19ade928e519d7d2d8569b0"},
+    {file = "tenacity-8.2.2.tar.gz", hash = "sha256:43af037822bd0029025877f3b2d97cc4d7bb0c2991000a3d59d71517c5c969e0"},
+]
 timeout-decorator = [
     {file = "timeout-decorator-0.5.0.tar.gz", hash = "sha256:6a2f2f58db1c5b24a2cc79de6345760377ad8bdc13813f5265f6c3e63d16b3d7"},
 ]
-toml = [
-    {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"},
-    {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"},
-]
 tomli = [
     {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"},
     {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"},
 ]
 tomlkit = [
-    {file = "tomlkit-0.7.2-py2.py3-none-any.whl", hash = "sha256:173ad840fa5d2aac140528ca1933c29791b79a374a0861a80347f42ec9328117"},
-    {file = "tomlkit-0.7.2.tar.gz", hash = "sha256:d7a454f319a7e9bd2e249f239168729327e4dd2d27b17dc68be264ad1ce36754"},
+    {file = "tomlkit-0.11.6-py3-none-any.whl", hash = "sha256:07de26b0d8cfc18f871aec595fda24d95b08fef89d147caa861939f37230bf4b"},
+    {file = "tomlkit-0.11.6.tar.gz", hash = "sha256:71b952e5721688937fb02cf9d354dbcf0785066149d2855e44531ebdd2b65d73"},
 ]
 tornado = [
     {file = "tornado-6.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:20f638fd8cc85f3cbae3c732326e96addff0a15e22d80f049e00121651e82e72"},
@@ -1833,93 +2225,93 @@ tornado = [
     {file = "tornado-6.2.tar.gz", hash = "sha256:9b630419bde84ec666bfd7ea0a4cb2a8a651c2d5cccdbdd1972a0c859dfc3c13"},
 ]
 typing-extensions = [
-    {file = "typing_extensions-4.4.0-py3-none-any.whl", hash = "sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e"},
-    {file = "typing_extensions-4.4.0.tar.gz", hash = "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa"},
+    {file = "typing_extensions-4.5.0-py3-none-any.whl", hash = "sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4"},
+    {file = "typing_extensions-4.5.0.tar.gz", hash = "sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb"},
 ]
 uc-micro-py = [
     {file = "uc-micro-py-1.0.1.tar.gz", hash = "sha256:b7cdf4ea79433043ddfe2c82210208f26f7962c0cfbe3bacb05ee879a7fdb596"},
     {file = "uc_micro_py-1.0.1-py3-none-any.whl", hash = "sha256:316cfb8b6862a0f1d03540f0ae6e7b033ff1fa0ddbe60c12cbe0d4cec846a69f"},
 ]
 ujson = [
-    {file = "ujson-5.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b74396a655ac8a5299dcb765b4a17ba706e45c0df95818bcc6c13c4645a1c38e"},
-    {file = "ujson-5.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f63535d51e039a984b2fb67ff87057ffe4216d4757c3cedf2fc846af88253cb7"},
-    {file = "ujson-5.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4420bfff18ca6aa39cfb22fe35d8aba3811fa1190c4f4e1ad816b0aad72f7e3"},
-    {file = "ujson-5.6.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:35423460954d0c61602da734697724e8dd5326a8aa7900123e584b935116203e"},
-    {file = "ujson-5.6.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:169b3fbd1188647c6ce00cb690915526aff86997c89a94c1b50432010ad7ae0f"},
-    {file = "ujson-5.6.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:91000612a2c30f50c6a009e6459a677e5c1972e51b59ecefd6063543dc47a4e9"},
-    {file = "ujson-5.6.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b72d4d948749e9c6afcd3d7af9ecc780fccde84e26d275c97273dd83c68a488b"},
-    {file = "ujson-5.6.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:aff708a1b9e2d4979f74375ade0bff978be72c8bd90422a756d24d8a46d78059"},
-    {file = "ujson-5.6.0-cp310-cp310-win32.whl", hash = "sha256:6ea9024749a41864bffb12da15aace4a3193c03ea97e77b069557aefa342811f"},
-    {file = "ujson-5.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:1217326ba80eab1ff3f644f9eee065bd4fcc4e0c068a2f86f851cafd05737169"},
-    {file = "ujson-5.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bfb1fdf61763fafc0f8a20becf9cc4287c14fc41c0e14111d28c0d0dfda9ba56"},
-    {file = "ujson-5.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fecf83b2ef3cbce4f5cc573df6f6ded565e5e27c1af84038bae5ade306686d82"},
-    {file = "ujson-5.6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213e41dc501b4a6d029873039da3e45ba7766b9f9eba97ecc4287c371f5403cc"},
-    {file = "ujson-5.6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad74eb53ee07e76c82f9ef8e7256c33873b81bd1f97a274fdb65ed87c2801f6"},
-    {file = "ujson-5.6.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a68a204386648ec92ae9b526c1ffca528f38221eca70f98b4709390c3204275"},
-    {file = "ujson-5.6.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e4be7d865cb5161824e12db71cee83290ab72b3523566371a30d6ba1bd63402a"},
-    {file = "ujson-5.6.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:dde59d2f06297fc4e70b2bae6e4a6b3ce89ca89697ab2c41e641abae3be96b0c"},
-    {file = "ujson-5.6.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:551408a5c4306839b4a4f91503c96069204dbef2c7ed91a9dab08874ac1ed679"},
-    {file = "ujson-5.6.0-cp311-cp311-win32.whl", hash = "sha256:ceee5aef3e234c7e998fdb52e5236c41e50cdedc116360f7f1874a04829f6490"},
-    {file = "ujson-5.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:dd5ccc036b0f4721b98e1c03ccc604e7f3e1db53866ccc92b2add40ace1782f7"},
-    {file = "ujson-5.6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7a66c5a75b46545361271b4cf55560d9ad8bad794dd054a14b3fbb031407948e"},
-    {file = "ujson-5.6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d0a60c5f065737a81249c819475d001a86da9a41900d888287e34619c9b4851"},
-    {file = "ujson-5.6.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9cf04fcc958bb52a6b6c301b780cb9afab3ec68713b17ca5aa423e1f99c2c1cf"},
-    {file = "ujson-5.6.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:24d40e01accbf4f0ba5181c4db1bac83749fdc1a5413466da582529f2a096085"},
-    {file = "ujson-5.6.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3f8b9e8c0420ce3dcc193ab6dd5628840ba79ad1b76e1816ac7ca6752c6bf035"},
-    {file = "ujson-5.6.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:0f0f21157d1a84ad5fb54388f31767cde9c1a48fb29de7ef91d8887fdc2ca92b"},
-    {file = "ujson-5.6.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:82bf24ea72a73c7d77402a7adc954931243e7ec4241d5738ae74894b53944458"},
-    {file = "ujson-5.6.0-cp37-cp37m-win32.whl", hash = "sha256:3b49a1014d396b962cb1d6c5f867f88b2c9aa9224c3860ee6ff63b2837a2965b"},
-    {file = "ujson-5.6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:74671d1bde8c03daeb92abdbd972960978347b1a1d432c4c1b3c9284ce4094cf"},
-    {file = "ujson-5.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:72fa6e850831280a46704032721c75155fd41b839ddadabb6068ab218c56a37a"},
-    {file = "ujson-5.6.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:57904e5b49ffe93189349229dcd83f73862ef9bb8517e8f1e62d0ff73f313847"},
-    {file = "ujson-5.6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:61fdf24f7bddc402ce06b25e4bed7bf5ee4f03e23028a0a09116835c21d54888"},
-    {file = "ujson-5.6.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7174e81c137d480abe2f8036e9fb69157e509f2db0bfdee4488eb61dc3f0ff6b"},
-    {file = "ujson-5.6.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1a7e4023c79d9a053c0c6b7c6ec50ea0af78381539ab27412e6af8d9410ae555"},
-    {file = "ujson-5.6.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:31288f85db6295ec63e128daff7285bb0bc220935e1b5107bd2d67e2dc687b7e"},
-    {file = "ujson-5.6.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:f3e651f04b7510fae7d4706a4600cd43457f015df08702ece82a71339fc15c3d"},
-    {file = "ujson-5.6.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:52f536712d16a1f4e0f9d084982c28e11b7e70c397a1059069e4d28d53b3f522"},
-    {file = "ujson-5.6.0-cp38-cp38-win32.whl", hash = "sha256:23051f062bb257a87f3e55ea5a055ea98d56f08185fd415b34313268fa4d814e"},
-    {file = "ujson-5.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:fb1632b27e12c0b0df62f924c362206daf246a42c0080e959dd465810dc3482e"},
-    {file = "ujson-5.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3f00dff3bf26bbb96791ceaf51ca95a3f34e2a21985748da855a650c38633b99"},
-    {file = "ujson-5.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d1b5e233e42f53bbbc6961caeb492986e9f3aeacd30be811467583203873bad2"},
-    {file = "ujson-5.6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a51cbe614acb5ea8e2006e4fd80b4e8ea7c51ae51e42c75290012f4925a9d6ab"},
-    {file = "ujson-5.6.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2aece7a92dffc9c78787f5f36e47e24b95495812270c27abc2fa430435a931d"},
-    {file = "ujson-5.6.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:20d929a27822cb79e034cc5e0bb62daa0257ab197247cb6f35d5149f2f438983"},
-    {file = "ujson-5.6.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7bde16cb18b95a8f68cc48715e4652b394b4fee68cb3f9fee0fd7d26b29a53b6"},
-    {file = "ujson-5.6.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bca3c06c3f10ce03fa80b1301dce53765815c2578a24bd141ce4e5769bb7b709"},
-    {file = "ujson-5.6.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e5715b0e2767b1987ceed0066980fc0a53421dd2f197b4f88460d474d6aef4c"},
-    {file = "ujson-5.6.0-cp39-cp39-win32.whl", hash = "sha256:a8795de7ceadf84bcef88f947f91900d647eda234a2c6cc89912c25048cc0490"},
-    {file = "ujson-5.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b9e9d26600020cf635a4e58763959f5a59f8c70f75d72ebf26ceae94c2efac74"},
-    {file = "ujson-5.6.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:798116b88158f13ed687417526100ef353ba4692e0aef8afbc622bd4bf7e9057"},
-    {file = "ujson-5.6.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c169e12642f0edf1dde607fb264721b88787b55a6da5fb3824302a9cac6f9405"},
-    {file = "ujson-5.6.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2d70b7f0b485f85141bbc518d0581ae96b912d9f8b070eaf68a9beef8eb1e60"},
-    {file = "ujson-5.6.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2cb7a4bd91de97b4c8e57fb5289d1e5f3f019723b59d01d79e2df83783dce5a6"},
-    {file = "ujson-5.6.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:ae723b8308ac17a591bb8be9478b58c2c26fada23fd2211fc323796801ad7ff5"},
-    {file = "ujson-5.6.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2a24b9a96364f943a4754fa00b47855d0a01b84ac4b8b11ebf058c8fb68c1f77"},
-    {file = "ujson-5.6.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b64d2ac99503a9a5846157631addacc9f74e23f64d5a886fe910e9662660fa10"},
-    {file = "ujson-5.6.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fadebaddd3eb71a5c986f0bdc7bb28b072bfc585c141eef37474fc66d1830b0a"},
-    {file = "ujson-5.6.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9f4efcac06f45183b6ed8e2321554739a964a02d8aa3089ec343253d86bf2804"},
-    {file = "ujson-5.6.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e97af10b6f13a498de197fb852e9242064217c25dfca79ebe7ad0cf2b0dd0cb7"},
-    {file = "ujson-5.6.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:355ef5311854936b9edc7f1ce638f8257cb45fb6b9873f6b2d16a715eafc9570"},
-    {file = "ujson-5.6.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4277f6b1d24be30b7f87ec5346a87693cbc1e55bbc5877f573381b2250c4dd6"},
-    {file = "ujson-5.6.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6f4be832d97836d62ac0c148026ec021f9f36481f38e455b51538fcd949ed2a"},
-    {file = "ujson-5.6.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bca074d08f0677f05df8170b25ce6e61db3bcdfda78062444972fa6508dc825f"},
-    {file = "ujson-5.6.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:87578ccfc35461c77e73660fb7d89bc577732f671364f442bda9e2c58b571765"},
-    {file = "ujson-5.6.0.tar.gz", hash = "sha256:f881e2d8a022e9285aa2eab6ba8674358dbcb2b57fa68618d88d62937ac3ff04"},
+    {file = "ujson-5.7.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5eba5e69e4361ac3a311cf44fa71bc619361b6e0626768a494771aacd1c2f09b"},
+    {file = "ujson-5.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:aae4d9e1b4c7b61780f0a006c897a4a1904f862fdab1abb3ea8f45bd11aa58f3"},
+    {file = "ujson-5.7.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2e43ccdba1cb5c6d3448eadf6fc0dae7be6c77e357a3abc968d1b44e265866d"},
+    {file = "ujson-5.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54384ce4920a6d35fa9ea8e580bc6d359e3eb961fa7e43f46c78e3ed162d56ff"},
+    {file = "ujson-5.7.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:24ad1aa7fc4e4caa41d3d343512ce68e41411fb92adf7f434a4d4b3749dc8f58"},
+    {file = "ujson-5.7.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:afff311e9f065a8f03c3753db7011bae7beb73a66189c7ea5fcb0456b7041ea4"},
+    {file = "ujson-5.7.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6e80f0d03e7e8646fc3d79ed2d875cebd4c83846e129737fdc4c2532dbd43d9e"},
+    {file = "ujson-5.7.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:137831d8a0db302fb6828ee21c67ad63ac537bddc4376e1aab1c8573756ee21c"},
+    {file = "ujson-5.7.0-cp310-cp310-win32.whl", hash = "sha256:7df3fd35ebc14dafeea031038a99232b32f53fa4c3ecddb8bed132a43eefb8ad"},
+    {file = "ujson-5.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:af4639f684f425177d09ae409c07602c4096a6287027469157bfb6f83e01448b"},
+    {file = "ujson-5.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9b0f2680ce8a70f77f5d70aaf3f013d53e6af6d7058727a35d8ceb4a71cdd4e9"},
+    {file = "ujson-5.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:67a19fd8e7d8cc58a169bea99fed5666023adf707a536d8f7b0a3c51dd498abf"},
+    {file = "ujson-5.7.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6abb8e6d8f1ae72f0ed18287245f5b6d40094e2656d1eab6d99d666361514074"},
+    {file = "ujson-5.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8cd622c069368d5074bd93817b31bdb02f8d818e57c29e206f10a1f9c6337dd"},
+    {file = "ujson-5.7.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:14f9082669f90e18e64792b3fd0bf19f2b15e7fe467534a35ea4b53f3bf4b755"},
+    {file = "ujson-5.7.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d7ff6ebb43bc81b057724e89550b13c9a30eda0f29c2f506f8b009895438f5a6"},
+    {file = "ujson-5.7.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:f7f241488879d91a136b299e0c4ce091996c684a53775e63bb442d1a8e9ae22a"},
+    {file = "ujson-5.7.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5593263a7fcfb934107444bcfba9dde8145b282de0ee9f61e285e59a916dda0f"},
+    {file = "ujson-5.7.0-cp311-cp311-win32.whl", hash = "sha256:26c2b32b489c393106e9cb68d0a02e1a7b9d05a07429d875c46b94ee8405bdb7"},
+    {file = "ujson-5.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:ed24406454bb5a31df18f0a423ae14beb27b28cdfa34f6268e7ebddf23da807e"},
+    {file = "ujson-5.7.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:18679484e3bf9926342b1c43a3bd640f93a9eeeba19ef3d21993af7b0c44785d"},
+    {file = "ujson-5.7.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ee295761e1c6c30400641f0a20d381633d7622633cdf83a194f3c876a0e4b7e"},
+    {file = "ujson-5.7.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b738282e12a05f400b291966630a98d622da0938caa4bc93cf65adb5f4281c60"},
+    {file = "ujson-5.7.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:00343501dbaa5172e78ef0e37f9ebd08040110e11c12420ff7c1f9f0332d939e"},
+    {file = "ujson-5.7.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c0d1f7c3908357ee100aa64c4d1cf91edf99c40ac0069422a4fd5fd23b263263"},
+    {file = "ujson-5.7.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a5d2f44331cf04689eafac7a6596c71d6657967c07ac700b0ae1c921178645da"},
+    {file = "ujson-5.7.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:16b2254a77b310f118717715259a196662baa6b1f63b1a642d12ab1ff998c3d7"},
+    {file = "ujson-5.7.0-cp37-cp37m-win32.whl", hash = "sha256:6faf46fa100b2b89e4db47206cf8a1ffb41542cdd34dde615b2fc2288954f194"},
+    {file = "ujson-5.7.0-cp37-cp37m-win_amd64.whl", hash = "sha256:ff0004c3f5a9a6574689a553d1b7819d1a496b4f005a7451f339dc2d9f4cf98c"},
+    {file = "ujson-5.7.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:75204a1dd7ec6158c8db85a2f14a68d2143503f4bafb9a00b63fe09d35762a5e"},
+    {file = "ujson-5.7.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7312731c7826e6c99cdd3ac503cd9acd300598e7a80bcf41f604fee5f49f566c"},
+    {file = "ujson-5.7.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b9dc5a90e2149643df7f23634fe202fed5ebc787a2a1be95cf23632b4d90651"},
+    {file = "ujson-5.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6a6961fc48821d84b1198a09516e396d56551e910d489692126e90bf4887d29"},
+    {file = "ujson-5.7.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b01a9af52a0d5c46b2c68e3f258fdef2eacaa0ce6ae3e9eb97983f5b1166edb6"},
+    {file = "ujson-5.7.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b7316d3edeba8a403686cdcad4af737b8415493101e7462a70ff73dd0609eafc"},
+    {file = "ujson-5.7.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4ee997799a23227e2319a3f8817ce0b058923dbd31904761b788dc8f53bd3e30"},
+    {file = "ujson-5.7.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:dda9aa4c33435147262cd2ea87c6b7a1ca83ba9b3933ff7df34e69fee9fced0c"},
+    {file = "ujson-5.7.0-cp38-cp38-win32.whl", hash = "sha256:bea8d30e362180aafecabbdcbe0e1f0b32c9fa9e39c38e4af037b9d3ca36f50c"},
+    {file = "ujson-5.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:c96e3b872bf883090ddf32cc41957edf819c5336ab0007d0cf3854e61841726d"},
+    {file = "ujson-5.7.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6411aea4c94a8e93c2baac096fbf697af35ba2b2ed410b8b360b3c0957a952d3"},
+    {file = "ujson-5.7.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3d3b3499c55911f70d4e074c626acdb79a56f54262c3c83325ffb210fb03e44d"},
+    {file = "ujson-5.7.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:341f891d45dd3814d31764626c55d7ab3fd21af61fbc99d070e9c10c1190680b"},
+    {file = "ujson-5.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f242eec917bafdc3f73a1021617db85f9958df80f267db69c76d766058f7b19"},
+    {file = "ujson-5.7.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c3af9f9f22a67a8c9466a32115d9073c72a33ae627b11de6f592df0ee09b98b6"},
+    {file = "ujson-5.7.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4a3d794afbf134df3056a813e5c8a935208cddeae975bd4bc0ef7e89c52f0ce0"},
+    {file = "ujson-5.7.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:800bf998e78dae655008dd10b22ca8dc93bdcfcc82f620d754a411592da4bbf2"},
+    {file = "ujson-5.7.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b5ac3d5c5825e30b438ea92845380e812a476d6c2a1872b76026f2e9d8060fc2"},
+    {file = "ujson-5.7.0-cp39-cp39-win32.whl", hash = "sha256:cd90027e6d93e8982f7d0d23acf88c896d18deff1903dd96140613389b25c0dd"},
+    {file = "ujson-5.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:523ee146cdb2122bbd827f4dcc2a8e66607b3f665186bce9e4f78c9710b6d8ab"},
+    {file = "ujson-5.7.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e87cec407ec004cf1b04c0ed7219a68c12860123dfb8902ef880d3d87a71c172"},
+    {file = "ujson-5.7.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bab10165db6a7994e67001733f7f2caf3400b3e11538409d8756bc9b1c64f7e8"},
+    {file = "ujson-5.7.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b522be14a28e6ac1cf818599aeff1004a28b42df4ed4d7bc819887b9dac915fc"},
+    {file = "ujson-5.7.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7592f40175c723c032cdbe9fe5165b3b5903604f774ab0849363386e99e1f253"},
+    {file = "ujson-5.7.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:ed22f9665327a981f288a4f758a432824dc0314e4195a0eaeb0da56a477da94d"},
+    {file = "ujson-5.7.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:adf445a49d9a97a5a4c9bb1d652a1528de09dd1c48b29f79f3d66cea9f826bf6"},
+    {file = "ujson-5.7.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64772a53f3c4b6122ed930ae145184ebaed38534c60f3d859d8c3f00911eb122"},
+    {file = "ujson-5.7.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:35209cb2c13fcb9d76d249286105b4897b75a5e7f0efb0c0f4b90f222ce48910"},
+    {file = "ujson-5.7.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:90712dfc775b2c7a07d4d8e059dd58636bd6ff1776d79857776152e693bddea6"},
+    {file = "ujson-5.7.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:0e4e8981c6e7e9e637e637ad8ffe948a09e5434bc5f52ecbb82b4b4cfc092bfb"},
+    {file = "ujson-5.7.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:581c945b811a3d67c27566539bfcb9705ea09cb27c4be0002f7a553c8886b817"},
+    {file = "ujson-5.7.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d36a807a24c7d44f71686685ae6fbc8793d784bca1adf4c89f5f780b835b6243"},
+    {file = "ujson-5.7.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b4257307e3662aa65e2644a277ca68783c5d51190ed9c49efebdd3cbfd5fa44"},
+    {file = "ujson-5.7.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea7423d8a2f9e160c5e011119741682414c5b8dce4ae56590a966316a07a4618"},
+    {file = "ujson-5.7.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:4c592eb91a5968058a561d358d0fef59099ed152cfb3e1cd14eee51a7a93879e"},
+    {file = "ujson-5.7.0.tar.gz", hash = "sha256:e788e5d5dcae8f6118ac9b45d0b891a0d55f7ac480eddcb7f07263f2bcf37b23"},
 ]
 url-normalize = [
     {file = "url-normalize-1.4.3.tar.gz", hash = "sha256:d23d3a070ac52a67b83a1c59a0e68f8608d1cd538783b401bc9de2c0fac999b2"},
     {file = "url_normalize-1.4.3-py2.py3-none-any.whl", hash = "sha256:ec3c301f04e5bb676d333a7fa162fa977ad2ca04b7e652bfc9fac4e405728eed"},
 ]
 urllib3 = [
-    {file = "urllib3-1.26.13-py2.py3-none-any.whl", hash = "sha256:47cc05d99aaa09c9e72ed5809b60e7ba354e64b59c9c173ac3018642d8bb41fc"},
-    {file = "urllib3-1.26.13.tar.gz", hash = "sha256:c083dd0dce68dbfbe1129d5271cb90f9447dea7d52097c6e0126120c521ddea8"},
+    {file = "urllib3-1.26.15-py2.py3-none-any.whl", hash = "sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42"},
+    {file = "urllib3-1.26.15.tar.gz", hash = "sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305"},
 ]
 virtualenv = [
-    {file = "virtualenv-20.17.1-py3-none-any.whl", hash = "sha256:ce3b1684d6e1a20a3e5ed36795a97dfc6af29bc3970ca8dab93e11ac6094b3c4"},
-    {file = "virtualenv-20.17.1.tar.gz", hash = "sha256:f8b927684efc6f1cc206c9db297a570ab9ad0e51c16fa9e45487d36d1905c058"},
+    {file = "virtualenv-20.21.0-py3-none-any.whl", hash = "sha256:31712f8f2a17bd06234fa97fdf19609e789dd4e3e4bf108c3da71d710651adbc"},
+    {file = "virtualenv-20.21.0.tar.gz", hash = "sha256:f50e3e60f990a0757c9b68333c9fdaa72d7188caa417f96af9e52407831a3b68"},
 ]
 zipp = [
-    {file = "zipp-3.11.0-py3-none-any.whl", hash = "sha256:83a28fcb75844b5c0cdaf5aa4003c2d728c77e05f5aeabe8e95e56727005fbaa"},
-    {file = "zipp-3.11.0.tar.gz", hash = "sha256:a7a22e05929290a67401440b39690ae6563279bced5f314609d9d03798f56766"},
+    {file = "zipp-3.15.0-py3-none-any.whl", hash = "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556"},
+    {file = "zipp-3.15.0.tar.gz", hash = "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b"},
 ]
diff --git a/pyproject.toml b/pyproject.toml
index e918106..9c647b5 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,17 +1,17 @@
 [tool.poetry]
 name = "requests-cache"
-version = "0.9.8"
-description = "A transparent persistent cache for the requests library"
+version = "1.0.1"
+description = "A persistent cache for python requests"
 authors = ["Roman Haritonov", "Jordan Cook"]
 license = "BSD-2-Clause"
 readme = "README.md"
 documentation = "https://requests-cache.readthedocs.io"
 homepage = "https://github.com/requests-cache/requests-cache"
 repository = "https://github.com/requests-cache/requests-cache"
-keywords = ["requests", "cache", "http", "python-requests", "web", "performance", "sqlite", "redis",
-            "mongodb", "gridfs", "dynamodb"]
+keywords = ["requests", "python-requests", "cache", "http", "http-client", "web",
+            "webscraping", "performance", "sqlite", "redis", "mongodb", "gridfs", "dynamodb"]
 classifiers = [
-    "Development Status :: 4 - Beta",
+    "Development Status :: 5 - Production/Stable",
     "Intended Audience :: Developers",
     "Topic :: Software Development :: Libraries :: Python Modules",
     "Typing :: Typed",
@@ -24,16 +24,20 @@ include = [
     {format="sdist", path="tests"},
 ]
 
+[tool.poetry.urls]
+"Changelog" = "https://requests-cache.readthedocs.io/en/stable/project_info/history.html"
+"Issues" = "https://github.com/requests-cache/requests-cache/issues"
+
 [tool.poetry.dependencies]
 python        = "^3.7"     # requests requires python >=3.7, <4.0
 
 # Required dependencies
 requests      = ">=2.22"   # Needs no introduction
 urllib3       = ">=1.25.5" # Use a slightly newer version than required by requests (for bugfixes)
-appdirs       = ">=1.4.4"  # For options that use platform-specific user cache dirs
 attrs         = ">=21.2"   # For response data models
 cattrs        = ">=22.2"   # For response serialization
-url-normalize = ">=1.4"    # For improved request matching
+platformdirs  = ">=2.5"    # For features that use platform-specific system directories
+url-normalize = ">=1.4"    # For more accurate request matching
 
 # Optional backend dependencies
 boto3                      = {optional=true, version=">=1.15"}
@@ -45,21 +49,21 @@ redis                      = {optional=true, version=">=3"}
 bson                       = {optional=true, version=">=0.5"}
 itsdangerous               = {optional=true, version=">=2.0"}
 pyyaml                     = {optional=true, version=">=5.4"}
-ujson                      = {optional=true, version=">=4.0"}
+ujson                      = {optional=true, version=">=5.4"}
 
-# All the bells and whistles for building documentation;
+# Dependencies for building documentation;
 # defined here because readthedocs doesn't (yet?) support poetry.dev-dependencies
-furo                       = {optional=true, version=">=2021.9.8"}
-linkify-it-py              = {optional=true, version="^1.0.1"}
-myst-parser                = {optional=true, version="^0.15.1"}
-sphinx                     = {optional=true, version="4.3.0"}
-sphinx-autodoc-typehints   = {optional=true, version="^1.11"}
-sphinx-automodapi          = {optional=true, version=">=0.13,<0.15"}
-sphinx-copybutton          = {optional=true, version=">=0.3,<0.5"}
-sphinx-inline-tabs         = {optional=true, version=">=2022.1.2b11", python=">=3.8"}
+furo                       = {optional=true, version="^2022.12.7"}
+linkify-it-py              = {optional=true, version="^2.0"}
+myst-parser                = {optional=true, version="^1.0"}
+sphinx                     = {optional=true, version="^5.0.2"}
+sphinx-autodoc-typehints   = {optional=true, version=">=1.19"}
+sphinx-automodapi          = {optional=true, version=">=0.14"}
+sphinx-copybutton          = {optional=true, version=">=0.5"}
+sphinx-design              = {optional=true, version=">=0.2"}
 sphinx-notfound-page       = {optional=true, version=">=0.8"}
-sphinx-panels              = {optional=true, version="^0.6"}
-sphinxcontrib-apidoc       = {optional=true, version="^0.3"}
+sphinxcontrib-apidoc       = {optional=true, version=">=0.3"}
+sphinxext-opengraph        = {optional=true, version=">=0.6"}
 
 [tool.poetry.extras]
 # Package extras for optional backend dependencies
@@ -78,31 +82,35 @@ all      = ["boto3", "botocore", "itsdangerous", "pymongo", "pyyaml", "redis", "
 
 # Documentation
 docs     = ["furo", "linkify-it-py", "myst-parser", "sphinx", "sphinx-autodoc-typehints",
-            "sphinx-automodapi", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-panels",
-            "sphinx-notfound-page", "sphinxcontrib-apidoc"]
+            "sphinx-automodapi", "sphinx-copybutton", "sphinx-design", "sphinx-notfound-page",
+            "sphinxcontrib-apidoc", "sphinxext-opengraph"]
 
 [tool.poetry.dev-dependencies]
 # For unit + integration tests
-coverage              = "^6.3"
+coverage              = "^7.1"
 psutil                = "^5.0"
-pytest                = "^6.2"
+pytest                = "^7.2"
 pytest-clarity        = "^1.0.1"
 pytest-cov            = ">=3.0"
 pytest-rerunfailures  = "^10.1"
 pytest-xdist          = ">=2.2"
-requests-mock         = "^1.8"
-responses             = "0.16.0"
+requests-mock         = "^1.9"
+responses             = "0.19.0"
+tenacity              = "^8.0"
 timeout-decorator     = "^0.5"
 
 # Tools for linting, type checking, etc. are managed with pre-commit
-pre-commit            = "^2.15"
+pre-commit            = "^2.19"
 
 # For convenience in local development
-nox                   = "^2021.10.1"
-nox-poetry            = "^0.9.0"
+nox                   = "^2022.1.7"
+nox-poetry            = "^1.0.0"
 rich                  = ">=10.0"
 sphinx-autobuild      = "^2021.3.14"
 
+# Workaround for missing dependency on python 3.7
+zipp                  = {version = ">=3.8", python = "<3.8"}
+
 [build-system]
 requires = ["poetry-core>=1.0.0"]
 build-backend = "poetry.core.masonry.api"
@@ -131,6 +139,8 @@ omit = [
 exclude_lines = [
     'pragma: no cover',
     'if TYPE_CHECKING:',
+    'if logger.level',
+    'except ImportError:',
 ]
 
 [tool.isort]
@@ -139,7 +149,9 @@ line_length = 100
 skip_gitignore = true
 skip = [
     'examples/',
-    'requests_cache/__init__.py',
     'tests/compat/',
 ]
 known_first_party = ['tests']
+
+[tool.mypy]
+ignore_missing_imports = true
diff --git a/requests_cache/__init__.py b/requests_cache/__init__.py
index 1033725..ad4115d 100644
--- a/requests_cache/__init__.py
+++ b/requests_cache/__init__.py
@@ -2,7 +2,7 @@
 
 # Version is defined in pyproject.toml.
 # It's copied here to make it easier for client code to check the installed version.
-__version__ = '0.9.8'
+__version__ = '1.0.1'
 
 from .backends import *
 from .cache_keys import *
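Since __version__ exists specifically so client code can check the installed version, a minimal
sketch of such a check might look like the following (the minimum-version guard is illustrative,
not taken from the diff):

    import requests_cache

    # Mirrors the version declared in pyproject.toml (1.0.1 in this release)
    print(requests_cache.__version__)

    # Hypothetical guard for code that relies on 1.0+ behavior
    if int(requests_cache.__version__.split('.')[0]) < 1:
        raise RuntimeError('requests-cache >= 1.0 is required')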
diff --git a/requests_cache/_utils.py b/requests_cache/_utils.py
index 63212c0..4dbc4ec 100644
--- a/requests_cache/_utils.py
+++ b/requests_cache/_utils.py
@@ -1,14 +1,15 @@
-"""Miscellaneous minor utility functions that don't really belong anywhere else"""
+"""Minor internal utility functions that don't really belong anywhere else"""
 from inspect import signature
 from logging import getLogger
-from typing import Any, Callable, Dict, Iterable, Iterator, List
+from typing import Any, Callable, Dict, Iterable, Iterator, List, Optional, Tuple
 
+KwargDict = Dict[str, Any]
 logger = getLogger('requests_cache')
 
 
-def chunkify(iterable: Iterable, max_size: int) -> Iterator[List]:
+def chunkify(iterable: Optional[Iterable], max_size: int) -> Iterator[List]:
     """Split an iterable into chunks of a max size"""
-    iterable = list(iterable)
+    iterable = list(iterable or [])
     for index in range(0, len(iterable), max_size):
         yield iterable[index : index + max_size]
 
@@ -18,13 +19,29 @@ def coalesce(*values: Any, default=None) -> Any:
     return next((v for v in values if v is not None), default)
 
 
-def get_placeholder_class(original_exception: Exception = None):
+def decode(value, encoding='utf-8') -> str:
+    """Decode a value from bytes, if hasn't already been.
+    Note: ``PreparedRequest.body`` is always encoded in utf-8.
+    """
+    if not value:
+        return ''
+    return value.decode(encoding) if isinstance(value, bytes) else value
+
+
+def encode(value, encoding='utf-8') -> bytes:
+    """Encode a value to bytes, if it hasn't already been"""
+    if not value:
+        return b''
+    return value if isinstance(value, bytes) else str(value).encode(encoding)
+
+
+def get_placeholder_class(original_exception: Optional[Exception] = None):
     """Create a placeholder type for a class that does not have dependencies installed.
     This allows delaying ImportErrors until init time, rather than at import time.
     """
-    msg = 'Dependencies are not installed for this feature'
 
     def _log_error():
+        msg = 'Dependencies are not installed for this feature'
         logger.error(msg)
         raise original_exception or ImportError(msg)
 
@@ -32,9 +49,6 @@ def get_placeholder_class(original_exception: Exception = None):
         def __init__(self, *args, **kwargs):
             _log_error()
 
-        def __getattr__(self, *args, **kwargs):
-            _log_error()
-
         def dumps(self, *args, **kwargs):
             _log_error()
 
@@ -44,8 +58,30 @@ def get_placeholder_class(original_exception: Exception = None):
     return Placeholder
 
 
-def get_valid_kwargs(func: Callable, kwargs: Dict, extras: Iterable[str] = None) -> Dict:
-    """Get the subset of non-None ``kwargs`` that are valid params for ``func``"""
+def get_valid_kwargs(
+    func: Callable, kwargs: Dict, extras: Optional[Iterable[str]] = None
+) -> KwargDict:
+    """Get the subset of non-None ``kwargs`` that are valid arguments for ``func``"""
+    kwargs, _ = split_kwargs(func, kwargs, extras)
+    return {k: v for k, v in kwargs.items() if v is not None}
+
+
+def split_kwargs(
+    func: Callable, kwargs: Dict, extras: Optional[Iterable[str]] = None
+) -> Tuple[KwargDict, KwargDict]:
+    """Split ``kwargs`` into two dicts: those that are valid arguments for ``func``,  and those that
+    are not
+    """
     params = list(signature(func).parameters)
     params.extend(extras or [])
-    return {k: v for k, v in kwargs.items() if k in params and v is not None}
+    valid_kwargs = {k: v for k, v in kwargs.items() if k in params}
+    invalid_kwargs = {k: v for k, v in kwargs.items() if k not in params}
+    return valid_kwargs, invalid_kwargs
+
+
+def try_int(value: Any) -> Optional[int]:
+    """Convert a value to an int, if possible, otherwise ``None``"""
+    try:
+        return int(value)
+    except (TypeError, ValueError):
+        return None
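For context, the new private helpers behave roughly as sketched below (a hand-written
illustration against requests_cache._utils, not part of the upstream diff; these are
internal functions, not public API):

    from requests_cache._utils import (
        chunkify, coalesce, decode, encode, get_valid_kwargs, try_int
    )

    list(chunkify([1, 2, 3, 4, 5], max_size=2))  # [[1, 2], [3, 4], [5]]
    list(chunkify(None, max_size=2))             # [] (None is now tolerated)
    coalesce(None, None, default='fallback')     # 'fallback'
    decode(b'body'), encode('body')              # ('body', b'body'); empty input gives ''/b''
    try_int('42'), try_int('n/a')                # (42, None)

    def connect(host, port=80):
        pass

    # Drops kwargs that aren't parameters of connect(), plus any None values
    get_valid_kwargs(connect, {'host': 'example.com', 'port': None, 'retries': 3})
    # -> {'host': 'example.com'}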
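The get_placeholder_class() mechanism works roughly like this (module and class names here
are hypothetical; the real call sites appear in requests_cache/backends/__init__.py below):

    from requests_cache._utils import get_placeholder_class

    try:
        from fancy_backend import FancyCache  # hypothetical optional dependency
    except ImportError as e:
        FancyCache = get_placeholder_class(e)

    # The import never fails outright; the original ImportError is deferred until
    # the placeholder is instantiated (or its dumps()/loads() methods are called)
    cache = FancyCache()  # raises the deferred ImportError if fancy_backend was missing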
diff --git a/requests_cache/backends/__init__.py b/requests_cache/backends/__init__.py
index dcd97de..d916ab8 100644
--- a/requests_cache/backends/__init__.py
+++ b/requests_cache/backends/__init__.py
@@ -4,27 +4,12 @@ from logging import getLogger
 from typing import Callable, Dict, Iterable, Optional, Type, Union
 
 from .._utils import get_placeholder_class, get_valid_kwargs
-from .base import KEY_FN, BaseCache, BaseStorage, DictStorage
+from .base import BaseCache, BaseStorage, DictStorage
 
 # Backend-specific keyword arguments equivalent to 'cache_name'
 CACHE_NAME_KWARGS = ['db_path', 'db_name', 'namespace', 'table_name']
 
-# All backend-specific keyword arguments
-BACKEND_KWARGS = CACHE_NAME_KWARGS + [
-    'connection',
-    'endpoint_url',
-    'fast_save',
-    'ignored_parameters',
-    'match_headers',
-    'name',
-    'read_capacity_units',
-    'region_name',
-    'salt',
-    'secret_key',
-    'write_capacity_units',
-]
-
-BackendSpecifier = Union[str, BaseCache, Type[BaseCache]]
+BackendSpecifier = Union[str, BaseCache]
 logger = getLogger(__name__)
 
 
@@ -33,32 +18,32 @@ try:
     from .dynamodb import DynamoDbCache, DynamoDbDict
 except ImportError as e:
     DynamoDbCache = DynamoDbDict = get_placeholder_class(e)  # type: ignore
+
 try:
-    from .gridfs import GridFSCache, GridFSPickleDict
+    from .gridfs import GridFSCache, GridFSDict
 except ImportError as e:
-    GridFSCache = GridFSPickleDict = get_placeholder_class(e)  # type: ignore
+    GridFSCache = GridFSDict = get_placeholder_class(e)  # type: ignore
+
 try:
-    from .mongodb import MongoCache, MongoDict, MongoPickleDict
+    from .mongodb import MongoCache, MongoDict
 except ImportError as e:
-    MongoCache = MongoDict = MongoPickleDict = get_placeholder_class(e)  # type: ignore
+    MongoCache = MongoDict = get_placeholder_class(e)  # type: ignore
+
 try:
     from .redis import RedisCache, RedisDict, RedisHashDict
 except ImportError as e:
     RedisCache = RedisDict = RedisHashDict = get_placeholder_class(e)  # type: ignore
+
 try:
-    # Note: Heroku doesn't support SQLite due to ephemeral storage
-    from .sqlite import SQLiteCache, SQLiteDict, SQLitePickleDict
+    from .sqlite import SQLiteCache, SQLiteDict
 except ImportError as e:
-    SQLiteCache = SQLiteDict = SQLitePickleDict = get_placeholder_class(e)  # type: ignore
+    SQLiteCache = SQLiteDict = get_placeholder_class(e)  # type: ignore
+
 try:
     from .filesystem import FileCache, FileDict
 except ImportError as e:
     FileCache = FileDict = get_placeholder_class(e)  # type: ignore
 
-# Aliases for backwards-compatibility
-DbCache = SQLiteCache
-DbDict = SQLiteDict
-DbPickleDict = SQLitePickleDict
 
 BACKEND_CLASSES = {
     'dynamodb': DynamoDbCache,
@@ -71,38 +56,36 @@ BACKEND_CLASSES = {
 }
 
 
-def init_backend(cache_name: str, backend: Optional[BackendSpecifier], **kwargs) -> BaseCache:
+def init_backend(
+    cache_name: str, backend: Optional[BackendSpecifier] = None, **kwargs
+) -> BaseCache:
     """Initialize a backend from a name, class, or instance"""
     logger.debug(f'Initializing backend: {backend} {cache_name}')
 
     # The 'cache_name' arg has a different purpose depending on the backend. If an equivalent
-    # backend-specific keyword arg is specified, handle that here to avoid conflicts. A consistent
-    # positional-only or keyword-only arg would be better, but probably not worth a breaking change.
+    # backend-specific keyword arg is specified, handle that here to avoid conflicts with the
+    # 'cache_name' positional-or-keyword arg. In hindsight, a consistent positional-only or
+    # keyword-only arg would have been better, but probably not worth a breaking change.
     cache_name_kwargs = [kwargs.pop(k) for k in CACHE_NAME_KWARGS if k in kwargs]
     cache_name = cache_name or cache_name_kwargs[0]
 
-    # Determine backend class
+    # Already a backend instance
     if isinstance(backend, BaseCache):
-        return _set_backend_kwargs(cache_name, backend, **kwargs)
-    elif isinstance(backend, type):
-        return backend(cache_name, **kwargs)
+        if cache_name:
+            backend.cache_name = cache_name
+        return backend
+    # If no backend is specified, use SQLite as default, unless the environment doesn't support it
+    # TODO: Deprecate fallback to memory?
+    #   Unsupported SQLite is a rare case, and should probably be handled by the user instead.
     elif not backend:
         sqlite_supported = issubclass(BACKEND_CLASSES['sqlite'], BaseCache)
         backend = 'sqlite' if sqlite_supported else 'memory'
 
+    # Get backend class by name
     backend = str(backend).lower()
     if backend not in BACKEND_CLASSES:
-        raise ValueError(f'Invalid backend: {backend}. Choose from: {BACKEND_CLASSES.keys()}')
-
+        raise ValueError(
+            f'Invalid backend: {backend}. Provide a backend instance, or choose from one of the '
+            f'following aliases: {list(BACKEND_CLASSES.keys())}'
+        )
     return BACKEND_CLASSES[backend](cache_name, **kwargs)
-
-
-def _set_backend_kwargs(cache_name, backend, **kwargs):
-    """Set any backend arguments if they are passed along with a backend instance"""
-    backend_kwargs = get_valid_kwargs(BaseCache.__init__, kwargs)
-    backend_kwargs.setdefault('match_headers', kwargs.pop('include_get_headers', False))
-    for k, v in backend_kwargs.items():
-        setattr(backend, k, v)
-    if cache_name:
-        backend.cache_name = cache_name
-    return backend
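
With the changes above, ``backend`` may be an alias string or a ``BaseCache`` instance, but no longer a class. A minimal sketch of both forms (cache names below are placeholders):

    >>> from requests_cache import CachedSession
    >>> from requests_cache.backends import SQLiteCache
    >>> # Pass a configured backend instance directly
    >>> session = CachedSession(backend=SQLiteCache('demo_cache'))
    >>> # Or pass one of the registered alias strings
    >>> session = CachedSession('demo_cache', backend='sqlite')
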
diff --git a/requests_cache/backends/base.py b/requests_cache/backends/base.py
index eb543c0..d64abc1 100644
--- a/requests_cache/backends/base.py
+++ b/requests_cache/backends/base.py
@@ -1,32 +1,29 @@
-"""Base classes for all cache backends.
+"""Base classes for all cache backends
 
 .. automodsumm:: requests_cache.backends.base
    :classes-only:
    :nosignatures:
 """
-import pickle
+from __future__ import annotations
+
 from abc import ABC
 from collections import UserDict
-from collections.abc import MutableMapping
 from datetime import datetime
 from logging import getLogger
-from typing import Callable, Iterable, Iterator, List, Optional, Union
+from pickle import PickleError
+from typing import TYPE_CHECKING, Iterable, Iterator, List, MutableMapping, Optional, TypeVar
 from warnings import warn
 
-from requests import Request
+from requests import Request, Response
 
 from ..cache_keys import create_key, redact_response
-from ..models import AnyRequest, AnyResponse, CachedResponse
-from ..policy import ExpirationTime, get_expiration_datetime
-from ..serializers import init_serializer
+from ..models import AnyRequest, CachedResponse
+from ..policy import DEFAULT_CACHE_NAME, CacheSettings, ExpirationTime
+from ..serializers import SerializerType, init_serializer
 
 # Specific exceptions that may be raised during deserialization
-DESERIALIZE_ERRORS = (AttributeError, ImportError, TypeError, ValueError, pickle.PickleError)
-
-# Signature for user-provided callback
-KEY_FN = Callable[..., str]
+DESERIALIZE_ERRORS = (AttributeError, ImportError, PickleError, TypeError, ValueError)
 
-ResponseOrKey = Union[CachedResponse, str]
 logger = getLogger(__name__)
 
 
@@ -35,39 +32,30 @@ class BaseCache:
 
     This manages higher-level cache operations, including:
 
-    * Cache expiration
-    * Generating cache keys
+    * Saving and retrieving responses
     * Managing redirect history
     * Convenience methods for general cache info
+    * Dict-like wrapper methods around the underlying storage
+
+    Notes:
 
-    Lower-level storage operations are handled by :py:class:`.BaseStorage`.
+    * Lower-level storage operations are handled by :py:class:`.BaseStorage`.
+    * To extend this with your own custom backend, see :ref:`custom-backends`.
 
-    To extend this with your own custom backend, see :ref:`custom-backends`.
+    Args:
+        cache_name: Cache prefix or namespace, depending on backend
+        serializer: Serializer name or instance
+        kwargs: Additional backend-specific keyword arguments
     """
 
-    def __init__(
-        self,
-        cache_name: str = 'http_cache',
-        match_headers: Union[Iterable[str], bool] = False,
-        ignored_parameters: Iterable[str] = None,
-        key_fn: KEY_FN = None,
-        **kwargs,
-    ):
-        self.responses: BaseStorage = DictStorage()
-        self.redirects: BaseStorage = DictStorage()
+    def __init__(self, cache_name: str = DEFAULT_CACHE_NAME, **kwargs):
         self.cache_name = cache_name
-        self.ignored_parameters = ignored_parameters
-        self.key_fn = key_fn or create_key
-        self.match_headers = match_headers or kwargs.pop('include_get_headers', False)
-
-    @property
-    def urls(self) -> Iterator[str]:
-        """Get all URLs currently in the cache (excluding redirects)"""
-        for key in self.responses:
-            try:
-                yield self.responses[key].url
-            except DESERIALIZE_ERRORS:
-                pass
+        self.responses: BaseStorage[str, CachedResponse] = DictStorage()
+        self.redirects: BaseStorage[str, str] = DictStorage()
+        self._settings = CacheSettings()  # Init and public access is done in CachedSession
+
+    # Main cache operations
+    # ---------------------
 
     def get_response(self, key: str, default=None) -> Optional[CachedResponse]:
         """Retrieve a response from the cache, if it exists
@@ -80,16 +68,16 @@ class BaseCache:
             response = self.responses.get(key)
             if response is None:  # Note: bool(requests.Response) is False if status > 400
                 response = self.responses[self.redirects[key]]
-            response.cache_key = key
             return response
-        except KeyError:
-            return default
-        except DESERIALIZE_ERRORS as e:
-            logger.error(f'Unable to deserialize response with key {key}: {str(e)}')
-            logger.debug(e, exc_info=True)
+        except (AttributeError, KeyError):
             return default
 
-    def save_response(self, response: AnyResponse, cache_key: str = None, expires: datetime = None):
+    def save_response(
+        self,
+        response: Response,
+        cache_key: Optional[str] = None,
+        expires: Optional[datetime] = None,
+    ):
         """Save a response to the cache
 
         Args:
@@ -99,38 +87,47 @@ class BaseCache:
         """
         cache_key = cache_key or self.create_key(response.request)
         cached_response = CachedResponse.from_response(response, expires=expires)
-        cached_response = redact_response(cached_response, self.ignored_parameters)
+        cached_response = redact_response(cached_response, self._settings.ignored_parameters)
         self.responses[cache_key] = cached_response
         for r in response.history:
             self.redirects[self.create_key(r.request)] = cache_key
 
-    def bulk_delete(self, keys: Iterable[str]):
-        """Remove multiple responses and their associated redirects from the cache"""
-        self.responses.bulk_delete(keys)
-        # Remove any redirects that no longer point to an existing response
-        invalid_redirects = [k for k, v in self.redirects.items() if v not in self.responses]
-        self.redirects.bulk_delete(set(keys) | set(invalid_redirects))
-
     def clear(self):
         """Delete all items from the cache"""
         logger.info('Clearing all items from the cache')
         self.responses.clear()
         self.redirects.clear()
 
-    def create_key(self, request: AnyRequest = None, **kwargs) -> str:
+    def close(self):
+        """Close any open backend connections"""
+        logger.debug('Closing backend connections')
+        self.responses.close()
+        self.redirects.close()
+
+    def create_key(
+        self,
+        request: AnyRequest,
+        match_headers: Optional[Iterable[str]] = None,
+        **kwargs,
+    ) -> str:
         """Create a normalized cache key from a request object"""
-        return self.key_fn(
+        key_fn = self._settings.key_fn if self._settings.key_fn is not None else create_key
+        return key_fn(
             request=request,
-            ignored_parameters=self.ignored_parameters,
-            match_headers=self.match_headers,
+            ignored_parameters=self._settings.ignored_parameters,
+            match_headers=match_headers or self._settings.match_headers,
+            serializer=self.responses.serializer,
             **kwargs,
         )
 
+    # Convenience methods
+    # --------------------
+
     def contains(
         self,
-        key: str = None,
-        request: AnyRequest = None,
-        url: str = None,
+        key: Optional[str] = None,
+        request: Optional[AnyRequest] = None,
+        url: Optional[str] = None,
     ):
         """Check if the specified request is cached
 
@@ -150,8 +147,9 @@ class BaseCache:
         *keys: str,
         expired: bool = False,
         invalid: bool = False,
-        requests: Iterable[AnyRequest] = None,
-        urls: Iterable[str] = None,
+        older_than: ExpirationTime = None,
+        requests: Optional[Iterable[AnyRequest]] = None,
+        urls: Optional[Iterable[str]] = None,
     ):
         """Remove responses from the cache according one or more conditions.
 
@@ -159,6 +157,7 @@ class BaseCache:
             keys: Remove responses with these cache keys
             expired: Remove all expired responses
             invalid: Remove all invalid responses (that can't be deserialized with current settings)
+            older_than: Remove responses older than this value, relative to ``response.created_at``
             requests: Remove matching responses, according to current request matching settings
             urls: Remove matching GET requests for the specified URL(s)
         """
@@ -168,12 +167,20 @@ class BaseCache:
         if requests:
             delete_keys += [self.create_key(request) for request in requests]
 
-        for response in self.filter(valid=False, expired=expired, invalid=invalid):
-            if response.cache_key:
-                delete_keys.append(response.cache_key)
+        for response in self.filter(
+            valid=False, expired=expired, invalid=invalid, older_than=older_than
+        ):
+            delete_keys.append(response.cache_key)
 
         logger.debug(f'Deleting up to {len(delete_keys)} responses')
-        self.responses.bulk_delete(delete_keys)
+        # For some backends, we don't want to use bulk_delete if there's only one key
+        if len(delete_keys) == 1:
+            try:
+                del self.responses[delete_keys[0]]
+            except KeyError:
+                pass
+        else:
+            self.responses.bulk_delete(delete_keys)
         self._prune_redirects()
 
     def _prune_redirects(self):
@@ -186,16 +193,18 @@ class BaseCache:
         valid: bool = True,
         expired: bool = True,
         invalid: bool = False,
+        older_than: ExpirationTime = None,
     ) -> Iterator[CachedResponse]:
-        """Get responses from the cache, with optional filters
+        """Get responses from the cache, with optional filters for which responses to include:
 
         Args:
             valid: Include valid and unexpired responses; set to ``False`` to get **only**
                 expired/invalid/old responses
             expired: Include expired responses
             invalid: Include invalid responses (as an empty ``CachedResponse``)
+            older_than: Get responses older than this value, relative to ``response.created_at``
         """
-        if not any([valid, expired, invalid]):
+        if not any([valid, expired, invalid, older_than]):
             return
         for key in self.responses.keys():
             response = self.get_response(key)
@@ -206,28 +215,45 @@ class BaseCache:
                 response.cache_key = key
                 yield response
             elif response is not None and (
-                (valid and not response.is_expired) or (expired and response.is_expired)
+                (valid and not response.is_expired)
+                or (expired and response.is_expired)
+                or (older_than and response.is_older_than(older_than))
             ):
                 yield response
 
+    def recreate_keys(self):
+        """Recreate cache keys for all previously cached responses"""
+        logger.debug('Recreating all cache keys')
+        old_keys = list(self.responses.keys())
+
+        for old_cache_key in old_keys:
+            response = self.responses[old_cache_key]
+            new_cache_key = self.create_key(response.request)
+            if new_cache_key != old_cache_key:
+                self.responses[new_cache_key] = response
+                del self.responses[old_cache_key]
+
     def reset_expiration(self, expire_after: ExpirationTime = None):
-        """Set a new expiration value on existing cache items
+        """Set a new expiration value to set on existing cache items
 
         Args:
             expire_after: New expiration value, **relative to the current time**
         """
-        expires = get_expiration_datetime(expire_after)
-        logger.info(f'Resetting expiration with: {expires}')
+        logger.info(f'Resetting expiration with: {expire_after}')
         for response in self.filter():
-            response.expires = expires
+            response.reset_expiration(expire_after)
             self.responses[response.cache_key] = response
 
-    def update(self, other: 'BaseCache'):
+    def update(self, other: 'BaseCache'):  # type: ignore
         """Update this cache with the contents of another cache"""
         logger.debug(f'Copying {len(other.responses)} responses from {repr(other)} to {repr(self)}')
         self.responses.update(other.responses)
         self.redirects.update(other.redirects)
 
+    def urls(self, **kwargs) -> List[str]:
+        """Get all unique cached URLs. Optionally takes keyword arguments for :py:meth:`.filter`."""
+        return sorted({response.url for response in self.filter(**kwargs)})
+
     def __str__(self):
         return f'<{self.__class__.__name__}(name={self.cache_name})>'
 
@@ -235,6 +261,14 @@ class BaseCache:
         return str(self)
 
     # Deprecated methods
+    #
+    # Note: delete_urls(), has_key(), keys(), values(), and response_count() were added relatively
+    # recently and appear to not be widely used, so these will likely be removed within 1 or 2
+    # minor releases.
+    #
+    # The methods delete_url(), has_url() and remove_expired_responses() have been around for longer
+    # and have appeared in various examples in the docs, so these will likely stick around longer
+    # (or could be kept indefinitely if someone really needs them)
     # --------------------
 
     def delete_url(self, url: str, method: str = 'GET', **kwargs):
@@ -253,10 +287,10 @@ class BaseCache:
 
     def has_key(self, key: str) -> bool:
         warn(
-            'BaseCache.has_key() is deprecated; please use `key in cache.responses` instead',
+            'BaseCache.has_key() is deprecated; please use .contains() instead',
             DeprecationWarning,
         )
-        return key in self.responses
+        return self.contains(key)
 
     def has_url(self, url: str, method: str = 'GET', **kwargs) -> bool:
         warn(
@@ -272,8 +306,10 @@ class BaseCache:
             DeprecationWarning,
         )
         yield from self.redirects.keys()
-        for response in self.filter(expired=not check_expiry):
-            if response.cache_key:
+        if not check_expiry:
+            yield from self.responses.keys()
+        else:
+            for response in self.filter(expired=False):
                 yield response.cache_key
 
     def response_count(self, check_expiry: bool = False) -> int:
@@ -295,48 +331,49 @@ class BaseCache:
         self.delete(expired=True, invalid=True)
 
     def values(self, check_expiry: bool = False) -> Iterator[CachedResponse]:
-        warn('BaseCache.values() is deprecated; please use .filter() instead', DeprecationWarning)
+        warn(
+            'BaseCache.values() is deprecated; '
+            'please use .filter() or BaseCache.responses.values() instead',
+            DeprecationWarning,
+        )
         yield from self.filter(expired=not check_expiry)
 
 
-class BaseStorage(MutableMapping, ABC):
-    """Base class for backend storage implementations. This provides a common dictionary-like
-    interface for the underlying storage operations (create, read, update, delete). One
-    ``BaseStorage`` instance corresponds to a single table/hash/collection, or whatever the
-    backend-specific equivalent may be.
+KT = TypeVar('KT')
+VT = TypeVar('VT')
+
 
-    ``BaseStorage`` subclasses contain no behavior specific to ``requests`` or caching, which are
-    handled by :py:class:`.BaseCache`.
+class BaseStorage(MutableMapping[KT, VT], ABC):
+    """Base class for client-agnostic storage implementations. Notes:
 
-    ``BaseStorage`` also contains a serializer module or instance (defaulting to :py:mod:`pickle`),
-    which determines how :py:class:`.CachedResponse` objects are saved internally. See
-    :ref:`serializers` for details.
+    * This provides a common dictionary-like interface for the underlying storage operations
+      (create, read, update, delete).
+    * One ``BaseStorage`` instance corresponds to a single table/hash/collection, or whatever the
+      backend-specific equivalent may be.
+    * ``BaseStorage`` subclasses contain no behavior specific to ``requests``; that is handled by
+      :py:class:`.BaseCache` subclasses.
+    * ``BaseStorage`` also contains a serializer object (defaulting to :py:mod:`pickle`), which
+      determines how :py:class:`.CachedResponse` objects are saved internally. See :ref:`serializers`
+      for details.
 
     Args:
-        serializer: Custom serializer that provides ``loads`` and ``dumps`` methods
-        kwargs: Additional serializer or backend-specific keyword arguments
+        serializer: Custom serializer that provides ``loads`` and ``dumps`` methods.
+            If not provided, values will be written as-is.
+        decode_content: Decode response body JSON or text into a human-readable format
+        kwargs: Additional backend-specific keyword arguments
     """
 
     def __init__(
-        self,
-        serializer=None,
-        **kwargs,
+        self, serializer: Optional[SerializerType] = None, decode_content: bool = False, **kwargs
     ):
-        self._serializer = init_serializer(serializer, **kwargs)
-        logger.debug(f'Initializing {type(self).__name__} with serializer: {self.serializer}')
-
-    @property
-    def serializer(self):
-        return self._serializer
+        self.serializer = init_serializer(serializer, decode_content)
+        logger.debug(f'Initialized {type(self).__name__} with serializer: {self.serializer}')
 
-    @serializer.setter
-    def serializer(self, value):
-        self._serializer = init_serializer(value)
+    def bulk_delete(self, keys: Iterable[KT]):
+        """Delete multiple keys from the cache, without raising errors for missing keys.
 
-    def bulk_delete(self, keys: Iterable[str]):
-        """Delete multiple keys from the cache, without raising errors for missing keys. This is a
-        naive implementation that subclasses should override with a more efficient backend-specific
-        implementation, if possible.
+        This is a naive, generic implementation that subclasses should override with a more
+        efficient backend-specific implementation, if possible.
         """
         for k in keys:
             try:
@@ -344,6 +381,39 @@ class BaseStorage(MutableMapping, ABC):
             except KeyError:
                 pass
 
+    def close(self):
+        """Close any open backend connections"""
+
+    def serialize(self, value: VT):
+        """Serialize a value, if a serializer is available"""
+        if TYPE_CHECKING:
+            assert hasattr(self.serializer, 'dumps')
+        return self.serializer.dumps(value) if self.serializer else value
+
+    def deserialize(self, key, value: VT):
+        """Deserialize a value, if a serializer is available.
+
+        If deserialization fails (usually due to a value saved in an older requests-cache version),
+        ``None`` will be returned.
+        """
+        if not self.serializer:
+            return value
+        if TYPE_CHECKING:
+            assert hasattr(self.serializer, 'loads')
+
+        try:
+            obj = self.serializer.loads(value)
+            # Set cache key, if it's a response object
+            try:
+                obj.cache_key = key
+            except AttributeError:
+                pass
+            return obj
+        except DESERIALIZE_ERRORS as e:
+            logger.error(f'Unable to deserialize response: {str(e)}')
+            logger.debug(e, exc_info=True)
+            return None
+
     def __str__(self):
         return str(list(self.keys()))
 
@@ -360,13 +430,18 @@ class DictStorage(UserDict, BaseStorage):
 
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
-        self._serializer = None
+        self.serializer = None
 
     def __getitem__(self, key):
-        """An additional step is needed here for response data. Since the original response object
-        is still in memory, its content has already been read and needs to be reset.
+        """An additional step is needed here for response data. The original response object
+        is still in memory, and hasn't gone through a serialize/deserialize loop. So, the file-like
+        response body has already been read, and needs to be reset.
         """
         item = super().__getitem__(key)
         if getattr(item, 'raw', None):
             item.raw.reset()
+        try:
+            item.cache_key = key
+        except AttributeError:
+            pass
         return item
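
The new ``BaseCache`` convenience methods above (``filter()``, ``delete()``, ``urls()``) replace several deprecated ones. A minimal sketch, assuming a populated cache (the URL is a placeholder):

    >>> from datetime import timedelta
    >>> from requests_cache import CachedSession
    >>> session = CachedSession('demo_cache')
    >>> session.get('https://httpbin.org/get')
    >>> # List unique cached URLs (now a method rather than a property)
    >>> session.cache.urls()
    >>> # Replaces the deprecated remove_expired_responses()
    >>> session.cache.delete(expired=True, invalid=True)
    >>> # New: drop responses by age, relative to response.created_at
    >>> session.cache.delete(older_than=timedelta(days=7))
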
diff --git a/requests_cache/backends/dynamodb.py b/requests_cache/backends/dynamodb.py
index 11d20fc..f5d9838 100644
--- a/requests_cache/backends/dynamodb.py
+++ b/requests_cache/backends/dynamodb.py
@@ -1,183 +1,173 @@
-"""
-.. image::
-    ../_static/dynamodb.png
-
-`DynamoDB <https://aws.amazon.com/dynamodb>`_ is a NoSQL document database hosted on `Amazon Web
-Services <https://aws.amazon.com>`_. In terms of features and use cases, it is roughly comparable to
-MongoDB and other NoSQL databases. It is an especially good fit for serverless applications running
-on `AWS Lambda <https://aws.amazon.com/lambda>`_.
-
-.. warning::
-    DynamoDB binary item sizes are limited to 400KB. If you need to cache larger responses, consider
-    using a different backend.
-
-Creating Tables
-^^^^^^^^^^^^^^^
-Tables will be automatically created if they don't already exist. This is convenient if you just
-want to quickly test out DynamoDB as a cache backend, but in a production environment you will
-likely want to create the tables yourself, for example with `CloudFormation
-<https://aws.amazon.com/cloudformation/>`_ or `Terraform <https://www.terraform.io/>`_. Here are the
-details you'll need:
-
-* Tables: two tables, named ``responses`` and ``redirects``
-* Partition key (aka namespace): ``namespace``
-* Range key (aka sort key): ``key``
-* Attributes: ``namespace`` (string) and ``key`` (string)
-
-Connection Options
-^^^^^^^^^^^^^^^^^^
-The DynamoDB backend accepts any keyword arguments for :py:meth:`boto3.session.Session.resource`:
-
-    >>> backend = DynamoDbCache(region_name='us-west-2')
-    >>> session = CachedSession('http_cache', backend=backend)
-
-API Reference
-^^^^^^^^^^^^^
+"""DynamoDB cache backend. For usage details, see :ref:`Backends: DynamoDB <dynamodb>`.
+
 .. automodsumm:: requests_cache.backends.dynamodb
    :classes-only:
    :nosignatures:
 """
-from typing import Dict, Iterable
+from typing import Iterable, Optional
 
 import boto3
 from boto3.dynamodb.types import Binary
 from boto3.resources.base import ServiceResource
 from botocore.exceptions import ClientError
 
+from requests_cache.backends.base import VT
+
 from .._utils import get_valid_kwargs
-from . import BaseCache, BaseStorage
+from ..serializers import SerializerType, dynamodb_document_serializer
+from . import BaseCache, BaseStorage, DictStorage
 
 
 class DynamoDbCache(BaseCache):
-    """DynamoDB cache backend
+    """DynamoDB cache backend.
+    By default, responses are only partially serialized into a DynamoDB-compatible document format.
 
     Args:
         table_name: DynamoDB table name
-        namespace: Name of DynamoDB hash map
         connection: :boto3:`DynamoDB Resource <services/dynamodb.html#DynamoDB.ServiceResource>`
             object to use instead of creating a new one
+        ttl: Use DynamoDB TTL to automatically remove expired items
         kwargs: Additional keyword arguments for :py:meth:`~boto3.session.Session.resource`
     """
 
     def __init__(
-        self, table_name: str = 'http_cache', connection: ServiceResource = None, **kwargs
+        self,
+        table_name: str = 'http_cache',
+        *,
+        ttl: bool = True,
+        connection: Optional[ServiceResource] = None,
+        decode_content: bool = True,
+        serializer: Optional[SerializerType] = None,
+        **kwargs,
     ):
-        super().__init__(**kwargs)
-        self.responses = DynamoDbDict(table_name, 'responses', connection=connection, **kwargs)
-        self.redirects = DynamoDbDict(
-            table_name, 'redirects', connection=self.responses.connection, **kwargs
+        super().__init__(cache_name=table_name, **kwargs)
+        skwargs = {'serializer': serializer, **kwargs} if serializer else kwargs
+        self.responses = DynamoDbDict(
+            table_name,
+            ttl=ttl,
+            connection=connection,
+            decode_content=decode_content,
+            **skwargs,
         )
+        # Redirects will only be stored in memory and not persisted
+        self.redirects: BaseStorage[str, str] = DictStorage()
 
 
 class DynamoDbDict(BaseStorage):
-    """A dictionary-like interface for DynamoDB key-value store
-
-    **Notes:**
-        * The actual table name on the Dynamodb server will be ``namespace:table_name``
-        * In order to deal with how DynamoDB stores data, all values are serialized.
+    """A dictionary-like interface for DynamoDB table
 
     Args:
         table_name: DynamoDB table name
-        namespace: Name of DynamoDB hash map
         connection: :boto3:`DynamoDB Resource <services/dynamodb.html#DynamoDB.ServiceResource>`
             object to use instead of creating a new one
+        ttl: Use DynamoDB TTL to automatically remove expired items
         kwargs: Additional keyword arguments for :py:meth:`~boto3.session.Session.resource`
     """
 
     def __init__(
         self,
-        table_name,
-        namespace='http_cache',
-        connection=None,
-        read_capacity_units=1,
-        write_capacity_units=1,
+        table_name: str,
+        ttl: bool = True,
+        connection: Optional[ServiceResource] = None,
+        serializer: Optional[SerializerType] = dynamodb_document_serializer,
         **kwargs,
     ):
-        super().__init__(**kwargs)
+        super().__init__(serializer=serializer, **kwargs)
         connection_kwargs = get_valid_kwargs(
             boto3.Session.__init__, kwargs, extras=['endpoint_url']
         )
         self.connection = connection or boto3.resource('dynamodb', **connection_kwargs)
-        self.namespace = namespace
+        self.table_name = table_name
+        self.ttl = ttl
+
+        self._table = self.connection.Table(self.table_name)
+        self._create_table()
+        if ttl:
+            self._enable_ttl()
 
+    def _create_table(self):
+        """Create a default table if one does not already exist"""
         try:
             self.connection.create_table(
                 AttributeDefinitions=[
-                    {
-                        'AttributeName': 'namespace',
-                        'AttributeType': 'S',
-                    },
-                    {
-                        'AttributeName': 'key',
-                        'AttributeType': 'S',
-                    },
+                    {'AttributeName': 'key', 'AttributeType': 'S'},
                 ],
-                TableName=table_name,
+                TableName=self.table_name,
                 KeySchema=[
-                    {'AttributeName': 'namespace', 'KeyType': 'HASH'},
-                    {'AttributeName': 'key', 'KeyType': 'RANGE'},
+                    {'AttributeName': 'key', 'KeyType': 'HASH'},
                 ],
-                ProvisionedThroughput={
-                    'ReadCapacityUnits': read_capacity_units,
-                    'WriteCapacityUnits': write_capacity_units,
-                },
+                BillingMode='PAY_PER_REQUEST',
             )
-        except ClientError:
-            pass
-        self._table = self.connection.Table(table_name)
-        self._table.wait_until_exists()
-
-    def composite_key(self, key: str) -> Dict[str, str]:
-        return {'namespace': self.namespace, 'key': str(key)}
-
-    def _scan(self):
-        expression_attribute_values = {':Namespace': self.namespace}
-        expression_attribute_names = {'#N': 'namespace'}
-        key_condition_expression = '#N = :Namespace'
-        return self._table.query(
-            ExpressionAttributeValues=expression_attribute_values,
-            ExpressionAttributeNames=expression_attribute_names,
-            KeyConditionExpression=key_condition_expression,
-        )
+            self._table.wait_until_exists()
+        # Ignore error if table already exists
+        except ClientError as e:
+            if e.response['Error']['Code'] != 'ResourceInUseException':
+                raise
+
+    def _enable_ttl(self):
+        """Enable TTL, if not already enabled"""
+        try:
+            self.connection.meta.client.update_time_to_live(
+                TableName=self.table_name,
+                TimeToLiveSpecification={'AttributeName': 'ttl', 'Enabled': True},
+            )
+        # Ignore error if TTL is already enabled
+        except ClientError as e:
+            if e.response['Error']['Code'] != 'ValidationException':
+                raise
 
     def __getitem__(self, key):
-        result = self._table.get_item(Key=self.composite_key(key))
+        result = self._table.get_item(Key={'key': key})
         if 'Item' not in result:
             raise KeyError
-
-        # Depending on the serializer, the value may be either a string or Binary object
-        raw_value = result['Item']['value']
-        return self.serializer.loads(
-            raw_value.value if isinstance(raw_value, Binary) else raw_value
-        )
+        return self.deserialize(key, result['Item']['value'])
 
     def __setitem__(self, key, value):
-        item = {**self.composite_key(key), 'value': self.serializer.dumps(value)}
+        item = {'key': key, 'value': self.serialize(value)}
+
+        # If enabled, set TTL value as a timestamp in unix format
+        if self.ttl and getattr(value, 'expires_unix', None):
+            item['ttl'] = value.expires_unix
+
         self._table.put_item(Item=item)
 
     def __delitem__(self, key):
-        response = self._table.delete_item(Key=self.composite_key(key), ReturnValues='ALL_OLD')
+        response = self._table.delete_item(Key={'key': key}, ReturnValues='ALL_OLD')
         if 'Attributes' not in response:
             raise KeyError
 
     def __iter__(self):
-        response = self._scan()
-        for item in response['Items']:
+        # Alias 'key' attribute since it's a reserved keyword
+        results = self._table.scan(
+            ProjectionExpression='#k',
+            ExpressionAttributeNames={'#k': 'key'},
+        )
+        for item in results['Items']:
             yield item['key']
 
     def __len__(self):
-        return self._table.query(
-            Select='COUNT',
-            ExpressionAttributeNames={'#N': 'namespace'},
-            ExpressionAttributeValues={':Namespace': self.namespace},
-            KeyConditionExpression='#N = :Namespace',
-        )['Count']
+        """Get the number of items in the table.
+
+        **Note:** This is an estimate, and is updated every 6 hours. A full table scan will use up
+        your provisioned throughput, so it's not recommended.
+        """
+        return self._table.item_count
 
     def bulk_delete(self, keys: Iterable[str]):
         """Delete multiple keys from the cache. Does not raise errors for missing keys."""
         with self._table.batch_writer() as batch:
             for key in keys:
-                batch.delete_item(Key=self.composite_key(key))
+                batch.delete_item(Key={'key': key})
 
     def clear(self):
         self.bulk_delete((k for k in self))
+
+    def deserialize(self, key, value: VT):
+        """Handle Binary objects from a custom serializer"""
+        serialized_value = value.value if isinstance(value, Binary) else value
+        return super().deserialize(key, serialized_value)
+
+    # TODO: Support pagination
+    def values(self):
+        for item in self._table.scan()['Items']:
+            yield self.deserialize(item['key'], item['value'])
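
The reworked DynamoDB backend above creates a single on-demand table keyed only by ``key``, with native TTL enabled by default. A minimal sketch (the region and table name are placeholders):

    >>> from requests_cache import CachedSession
    >>> from requests_cache.backends import DynamoDbCache
    >>> # With ttl=True, DynamoDB purges expired responses itself
    >>> backend = DynamoDbCache('http_cache', region_name='us-west-2', ttl=True)
    >>> session = CachedSession(backend=backend)
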
diff --git a/requests_cache/backends/filesystem.py b/requests_cache/backends/filesystem.py
index 72e2d9c..ccda6c8 100644
--- a/requests_cache/backends/filesystem.py
+++ b/requests_cache/backends/filesystem.py
@@ -1,36 +1,5 @@
-"""
-.. image::
-    ../_static/files-generic.png
-
-This backend stores responses in files on the local filesystem (one file per response).
-
-File Formats
-^^^^^^^^^^^^
-By default, responses are saved as pickle files, since this format is generally the fastest. If you
-want to save responses in a human-readable format, you can use one of the other available
-:ref:`serializers`. For example, to save responses as JSON files:
-
-    >>> session = CachedSession('~/http_cache', backend='filesystem', serializer='json')
-    >>> session.get('https://httpbin.org/get')
-    >>> print(list(session.cache.paths()))
-    ['/home/user/http_cache/4dc151d95200ec.json']
-
-Or as YAML (requires ``pyyaml``):
-
-    >>> session = CachedSession('~/http_cache', backend='filesystem', serializer='yaml')
-    >>> session.get('https://httpbin.org/get')
-    >>> print(list(session.cache.paths()))
-    ['/home/user/http_cache/4dc151d95200ec.yaml']
-
-Cache Files
-^^^^^^^^^^^
-* See :ref:`files` for general info on specifying cache paths
-* The path for a given response will be in the format ``<cache_name>/<cache_key>``
-* Redirects are stored in a separate SQLite database, located at ``<cache_name>/redirects.sqlite``
-* Use :py:meth:`.FileCache.paths` to get a list of all cached response paths
-
-API Reference
-^^^^^^^^^^^^^
+"""Filesystem cache backend. For usage details, see :ref:`Backends: Filesystem <filesystem>`.
+
 .. automodsumm:: requests_cache.backends.filesystem
    :classes-only:
    :nosignatures:
@@ -41,30 +10,41 @@ from pathlib import Path
 from pickle import PickleError
 from shutil import rmtree
 from threading import RLock
-from typing import Iterator
+from typing import Iterator, Optional
 
-from ..serializers import SERIALIZERS
+from ..serializers import SERIALIZERS, SerializerType, json_serializer
 from . import BaseCache, BaseStorage
 from .sqlite import AnyPath, SQLiteDict, get_cache_path
 
 
 class FileCache(BaseCache):
-    """Filesystem backend.
+    """Filesystem cache backend.
 
     Args:
         cache_name: Base directory for cache files
         use_cache_dir: Store database in a user cache directory (e.g., `~/.cache/`)
         use_temp: Store cache files in a temp directory (e.g., ``/tmp/http_cache/``).
             Note: if ``cache_name`` is an absolute path, this option will be ignored.
+        decode_content: Decode JSON or text response body into a human-readable format
         extension: Extension for cache files. If not specified, the serializer default extension
             will be used.
     """
 
-    def __init__(self, cache_name: AnyPath = 'http_cache', use_temp: bool = False, **kwargs):
-        super().__init__(**kwargs)
-        self.responses: FileDict = FileDict(cache_name, use_temp=use_temp, **kwargs)
+    def __init__(
+        self,
+        cache_name: AnyPath = 'http_cache',
+        use_temp: bool = False,
+        decode_content: bool = True,
+        serializer: Optional[SerializerType] = None,
+        **kwargs,
+    ):
+        super().__init__(cache_name=str(cache_name), **kwargs)
+        skwargs = {'serializer': serializer, **kwargs} if serializer else kwargs
+        self.responses: FileDict = FileDict(
+            cache_name, use_temp=use_temp, decode_content=decode_content, **skwargs
+        )
         self.redirects: SQLiteDict = SQLiteDict(
-            self.cache_dir / 'redirects.sqlite', 'redirects', **kwargs
+            self.cache_dir / 'redirects.sqlite', 'redirects', no_serializer=True, **kwargs
         )
 
     @property
@@ -82,9 +62,9 @@ class FileCache(BaseCache):
         self.responses.clear()
         self.redirects.init_db()
 
-    def remove_expired_responses(self, *args, **kwargs):
+    def delete(self, *args, **kwargs):
         with self.responses._lock:
-            return super().remove_expired_responses(*args, **kwargs)
+            return super().delete(*args, **kwargs)
 
 
 class FileDict(BaseStorage):
@@ -95,10 +75,11 @@ class FileDict(BaseStorage):
         cache_name: AnyPath,
         use_temp: bool = False,
         use_cache_dir: bool = False,
-        extension: str = None,
+        extension: Optional[str] = None,
+        serializer: Optional[SerializerType] = json_serializer,
         **kwargs,
     ):
-        super().__init__(**kwargs)
+        super().__init__(serializer=serializer, **kwargs)
         self.cache_dir = get_cache_path(cache_name, use_cache_dir=use_cache_dir, use_temp=use_temp)
         self.extension = _get_extension(extension, self.serializer)
         self.is_binary = getattr(self.serializer, 'is_binary', False)
@@ -122,7 +103,7 @@ class FileDict(BaseStorage):
         mode = 'rb' if self.is_binary else 'r'
         with self._try_io():
             with self._path(key).open(mode) as f:
-                return self.serializer.loads(f.read())
+                return self.deserialize(key, f.read())
 
     def __delitem__(self, key):
         with self._try_io():
@@ -131,7 +112,7 @@ class FileDict(BaseStorage):
     def __setitem__(self, key, value):
         with self._try_io():
             with self._path(key).open(mode='wb' if self.is_binary else 'w') as f:
-                f.write(self.serializer.dumps(value))
+                f.write(self.serialize(value))
 
     def __iter__(self):
         yield from self.keys()
@@ -153,7 +134,7 @@ class FileDict(BaseStorage):
             return self.cache_dir.glob(f'*{self.extension}')
 
 
-def _get_extension(extension: str = None, serializer=None) -> str:
+def _get_extension(extension: Optional[str] = None, serializer=None) -> str:
     """Use either the provided file extension, or get the serializer's default extension"""
     if extension:
         return f'.{extension}'
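
With the filesystem changes above, responses default to the JSON serializer with ``decode_content=True``, so cache files are human-readable. A minimal sketch (the cache path is a placeholder):

    >>> from requests_cache import CachedSession
    >>> from requests_cache.backends import FileCache
    >>> # One JSON file per response, with decoded (readable) bodies
    >>> backend = FileCache('~/http_cache', decode_content=True)
    >>> session = CachedSession(backend=backend)
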
diff --git a/requests_cache/backends/gridfs.py b/requests_cache/backends/gridfs.py
index c9599b7..aadb7e5 100644
--- a/requests_cache/backends/gridfs.py
+++ b/requests_cache/backends/gridfs.py
@@ -1,24 +1,19 @@
-"""
-.. image::
-    ../_static/mongodb.png
-
-`GridFS <https://docs.mongodb.com/manual/core/gridfs/>`_ is a specification for storing large files
-(>16 MB) in MongoDB. See :py:mod:`~requests_cache.backends.mongodb` for more general info on MongoDB.
+"""GridFS cache backend. For usage details, see :ref:`Backends: GridFS <gridfs>` and :ref:`Backends: MongoDB <mongodb>`.
 
-API Reference
-^^^^^^^^^^^^^
 .. automodsumm:: requests_cache.backends.gridfs
    :classes-only:
    :nosignatures:
 """
 from logging import getLogger
 from threading import RLock
+from typing import Optional
 
 from gridfs import GridFS
 from gridfs.errors import CorruptGridFile, FileExists
 from pymongo import MongoClient
 
 from .._utils import get_valid_kwargs
+from ..serializers import SerializerType, pickle_serializer
 from .base import BaseCache, BaseStorage
 from .mongodb import MongoDict
 
@@ -28,29 +23,37 @@ logger = getLogger(__name__)
 class GridFSCache(BaseCache):
     """GridFS cache backend.
 
-    Example:
-
-        >>> session = CachedSession('http_cache', backend='gridfs')
-
     Args:
         db_name: Database name
         connection: :py:class:`pymongo.MongoClient` object to reuse instead of creating a new one
         kwargs: Additional keyword arguments for :py:class:`pymongo.MongoClient`
     """
 
-    def __init__(self, db_name: str, **kwargs):
-        super().__init__(**kwargs)
-        self.responses = GridFSPickleDict(db_name, **kwargs)
+    def __init__(
+        self,
+        db_name: str,
+        decode_content: bool = False,
+        serializer: Optional[SerializerType] = None,
+        **kwargs
+    ):
+        super().__init__(cache_name=db_name, **kwargs)
+        skwargs = {'serializer': serializer, **kwargs} if serializer else kwargs
+
+        self.responses = GridFSDict(db_name, decode_content=decode_content, **skwargs)
         self.redirects = MongoDict(
-            db_name, collection_name='redirects', connection=self.responses.connection, **kwargs
+            db_name,
+            collection_name='redirects',
+            connection=self.responses.connection,
+            serializer=None,
+            **kwargs
         )
 
-    def remove_expired_responses(self, *args, **kwargs):
+    def delete(self, *args, **kwargs):
         with self.responses._lock:
-            return super().remove_expired_responses(*args, **kwargs)
+            return super().delete(*args, **kwargs)
 
 
-class GridFSPickleDict(BaseStorage):
+class GridFSDict(BaseStorage):
     """A dictionary-like interface for a GridFS database
 
     Args:
@@ -60,9 +63,16 @@ class GridFSPickleDict(BaseStorage):
         kwargs: Additional keyword arguments for :py:class:`pymongo.MongoClient`
     """
 
-    def __init__(self, db_name, collection_name=None, connection=None, **kwargs):
-        super().__init__(**kwargs)
-        connection_kwargs = get_valid_kwargs(MongoClient, kwargs)
+    def __init__(
+        self,
+        db_name,
+        collection_name=None,
+        connection=None,
+        serializer: Optional[SerializerType] = pickle_serializer,
+        **kwargs
+    ):
+        super().__init__(serializer=serializer, **kwargs)
+        connection_kwargs = get_valid_kwargs(MongoClient.__init__, kwargs)
         self.connection = connection or MongoClient(**connection_kwargs)
         self.db = self.connection[db_name]
         self.fs = GridFS(self.db)
@@ -74,13 +84,13 @@ class GridFSPickleDict(BaseStorage):
                 result = self.fs.find_one({'_id': key})
                 if result is None:
                     raise KeyError
-                return self.serializer.loads(result.read())
+                return self.deserialize(key, result.read())
         except CorruptGridFile as e:
             logger.warning(e, exc_info=True)
             raise KeyError
 
     def __setitem__(self, key, item):
-        value = self.serializer.dumps(item)
+        value = self.serialize(item)
         encoding = None if isinstance(value, bytes) else 'utf-8'
 
         with self._lock:
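
Usage of the cache backed by the renamed ``GridFSDict`` is unchanged apart from the class name; a minimal sketch (host and port values are placeholders):

    >>> from requests_cache import CachedSession
    >>> from requests_cache.backends import GridFSCache
    >>> # Extra kwargs are passed through to pymongo.MongoClient
    >>> backend = GridFSCache('http_cache', host='localhost', port=27017)
    >>> session = CachedSession(backend=backend)
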
diff --git a/requests_cache/backends/mongodb.py b/requests_cache/backends/mongodb.py
index 730b12a..76b33be 100644
--- a/requests_cache/backends/mongodb.py
+++ b/requests_cache/backends/mongodb.py
@@ -1,34 +1,27 @@
-"""
-.. image::
-    ../_static/mongodb.png
-
-`MongoDB <https://www.mongodb.com>`_ is a NoSQL document database. It stores data in collections
-of documents, which are more flexible and less strictly structured than tables in a relational
-database.
-
-Connection Options
-^^^^^^^^^^^^^^^^^^
-The MongoDB backend accepts any keyword arguments for :py:class:`pymongo.mongo_client.MongoClient`:
-
-    >>> backend = MongoCache(host='192.168.1.63', port=27017)
-    >>> session = CachedSession('http_cache', backend=backend)
+"""MongoDB cache backend. For usage details, see :ref:`Backends: MongoDB <mongodb>`.
 
-API Reference
-^^^^^^^^^^^^^
 .. automodsumm:: requests_cache.backends.mongodb
    :classes-only:
    :nosignatures:
 """
-from typing import Iterable
+from datetime import timedelta
+from logging import getLogger
+from typing import Iterable, Mapping, Optional, Union
 
 from pymongo import MongoClient
+from pymongo.errors import OperationFailure
 
 from .._utils import get_valid_kwargs
+from ..policy.expiration import NEVER_EXPIRE, get_expiration_seconds
+from ..serializers import SerializerType, bson_document_serializer
 from . import BaseCache, BaseStorage
 
+logger = getLogger(__name__)
+
 
 class MongoCache(BaseCache):
-    """MongoDB cache backend
+    """MongoDB cache backend.
+    By default, responses are only partially serialized into a MongoDB-compatible document format.
 
     Args:
         db_name: Database name
@@ -36,16 +29,45 @@ class MongoCache(BaseCache):
         kwargs: Additional keyword arguments for :py:class:`pymongo.mongo_client.MongoClient`
     """
 
-    def __init__(self, db_name: str = 'http_cache', connection: MongoClient = None, **kwargs):
-        super().__init__(**kwargs)
-        self.responses = MongoPickleDict(db_name, 'responses', connection=connection, **kwargs)
-        self.redirects = MongoDict(
+    def __init__(
+        self,
+        db_name: str = 'http_cache',
+        connection: MongoClient = None,
+        decode_content: bool = True,
+        serializer: Optional[SerializerType] = None,
+        **kwargs,
+    ):
+        super().__init__(cache_name=db_name, **kwargs)
+        skwargs = {'serializer': serializer, **kwargs} if serializer else kwargs
+        self.responses: MongoDict = MongoDict(
+            db_name,
+            collection_name='responses',
+            connection=connection,
+            decode_content=decode_content,
+            **skwargs,
+        )
+        self.redirects: MongoDict = MongoDict(
             db_name,
             collection_name='redirects',
             connection=self.responses.connection,
+            serializer=None,
             **kwargs,
         )
 
+    def get_ttl(self) -> Optional[int]:
+        """Get the currently defined TTL value in seconds, if any"""
+        return self.responses.get_ttl()
+
+    def set_ttl(self, ttl: Union[int, timedelta], overwrite: bool = False):
+        """Create or update a TTL index. Notes:
+
+        * This will have no effect if TTL is already set
+        * To overwrite an existing TTL index, use ``overwrite=True``
+        * This may take some time to complete, depending on the size of your cache
+        * Use ``ttl=None, overwrite=True`` to remove the TTL index
+        """
+        self.responses.set_ttl(ttl, overwrite=overwrite)
+
 
 class MongoDict(BaseStorage):
     """A dictionary-like interface for a MongoDB collection
@@ -57,21 +79,57 @@ class MongoDict(BaseStorage):
         kwargs: Additional keyword arguments for :py:class:`pymongo.MongoClient`
     """
 
-    def __init__(self, db_name, collection_name='http_cache', connection=None, **kwargs):
-        super().__init__(**kwargs)
+    def __init__(
+        self,
+        db_name: str,
+        collection_name: str = 'http_cache',
+        connection: Optional[MongoClient] = None,
+        serializer: Optional[SerializerType] = bson_document_serializer,
+        **kwargs,
+    ):
+        super().__init__(serializer=serializer, **kwargs)
         connection_kwargs = get_valid_kwargs(MongoClient.__init__, kwargs)
         self.connection = connection or MongoClient(**connection_kwargs)
         self.collection = self.connection[db_name][collection_name]
 
+    def get_ttl(self) -> Optional[int]:
+        """Get the currently defined TTL value in seconds, if any"""
+        idx_info = self.collection.index_information().get('ttl_idx', {})
+        return idx_info.get('expireAfterSeconds')
+
+    def set_ttl(self, ttl: Union[int, timedelta], overwrite: bool = False):
+        """Create or update a TTL index, and ignore and log any errors due to dropping a nonexistent
+        index or attempting to overwrite without ```overwrite=True``.
+        """
+        try:
+            self._set_ttl(get_expiration_seconds(ttl), overwrite=overwrite)
+        except OperationFailure:
+            logger.warning('Failed to update TTL index', exc_info=True)
+
+    def _set_ttl(self, ttl: int, overwrite: bool = False):
+        if overwrite:
+            self.collection.drop_index('ttl_idx')
+            logger.info('Dropped TTL index')
+
+        if ttl and ttl != NEVER_EXPIRE:
+            logger.info(f'Creating TTL index for {ttl} seconds')
+            self.collection.create_index('created_at', name='ttl_idx', expireAfterSeconds=ttl)
+
     def __getitem__(self, key):
         result = self.collection.find_one({'_id': key})
         if result is None:
             raise KeyError
-        return result['data']
-
-    def __setitem__(self, key, item):
-        doc = {'_id': key, 'data': item}
-        self.collection.replace_one({'_id': key}, doc, upsert=True)
+        value = result['data'] if 'data' in result else result
+        return self.deserialize(key, value)
+
+    def __setitem__(self, key, value):
+        """If ``value`` is already a dict, its values will be stored under top-level keys.
+        Otherwise, it will be stored under a 'data' key.
+        """
+        value = self.serialize(value)
+        if not isinstance(value, Mapping):
+            value = {'data': value}
+        self.collection.replace_one({'_id': key}, value, upsert=True)
 
     def __delitem__(self, key):
         result = self.collection.find_one_and_delete({'_id': key}, {'_id': True})
@@ -92,12 +150,5 @@ class MongoDict(BaseStorage):
     def clear(self):
         self.collection.drop()
 
-
-class MongoPickleDict(MongoDict):
-    """Same as :class:`MongoDict`, but pickles values before saving"""
-
-    def __setitem__(self, key, item):
-        super().__setitem__(key, self.serializer.dumps(item))
-
-    def __getitem__(self, key):
-        return self.serializer.loads(super().__getitem__(key))
+    def close(self):
+        self.connection.close()
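
The new MongoDB TTL helpers above wrap a standard TTL index on ``created_at``. A minimal sketch of managing it:

    >>> from datetime import timedelta
    >>> from requests_cache.backends import MongoCache
    >>> backend = MongoCache('http_cache')
    >>> backend.set_ttl(timedelta(days=7))     # No-op if a TTL index already exists
    >>> backend.get_ttl()                      # Current TTL in seconds, or None
    >>> backend.set_ttl(None, overwrite=True)  # Drop the TTL index entirely
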
diff --git a/requests_cache/backends/redis.py b/requests_cache/backends/redis.py
index 32a1d67..59b2920 100644
--- a/requests_cache/backends/redis.py
+++ b/requests_cache/backends/redis.py
@@ -1,57 +1,50 @@
-"""
-.. image::
-    ../_static/redis.png
-
-`Redis <https://redis.io>`_ is an in-memory data store with on-disk persistence. It offers a
-high-performance cache that scales exceptionally well, making it an ideal choice for larger
-applications.
-
-Persistence
-^^^^^^^^^^^
-Redis operates on data in memory, and by default also persists data to snapshots on disk. This is
-optimized for performance, with a minor risk of data loss, and is usually the best configuration
-for a cache. If you need different behavior, the frequency and type of persistence can be customized
-or disabled entirely. See `Redis Persistence <https://redis.io/topics/persistence>`_ for details.
+"""Redis cache backend. For usage details, see :ref:`Backends: Redis <redis>`.
 
-Connection Options
-^^^^^^^^^^^^^^^^^^
-The Redis backend accepts any keyword arguments for :py:class:`redis.client.Redis`:
-
-    >>> backend = RedisCache(host='192.168.1.63', port=6379)
-    >>> session = CachedSession('http_cache', backend=backend)
-
-API Reference
-^^^^^^^^^^^^^
 .. automodsumm:: requests_cache.backends.redis
    :classes-only:
    :nosignatures:
 """
 from logging import getLogger
-from typing import Iterable
+from typing import Iterable, Optional
 
 from redis import Redis, StrictRedis
 
 from .._utils import get_valid_kwargs
 from ..cache_keys import decode, encode
+from ..serializers import SerializerType, pickle_serializer, utf8_encoder
 from . import BaseCache, BaseStorage
 
+DEFAULT_TTL_OFFSET = 3600
 logger = getLogger(__name__)
 
 
 class RedisCache(BaseCache):
-    """Redis cache backend
+    """Redis cache backend.
 
     Args:
         namespace: Redis namespace
         connection: Redis connection instance to use instead of creating a new one
+        ttl: Use Redis TTL to automatically delete expired items
+        ttl_offset: Additional time to wait before deleting expired items, in seconds
         kwargs: Additional keyword arguments for :py:class:`redis.client.Redis`
     """
 
-    def __init__(self, namespace='http_cache', connection: Redis = None, **kwargs):
-        super().__init__(**kwargs)
-        self.responses = RedisDict(namespace, connection=connection, **kwargs)
+    def __init__(
+        self,
+        namespace='http_cache',
+        connection: Optional[Redis] = None,
+        serializer: Optional[SerializerType] = None,
+        ttl: bool = True,
+        ttl_offset: int = DEFAULT_TTL_OFFSET,
+        **kwargs,
+    ):
+        super().__init__(cache_name=namespace, **kwargs)
+        skwargs = {'serializer': serializer, **kwargs} if serializer else kwargs
+        self.responses = RedisDict(
+            namespace, connection=connection, ttl=ttl, ttl_offset=ttl_offset, **skwargs
+        )
         self.redirects = RedisHashDict(
-            namespace, 'redirects', connection=self.responses.connection, **kwargs
+            namespace, collection_name='redirects', connection=self.responses.connection, **kwargs
         )
 
 
@@ -63,11 +56,22 @@ class RedisDict(BaseStorage):
         * Supports TTL
     """
 
-    def __init__(self, namespace: str, collection_name: str = None, connection=None, **kwargs):
-        super().__init__(**kwargs)
+    def __init__(
+        self,
+        namespace: str,
+        collection_name: Optional[str] = None,
+        connection=None,
+        serializer: Optional[SerializerType] = pickle_serializer,
+        ttl: bool = True,
+        ttl_offset: int = DEFAULT_TTL_OFFSET,
+        **kwargs,
+    ):
+        super().__init__(serializer=serializer, **kwargs)
         connection_kwargs = get_valid_kwargs(Redis.__init__, kwargs)
         self.connection = connection or StrictRedis(**connection_kwargs)
         self.namespace = namespace
+        self.ttl = ttl
+        self.ttl_offset = ttl_offset
 
     def _bkey(self, key: str) -> bytes:
         """Get a full hash key as bytes"""
@@ -83,14 +87,16 @@ class RedisDict(BaseStorage):
         result = self.connection.get(self._bkey(key))
         if result is None:
             raise KeyError
-        return self.serializer.loads(result)
+        return self.deserialize(key, result)
 
     def __setitem__(self, key, item):
         """Save an item to the cache, optionally with TTL"""
-        if getattr(item, 'ttl', None):
-            self.connection.setex(self._bkey(key), item.ttl, self.serializer.dumps(item))
+        expires_delta = getattr(item, 'expires_delta', None)
+        ttl_seconds = (expires_delta or 0) + self.ttl_offset
+        if self.ttl and ttl_seconds > 0:
+            self.connection.setex(self._bkey(key), ttl_seconds, self.serialize(item))
         else:
-            self.connection.set(self._bkey(key), self.serializer.dumps(item))
+            self.connection.set(self._bkey(key), self.serialize(item))
 
     def __delitem__(self, key):
         if not self.connection.delete(self._bkey(key)):
@@ -110,6 +116,9 @@ class RedisDict(BaseStorage):
     def clear(self):
         self.bulk_delete(self.keys())
 
+    def close(self):
+        self.connection.close()
+
     def keys(self):
         return [
             decode(key).replace(f'{self.namespace}:', '')
@@ -120,21 +129,27 @@ class RedisDict(BaseStorage):
         return [(k, self[k]) for k in self.keys()]
 
     def values(self):
-        return [self.serializer.loads(v) for v in self.connection.mget(*self._bkeys(self.keys()))]
+        for _, v in self.items():
+            yield v
 
 
 class RedisHashDict(BaseStorage):
     """A dictionary-like interface for operations on a single Redis hash
 
     **Notes:**
-        * All keys will be encoded as bytes, and all values will be serialized
+        * All keys will be encoded as bytes
         * Items will be stored in a hash named ``namespace:collection_name``
     """
 
     def __init__(
-        self, namespace: str = 'http_cache', collection_name: str = None, connection=None, **kwargs
+        self,
+        namespace: str = 'http_cache',
+        collection_name: Optional[str] = None,
+        connection=None,
+        serializer: Optional[SerializerType] = utf8_encoder,
+        **kwargs,
     ):
-        super().__init__(**kwargs)
+        super().__init__(serializer=serializer, **kwargs)
         connection_kwargs = get_valid_kwargs(Redis, kwargs)
         self.connection = connection or StrictRedis(**connection_kwargs)
         self._hash_key = f'{namespace}-{collection_name}'
@@ -146,10 +161,10 @@ class RedisHashDict(BaseStorage):
         result = self.connection.hget(self._hash_key, encode(key))
         if result is None:
             raise KeyError
-        return self.serializer.loads(result)
+        return self.deserialize(key, result)
 
     def __setitem__(self, key, item):
-        self.connection.hset(self._hash_key, encode(key), self.serializer.dumps(item))
+        self.connection.hset(self._hash_key, encode(key), self.serialize(item))
 
     def __delitem__(self, key):
         if not self.connection.hdel(self._hash_key, encode(key)):
@@ -175,10 +190,11 @@ class RedisHashDict(BaseStorage):
     def items(self):
         """Get all ``(key, value)`` pairs in the hash"""
         return [
-            (decode(k), self.serializer.loads(v))
+            (decode(k), self.deserialize(decode(k), v))
             for k, v in self.connection.hgetall(self._hash_key).items()
         ]
 
     def values(self):
         """Get all values in the hash"""
-        return [self.serializer.loads(v) for v in self.connection.hvals(self._hash_key)]
+        for _, v in self.items():
+            yield v
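
The TTL changes above let Redis evict cached responses on its own. A minimal sketch of how the new parameters fit together, assuming a Redis server on localhost and using the parameter names from the signature in this diff:

    from requests_cache import CachedSession
    from requests_cache.backends import RedisCache

    # Let Redis expire keys itself, but keep them 1 hour (ttl_offset) past the
    # point where requests-cache considers them stale, e.g. for stale_if_error
    backend = RedisCache(ttl=True, ttl_offset=3600)
    session = CachedSession('http_cache', backend=backend, expire_after=300)
    session.get('https://httpbin.org/get')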
diff --git a/requests_cache/backends/sqlite.py b/requests_cache/backends/sqlite.py
index 598f37f..72b62da 100644
--- a/requests_cache/backends/sqlite.py
+++ b/requests_cache/backends/sqlite.py
@@ -1,61 +1,5 @@
-"""
-.. image::
-    ../_static/sqlite.png
-
-`SQLite <https://www.sqlite.org/>`_ is a fast and lightweight SQL database engine that stores data
-either in memory or in a single file on disk.
-
-Despite its simplicity, SQLite is a powerful tool. For example, it's the primary storage system for
-a number of common applications including Dropbox, Firefox, and Chrome. It's well suited for
-caching, and requires no extra configuration or dependencies, which is why it's the default backend
-for requests-cache.
-
-Cache Files
-^^^^^^^^^^^
-* See :ref:`files` for general info on specifying cache paths
-* If you specify a name without an extension, the default extension ``.sqlite`` will be used
-
-In-Memory Caching
-~~~~~~~~~~~~~~~~~
-SQLite also supports `in-memory databases <https://www.sqlite.org/inmemorydb.html>`_.
-You can enable this (in "shared" memory mode) with the ``use_memory`` option:
-
-    >>> session = CachedSession('http_cache', use_memory=True)
-
-Or specify a memory URI with additional options:
-
-    >>> session = CachedSession(':file:memdb1?mode=memory')
-
-Or just ``:memory:``, if you are only using the cache from a single thread:
+"""SQLite cache backend. For usage details, see :ref:`Backends: SQLite <sqlite>`.
 
-    >>> session = CachedSession(':memory:')
-
-Performance
-^^^^^^^^^^^
-When working with average-sized HTTP responses (< 1MB) and using a modern SSD for file storage, you
-can expect speeds of around:
-
-* Write: 2-8ms
-* Read: 0.2-0.6ms
-
-Of course, this will vary based on hardware specs, response size, and other factors.
-
-Concurrency
-^^^^^^^^^^^
-SQLite supports concurrent access, so it is safe to use from a multi-threaded and/or multi-process
-application. It supports unlimited concurrent reads. Writes, however, are queued and run in serial,
-so if you need to make large volumes of concurrent requests, you may want to consider a different
-backend that's specifically made for that kind of workload, like :py:class:`.RedisCache`.
-
-Connection Options
-^^^^^^^^^^^^^^^^^^
-The SQLite backend accepts any keyword arguments for :py:func:`sqlite3.connect`:
-
-    >>> backend = SQLiteCache('http_cache', timeout=30)
-    >>> session = CachedSession(backend=backend)
-
-API Reference
-^^^^^^^^^^^^^
 .. automodsumm:: requests_cache.backends.sqlite
    :classes-only:
    :nosignatures:
@@ -65,15 +9,20 @@ import threading
 from contextlib import contextmanager
 from logging import getLogger
 from os import unlink
-from os.path import isfile
+from os.path import getsize, isfile
 from pathlib import Path
 from tempfile import gettempdir
-from typing import Collection, Iterable, Iterator, List, Tuple, Type, Union
+from time import time
+from typing import Collection, Iterator, List, Optional, Tuple, Type, Union
 
-from appdirs import user_cache_dir
+from platformdirs import user_cache_dir
 
 from .._utils import chunkify, get_valid_kwargs
+from ..models.response import CachedResponse
+from ..policy import ExpirationTime
+from ..serializers import SerializerType, pickle_serializer
 from . import BaseCache, BaseStorage
+from .base import VT
 
 MEMORY_URI = 'file::memory:?cache=shared'
 SQLITE_MAX_VARIABLE_NUMBER = 999
@@ -90,32 +39,33 @@ class SQLiteCache(BaseCache):
         use_temp: Store database in a temp directory (e.g., ``/tmp/http_cache.sqlite``)
         use_memory: Store database in memory instead of in a file
         fast_save: Significantly increases cache write performance, but with the possibility of data
-            loss. See `pragma: synchronous <http://www.sqlite.org/pragma.html#pragma_synchronous>`_
+            loss. See `pragma: synchronous <https://www.sqlite.org/pragma.html#pragma_synchronous>`_
             for details.
+        wal: Use `Write-Ahead Logging <https://sqlite.org/wal.html>`_, so readers do not block writers.
         kwargs: Additional keyword arguments for :py:func:`sqlite3.connect`
     """
 
-    def __init__(self, db_path: AnyPath = 'http_cache', **kwargs):
-        super().__init__(**kwargs)
-        self.responses: SQLiteDict = SQLitePickleDict(db_path, table_name='responses', **kwargs)
-        self.redirects = SQLiteDict(db_path, table_name='redirects', **kwargs)
+    def __init__(
+        self,
+        db_path: AnyPath = 'http_cache',
+        serializer: Optional[SerializerType] = None,
+        **kwargs,
+    ):
+        super().__init__(cache_name=str(db_path), **kwargs)
+        # Only override serializer if a non-None value is specified
+        skwargs = {'serializer': serializer, **kwargs} if serializer else kwargs
+        self.responses: SQLiteDict = SQLiteDict(db_path, table_name='responses', **skwargs)
+        self.redirects: SQLiteDict = SQLiteDict(
+            db_path, table_name='redirects', serializer=None, **kwargs
+        )
 
     @property
     def db_path(self) -> AnyPath:
         return self.responses.db_path
 
-    def bulk_delete(self, keys):
-        """Remove multiple responses and their associated redirects from the cache, with additional cleanup"""
-        self.responses.bulk_delete(keys=keys)
-        self.responses.vacuum()
-
-        self.redirects.bulk_delete(keys=keys)
-        self.redirects.bulk_delete(values=keys)
-        self.redirects.vacuum()
-
     def clear(self):
-        """Clear the cache. If this fails due to a corrupted cache or other I/O error, this will
-        attempt to delete the cache file and re-initialize.
+        """Delete all items from the cache. If this fails due to a corrupted cache or other I/O
+        error, this will attempt to delete the cache file and re-initialize.
         """
         try:
             super().clear()
@@ -126,9 +76,92 @@ class SQLiteCache(BaseCache):
             self.responses.init_db()
             self.redirects.init_db()
 
-    def remove_expired_responses(self, *args, **kwargs):
-        with self.responses._lock, self.redirects._lock:
-            return super().remove_expired_responses(*args, **kwargs)
+    # A more efficient SQLite implementation of :py:meth:`BaseCache.delete`
+    def delete(
+        self,
+        *keys: str,
+        expired: bool = False,
+        **kwargs,
+    ):
+        if keys:
+            self.responses.bulk_delete(keys)
+        if expired:
+            self._delete_expired()
+
+        # For any remaining conditions, use base implementation
+        if kwargs:
+            with self.responses._lock, self.redirects._lock:
+                return super().delete(**kwargs)
+        else:
+            self._prune_redirects()
+
+        self.responses.vacuum()
+        self.redirects.vacuum()
+
+    def _delete_expired(self):
+        """A more efficient implementation of deleting expired responses in SQL"""
+        with self.responses.connection(commit=True) as con:
+            con.execute(
+                f'DELETE FROM {self.responses.table_name} WHERE expires <= ?', (round(time()),)
+            )
+
+    def _prune_redirects(self):
+        """A more efficient implementation of removing invalid redirects in SQL"""
+        with self.redirects.connection(commit=True) as conn:
+            t1 = self.redirects.table_name
+            t2 = self.responses.table_name
+            conn.execute(
+                f'DELETE FROM {t1} WHERE key IN ('
+                f'    SELECT {t1}.key FROM {t1}'
+                f'    LEFT JOIN {t2} ON {t2}.key = {t1}.value'
+                f'    WHERE {t2}.key IS NULL'
+                ')'
+            )
+
+    def count(self, expired: bool = True) -> int:
+        """Count number of responses, optionally excluding expired
+
+        Args:
+            expired: Set to ``False`` to count only unexpired responses
+        """
+        return self.responses.count(expired=expired)
+
+    # A more efficient implementation of :py:meth:`BaseCache.filter` to make use of indexes
+    def filter(
+        self,
+        valid: bool = True,
+        expired: bool = True,
+        invalid: bool = False,
+        older_than: ExpirationTime = None,
+    ) -> Iterator[CachedResponse]:
+        if valid and not invalid:
+            return self.responses.sorted(expired=expired)
+        else:
+            return super().filter(
+                valid=valid, expired=expired, invalid=invalid, older_than=older_than
+            )
+
+    # A more efficient implementation of :py:meth:`BaseCache.recreate_keys`
+    def recreate_keys(self):
+        with self.responses.bulk_commit():
+            super().recreate_keys()
+
+    def sorted(
+        self,
+        key: str = 'expires',
+        reversed: bool = False,
+        limit: Optional[int] = None,
+        expired: bool = True,
+    ) -> Iterator[CachedResponse]:
+        """Get cached responses, with sorting and other query options.
+
+        Args:
+            key: Key to sort by; either 'expires', 'size', or 'key'
+            reversed: Sort in descending order
+            limit: Maximum number of responses to return
+            expired: Set to ``False`` to exclude expired responses
+        """
+        return self.responses.sorted(key, reversed, limit, expired)
 
 
 class SQLiteDict(BaseStorage):
@@ -139,48 +172,74 @@ class SQLiteDict(BaseStorage):
         db_path,
         table_name='http_cache',
         fast_save=False,
+        serializer: Optional[SerializerType] = pickle_serializer,
         use_cache_dir: bool = False,
         use_memory: bool = False,
         use_temp: bool = False,
+        wal: bool = False,
         **kwargs,
     ):
-        super().__init__(**kwargs)
+        super().__init__(serializer=serializer, **kwargs)
         self._can_commit = True
-        self._local_context = threading.local()
+        self._connection: Optional[sqlite3.Connection] = None
         self._lock = threading.RLock()
         self.connection_kwargs = get_valid_kwargs(sqlite_template, kwargs)
+        self.connection_kwargs.setdefault('check_same_thread', False)
+        if use_memory:
+            self.connection_kwargs['uri'] = True
         self.db_path = _get_sqlite_cache_path(db_path, use_cache_dir, use_temp, use_memory)
         self.fast_save = fast_save
         self.table_name = table_name
+        self.wal = wal
         self.init_db()
 
     def init_db(self):
         """Initialize the database, if it hasn't already been"""
         self.close()
-        with self._lock, self.connection() as con:
-            con.execute(f'CREATE TABLE IF NOT EXISTS {self.table_name} (key PRIMARY KEY, value)')
+        with self.connection(commit=True) as con:
+            # Add new column to tables created before 1.0
+            try:
+                con.execute(f'ALTER TABLE {self.table_name} ADD COLUMN expires INTEGER')
+            except sqlite3.OperationalError:
+                pass
+
+            con.execute(
+                f'CREATE TABLE IF NOT EXISTS {self.table_name} ('
+                '    key TEXT PRIMARY KEY,'
+                '    value BLOB, '
+                '    expires INTEGER'
+                ')'
+            )
+            con.execute(f'CREATE INDEX IF NOT EXISTS expires_idx ON {self.table_name}(expires)')
 
     @contextmanager
     def connection(self, commit=False) -> Iterator[sqlite3.Connection]:
         """Get a thread-local database connection"""
-        if not getattr(self._local_context, 'con', None):
+        if not self._connection:
             logger.debug(f'Opening connection to {self.db_path}:{self.table_name}')
-            self._local_context.con = sqlite3.connect(self.db_path, **self.connection_kwargs)
+            self._connection = sqlite3.connect(self.db_path, **self.connection_kwargs)
             if self.fast_save:
-                self._local_context.con.execute('PRAGMA synchronous = 0;')
-        yield self._local_context.con
+                self._connection.execute('PRAGMA synchronous = 0;')
+            if self.wal:
+                self._connection.execute('PRAGMA journal_mode = wal')
+
+        # Any write operations need to be run in serial
+        if commit and self._can_commit:
+            self._lock.acquire()
+        yield self._connection
         if commit and self._can_commit:
-            self._local_context.con.commit()
+            self._connection.commit()
+            self._lock.release()
 
     def close(self):
         """Close any active connections"""
-        if getattr(self._local_context, 'con', None):
-            self._local_context.con.close()
-            self._local_context.con = None
+        if self._connection:
+            self._connection.close()
+            self._connection = None
 
     @contextmanager
     def bulk_commit(self):
-        """Context manager used to speed up insertion of a large number of records
+        """Insert a large number of records within a single transaction
 
         Example:
 
@@ -190,13 +249,13 @@ class SQLiteDict(BaseStorage):
             ...         d1[i] = i * 2
 
         """
-        self._can_commit = False
-        try:
-            yield
-            if hasattr(self._local_context, 'con'):
-                self._local_context.con.commit()
-        finally:
-            self._can_commit = True
+        with self._lock:
+            self._can_commit = False
+            try:
+                yield
+                self._connection.commit()
+            finally:
+                self._can_commit = True
 
     def __del__(self):
         self.close()
@@ -213,13 +272,17 @@ class SQLiteDict(BaseStorage):
         # raise error after the with block, otherwise the connection will be locked
         if not row:
             raise KeyError
-        return row[0]
+
+        return self.deserialize(key, row[0])
 
     def __setitem__(self, key, value):
+        # If available, set expiration as a timestamp in unix format
+        expires = getattr(value, 'expires_unix', None)
+        value = self.serialize(value)
         with self.connection(commit=True) as con:
             con.execute(
-                f'INSERT OR REPLACE INTO {self.table_name} (key,value) VALUES (?,?)',
-                (key, value),
+                f'INSERT OR REPLACE INTO {self.table_name} (key,value,expires) VALUES (?,?,?)',
+                (key, value, expires),
             )
 
     def __iter__(self):
@@ -228,12 +291,11 @@ class SQLiteDict(BaseStorage):
                 yield row[0]
 
     def __len__(self):
-        with self.connection() as con:
-            return con.execute(f'SELECT COUNT(key) FROM  {self.table_name}').fetchone()[0]
+        return self.count()
 
     def bulk_delete(self, keys=None, values=None):
-        """Delete multiple keys from the cache, without raising errors for any missing keys.
-        Also supports deleting by value.
+        """Delete multiple items from the cache, without raising errors for any missing items.
+        Supports deleting by either key or by value.
         """
         if not keys and not values:
             return
@@ -253,35 +315,86 @@ class SQLiteDict(BaseStorage):
             self.init_db()
             self.vacuum()
 
-    def vacuum(self):
-        with self.connection(commit=True) as con:
-            con.execute('VACUUM')
+    def count(self, expired: bool = True) -> int:
+        """Count number of responses, optionally excluding expired"""
+        filter_expr = ''
+        params: Tuple = ()
+        if not expired:
+            filter_expr = 'WHERE expires is null or expires > ?'
+            params = (time(),)
+        query = f'SELECT COUNT(key) FROM {self.table_name} {filter_expr}'
 
+        with self.connection() as con:
+            return con.execute(query, params).fetchone()[0]
 
-class SQLitePickleDict(SQLiteDict):
-    """Same as :class:`SQLiteDict`, but serializes values before saving"""
+    def serialize(self, value: VT):
+        value = super().serialize(value)
+        return sqlite3.Binary(value) if isinstance(value, bytes) else value
 
-    def __setitem__(self, key, value):
-        serialized_value = self.serializer.dumps(value)
-        if isinstance(serialized_value, bytes):
-            serialized_value = sqlite3.Binary(serialized_value)
-        super().__setitem__(key, serialized_value)
+    def size(self) -> int:
+        """Return the size of the database, in bytes. For an in-memory database, this will be an
+        estimate based on page size.
+        """
+        try:
+            return getsize(self.db_path)
+        except IOError:
+            return self._estimate_size()
+
+    def _estimate_size(self) -> int:
+        """Estimate the current size of the database based on page count * size"""
+        with self.connection() as conn:
+            page_count = conn.execute('PRAGMA page_count').fetchone()[0]
+            page_size = conn.execute('PRAGMA page_size').fetchone()[0]
+            return page_count * page_size
+
+    def sorted(
+        self,
+        key: str = 'expires',
+        reversed: bool = False,
+        limit: Optional[int] = None,
+        expired: bool = True,
+    ) -> Iterator[CachedResponse]:
+        """Get cache values in sorted order; see :py:meth:`.SQLiteCache.sorted` for usage details"""
+        # Get sort key, direction, and limit
+        if key not in ['expires', 'size', 'key']:
+            raise ValueError(f'Invalid sort key: {key}')
+        if key == 'size':
+            key = 'LENGTH(value)'
+        direction = 'DESC' if reversed else 'ASC'
+        limit_expr = f'LIMIT {limit}' if limit else ''
+
+        # Filter out expired items, if specified
+        filter_expr = ''
+        params: Tuple = ()
+        if not expired:
+            filter_expr = 'WHERE expires is null or expires > ?'
+            params = (time(),)
 
-    def __getitem__(self, key):
-        return self.serializer.loads(super().__getitem__(key))
+        with self.connection(commit=True) as con:
+            for row in con.execute(
+                f'SELECT key, value FROM {self.table_name} {filter_expr}'
+                f'  ORDER BY {key} {direction} {limit_expr}',
+                params,
+            ):
+                result = self.deserialize(row[0], row[1])
+                # Omit any results that can't be deserialized
+                if result:
+                    yield result
+
+    def vacuum(self):
+        with self.connection(commit=True) as con:
+            con.execute('VACUUM')
 
 
 def _format_sequence(values: Collection) -> Tuple[str, List]:
-    """Get SQL parameter marks for a sequence-based query, and ensure value is a sequence"""
-    if not isinstance(values, Iterable):
-        values = [values]
+    """Get SQL parameter marks for a sequence-based query"""
     return ','.join(['?'] * len(values)), list(values)
 
 
 def _get_sqlite_cache_path(
     db_path: AnyPath, use_cache_dir: bool, use_temp: bool, use_memory: bool = False
 ) -> AnyPath:
-    """Get a resolved path for a SQLite database file (or memory URI("""
+    """Get a resolved path for a SQLite database file (or memory URI)"""
     # Use an in-memory database, if specified
     db_path = str(db_path)
     if use_memory:
@@ -320,16 +433,10 @@ def get_cache_path(db_path: AnyPath, use_cache_dir: bool = False, use_temp: bool
 def sqlite_template(
     timeout: float = 5.0,
     detect_types: int = 0,
-    isolation_level: str = None,
+    isolation_level: Optional[str] = None,
     check_same_thread: bool = True,
-    factory: Type = None,
+    factory: Optional[Type] = None,
     cached_statements: int = 100,
     uri: bool = False,
 ):
     """Template function to get an accurate signature for the builtin :py:func:`sqlite3.connect`"""
-
-
-# Aliases for backwards-compatibility
-DbCache = SQLiteCache
-DbDict = SQLiteDict
-DbPickeDict = SQLitePickleDict
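
The SQLiteCache rewrite above adds an indexed ``expires`` column plus query helpers. A brief sketch of the new methods, with names taken from this diff and behavior assumed from the docstrings:

    from requests_cache.backends import SQLiteCache

    backend = SQLiteCache('http_cache', wal=True)  # write-ahead logging: readers don't block writers
    backend.delete(expired=True)                   # single DELETE using the new expires index
    print(backend.count(expired=False))            # count only unexpired responses
    for response in backend.sorted(key='size', reversed=True, limit=10):
        print(response)                            # ten largest cached responses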
diff --git a/requests_cache/cache_control.py b/requests_cache/cache_control.py
deleted file mode 100644
index c1c33f8..0000000
--- a/requests_cache/cache_control.py
+++ /dev/null
@@ -1,11 +0,0 @@
-# flake8: noqa: F401, F403
-from warnings import warn
-
-from .policy import *
-
-warn(
-    DeprecationWarning(
-        'Contents of requests_cache.cache_control will be moved in an upcoming release; '
-        'please import members `from requests_cache` instead'
-    )
-)
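
Since ``requests_cache/cache_control.py`` is deleted outright (the 0.9.x deprecation shim shown above), code still importing from it must switch to the new location. Based on the ``policy`` package shown later in this diff:

    # No longer available in 1.0:
    # from requests_cache.cache_control import CacheActions

    # Import from the policy subpackage (or the top-level package) instead:
    from requests_cache.policy import CacheActions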
diff --git a/requests_cache/cache_keys.py b/requests_cache/cache_keys.py
index 256e331..fa60619 100644
--- a/requests_cache/cache_keys.py
+++ b/requests_cache/cache_keys.py
@@ -1,4 +1,4 @@
-"""Internal utilities for generating the cache keys that are used to match requests
+"""Internal utilities for generating cache keys that are used for request matching
 
 .. automodsumm:: requests_cache.cache_keys
    :functions-only:
@@ -9,26 +9,41 @@ from __future__ import annotations
 import json
 from hashlib import blake2b
 from logging import getLogger
-from typing import TYPE_CHECKING, Dict, Iterable, List, Mapping, Optional, Union
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Dict,
+    Iterable,
+    List,
+    Mapping,
+    MutableMapping,
+    Optional,
+    Tuple,
+    Union,
+)
 from urllib.parse import parse_qsl, urlencode, urlparse, urlunparse
 
 from requests import Request, Session
 from requests.models import CaseInsensitiveDict
 from url_normalize import url_normalize
 
-from ._utils import get_valid_kwargs
+from ._utils import decode, encode
 
+__all__ = [
+    'create_key',
+    'normalize_body',
+    'normalize_headers',
+    'normalize_request',
+    'normalize_params',
+    'normalize_url',
+]
 if TYPE_CHECKING:
     from .models import AnyPreparedRequest, AnyRequest, CachedResponse
 
-__all__ = ['create_key', 'normalize_request']
-
-# Request headers that are always excluded from cache keys, but not redacted from cached responses
-DEFAULT_EXCLUDE_HEADERS = {'Cache-Control', 'If-None-Match', 'If-Modified-Since'}
-
-# Maximum JSON request body size that will be normalized
-MAX_NORM_BODY_SIZE = 10 * 1024 * 1025
+# Maximum JSON request body size that will be filtered and normalized
+MAX_NORM_BODY_SIZE = 10 * 1024 * 1024
 
+KVList = List[Tuple[str, str]]
 ParamList = Optional[Iterable[str]]
 RequestContent = Union[Mapping, str, bytes]
 
@@ -36,24 +51,20 @@ logger = getLogger(__name__)
 
 
 def create_key(
-    request: AnyRequest = None,
+    request: AnyRequest,
     ignored_parameters: ParamList = None,
     match_headers: Union[ParamList, bool] = False,
+    serializer: Any = None,
     **request_kwargs,
 ) -> str:
-    """Create a normalized cache key from either a request object or :py:class:`~requests.Request`
-    arguments
+    """Create a normalized cache key based on a request object
 
     Args:
         request: Request object to generate a cache key from
-        ignored_parameters: Request parames, headers, and/or body params to not match against
+        ignored_parameters: Request parameters, headers, and/or JSON body params to exclude
         match_headers: Match only the specified headers, or ``True`` to match all headers
-        request_kwargs: Request arguments to generate a cache key from
+        request_kwargs: Additional keyword arguments for :py:func:`~requests.request`
     """
-    # Convert raw request arguments into a request object, if needed
-    if not request:
-        request = Request(**get_valid_kwargs(Request.__init__, request_kwargs))
-
     # Normalize and gather all relevant request info to match against
     request = normalize_request(request, ignored_parameters)
     key_parts = [
@@ -62,6 +73,7 @@ def create_key(
         request.body or '',
         request_kwargs.get('verify', True),
         *get_matched_headers(request.headers, match_headers),
+        str(serializer),
     ]
 
     # Generate a hash based on this info
@@ -79,15 +91,11 @@ def get_matched_headers(
     """
     if not match_headers:
         return []
-
-    if isinstance(match_headers, Iterable):
-        included = set(match_headers) - DEFAULT_EXCLUDE_HEADERS
-    else:
-        included = set(headers) - DEFAULT_EXCLUDE_HEADERS
-
+    if match_headers is True:
+        match_headers = headers
     return [
         f'{k.lower()}={headers[k]}'
-        for k in sorted(included, key=lambda x: x.lower())
+        for k in sorted(match_headers, key=lambda x: x.lower())
         if k in headers
     ]
 
@@ -103,8 +111,7 @@ def normalize_request(
 
     Args:
         request: Request object to normalize
-        ignored_parameters: Request parames, headers, and/or body params to not match against and
-            to remove from the request
+            ignored_parameters: Request parameters, headers, and/or JSON body params to exclude
     """
     if isinstance(request, Request):
         norm_request: AnyPreparedRequest = Session().prepare_request(request)
@@ -119,11 +126,15 @@ def normalize_request(
 
 
 def normalize_headers(
-    headers: Mapping[str, str], ignored_parameters: ParamList
+    headers: MutableMapping[str, str], ignored_parameters: ParamList = None
 ) -> CaseInsensitiveDict:
-    """Sort and filter request headers"""
+    """Sort and filter request headers, and normalize minor variations in multi-value headers"""
     if ignored_parameters:
         headers = filter_sort_dict(headers, ignored_parameters)
+    for k, v in headers.items():
+        if ',' in v:
+            values = [v.strip() for v in v.lower().split(',') if v.strip()]
+            headers[k] = ', '.join(sorted(values))
     return CaseInsensitiveDict(headers)
 
 
@@ -149,15 +160,16 @@ def normalize_url(url: str, ignored_parameters: ParamList) -> str:
 
 def normalize_body(request: AnyPreparedRequest, ignored_parameters: ParamList) -> bytes:
     """Normalize and filter a request body if possible, depending on Content-Type"""
-    original_body = request.body or b''
+    if not request.body:
+        return b''
     content_type = request.headers.get('Content-Type')
 
     # Filter and sort params if possible
-    filtered_body: Union[str, bytes] = original_body
+    filtered_body: Union[str, bytes] = request.body
     if content_type == 'application/json':
-        filtered_body = normalize_json_body(original_body, ignored_parameters)
+        filtered_body = normalize_json_body(request.body, ignored_parameters)
     elif content_type == 'application/x-www-form-urlencoded':
-        filtered_body = normalize_params(original_body, ignored_parameters)
+        filtered_body = normalize_params(request.body, ignored_parameters)
 
     return encode(filtered_body)
 
@@ -166,8 +178,7 @@ def normalize_json_body(
     original_body: Union[str, bytes], ignored_parameters: ParamList
 ) -> Union[str, bytes]:
     """Normalize and filter a request body with serialized JSON data"""
-
-    if len(original_body) == 0 or len(original_body) > MAX_NORM_BODY_SIZE:
+    if len(original_body) <= 2 or len(original_body) > MAX_NORM_BODY_SIZE:
         return original_body
 
     try:
@@ -176,21 +187,24 @@ def normalize_json_body(
         return json.dumps(body)
     # If it's invalid JSON, then don't mess with it
     except (AttributeError, TypeError, ValueError):
-        logger.debug('Invalid JSON body:', exc_info=True)
+        logger.debug('Invalid JSON body')
         return original_body
 
 
-def normalize_params(value: Union[str, bytes], ignored_parameters: ParamList) -> str:
+def normalize_params(value: Union[str, bytes], ignored_parameters: ParamList = None) -> str:
     """Normalize and filter urlencoded params from either a URL or request body with form data"""
-    query_str = decode(value)
-    params = dict(parse_qsl(query_str))
+    value = decode(value)
+    params = parse_qsl(value)
+    params = filter_sort_multidict(params, ignored_parameters)
+    query_str = urlencode(params)
 
     # parse_qsl doesn't handle key-only params, so add those here
-    key_only_params = [k for k in query_str.split('&') if k and '=' not in k]
-    params.update({k: '' for k in key_only_params})
+    key_only_params = [k for k in value.split('&') if k and '=' not in k]
+    if key_only_params:
+        key_only_param_str = '&'.join(sorted(key_only_params))
+        query_str = f'{query_str}&{key_only_param_str}' if query_str else key_only_param_str
 
-    params = filter_sort_dict(params, ignored_parameters)
-    return urlencode(params)
+    return query_str
 
 
 def redact_response(response: CachedResponse, ignored_parameters: ParamList) -> CachedResponse:
@@ -201,18 +215,6 @@ def redact_response(response: CachedResponse, ignored_parameters: ParamList) ->
     return response
 
 
-def decode(value, encoding='utf-8') -> str:
-    """Decode a value from bytes, if hasn't already been.
-    Note: ``PreparedRequest.body`` is always encoded in utf-8.
-    """
-    return value.decode(encoding) if isinstance(value, bytes) else value
-
-
-def encode(value, encoding='utf-8') -> bytes:
-    """Encode a value to bytes, if it hasn't already been"""
-    return value if isinstance(value, bytes) else str(value).encode(encoding)
-
-
 def filter_sort_json(data: Union[List, Mapping], ignored_parameters: ParamList):
     if isinstance(data, Mapping):
         return filter_sort_dict(data, ignored_parameters)
@@ -220,13 +222,21 @@ def filter_sort_json(data: Union[List, Mapping], ignored_parameters: ParamList):
         return filter_sort_list(data, ignored_parameters)
 
 
-def filter_sort_dict(data: Mapping[str, str], ignored_parameters: ParamList) -> Dict[str, str]:
-    if not ignored_parameters:
-        return dict(sorted(data.items()))
-    return {k: v for k, v in sorted(data.items()) if k not in set(ignored_parameters)}
+def filter_sort_dict(
+    data: Mapping[str, str], ignored_parameters: ParamList = None
+) -> Dict[str, str]:
+    # Note: Any ignored_parameters present will have their values replaced instead of removing the
+    # parameter, so the cache key will still match whether the parameter was present or not.
+    ignored_parameters = set(ignored_parameters or [])
+    return {k: ('REDACTED' if k in ignored_parameters else v) for k, v in sorted(data.items())}
+
+
+def filter_sort_multidict(data: KVList, ignored_parameters: ParamList = None) -> KVList:
+    ignored_parameters = set(ignored_parameters or [])
+    return [(k, 'REDACTED' if k in ignored_parameters else v) for k, v in sorted(data)]
 
 
-def filter_sort_list(data: List, ignored_parameters: ParamList) -> List:
+def filter_sort_list(data: List, ignored_parameters: ParamList = None) -> List:
     if not ignored_parameters:
         return sorted(data)
     return [k for k in sorted(data) if k not in set(ignored_parameters)]
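
One behavioral detail worth illustrating: ignored parameters now have their values replaced with ``'REDACTED'`` rather than being removed, per the note in ``filter_sort_dict`` above. A small sketch using the functions from this file:

    from requests_cache.cache_keys import normalize_params

    # Params are sorted, and ignored values are replaced rather than removed
    print(normalize_params('token=secret&q=cats', ignored_parameters=['token']))
    # -> 'q=cats&token=REDACTED'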
diff --git a/requests_cache/models/__init__.py b/requests_cache/models/__init__.py
index 6ffc7ad..93d8279 100644
--- a/requests_cache/models/__init__.py
+++ b/requests_cache/models/__init__.py
@@ -4,10 +4,11 @@ from typing import Union
 
 from requests import PreparedRequest, Request, Response
 
+from .base import RichMixin
 from .raw_response import CachedHTTPResponse
 from .request import CachedRequest
-from .response import CachedResponse, set_response_defaults
+from .response import CachedResponse, DecodedContent, OriginalResponse
 
-AnyResponse = Union[Response, CachedResponse]
+AnyResponse = Union[OriginalResponse, CachedResponse]
 AnyRequest = Union[Request, PreparedRequest, CachedRequest]
 AnyPreparedRequest = Union[PreparedRequest, CachedRequest]
diff --git a/requests_cache/models/base.py b/requests_cache/models/base.py
new file mode 100644
index 0000000..4c9f59c
--- /dev/null
+++ b/requests_cache/models/base.py
@@ -0,0 +1,33 @@
+from datetime import datetime
+from typing import List
+
+from attr import Factory
+
+
+class RichMixin:
+    """Mixin that customizes output when pretty-printed with rich. Compared to default rich behavior
+    for attrs classes, this does the following:
+
+    * Inform rich about all default values so they will be excluded from output
+    * Handle default value factories
+    * Stringify datetime objects
+    * Does not handle positional-only args (since we don't currently have any)
+    * Add a base repr that excludes default values even if rich isn't installed
+    """
+
+    def __rich_repr__(self):
+        public_attrs = [a for a in self.__attrs_attrs__ if a.repr]
+        for a in public_attrs:
+            default = a.default.factory() if isinstance(a.default, Factory) else a.default
+            value = getattr(self, a.name)
+            value = str(value) if isinstance(value, datetime) else value
+            yield a.name, value, default
+
+    def __repr__(self):
+        tokens: List[str] = []
+        for arg in self.__rich_repr__():
+            key, value, default = arg
+            tokens.append(f'{key}={value!r}' if value != default else None)
+
+        repr_attrs = ', '.join([t for t in tokens if t])
+        return f'{self.__class__.__name__}({repr_attrs})'
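
A hypothetical attrs class to show what the mixin's ``__repr__`` produces; ``Item`` and its fields are illustrative, not part of the library:

    from attr import define, field
    from requests_cache.models import RichMixin

    @define(repr=False)  # let RichMixin.__repr__ take over
    class Item(RichMixin):
        name: str = field(default='')
        count: int = field(default=0)

    print(Item(name='example'))  # Item(name='example') -- count omitted (still default)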
diff --git a/requests_cache/models/raw_response.py b/requests_cache/models/raw_response.py
index 96d586d..f95afab 100644
--- a/requests_cache/models/raw_response.py
+++ b/requests_cache/models/raw_response.py
@@ -1,6 +1,6 @@
 from io import BytesIO
 from logging import getLogger
-from typing import Mapping
+from typing import TYPE_CHECKING, Optional
 
 from attr import define, field, fields_dict
 from requests import Response
@@ -10,53 +10,54 @@ from urllib3.response import (  # type: ignore  # import location false positive
     is_fp_closed,
 )
 
+from . import RichMixin
+
 logger = getLogger(__name__)
 
 
-@define(auto_attribs=False, slots=False)
-class CachedHTTPResponse(HTTPResponse):
-    """A serializable dataclass that emulates :py:class:`~urllib3.response.HTTPResponse`.
-    Supports streaming requests and generator usage.
+if TYPE_CHECKING:
+    from . import CachedResponse
+
 
-    The only action this doesn't support is explicitly calling :py:meth:`.read` with
-    ``decode_content=False``.
+@define(auto_attribs=False, repr=False, slots=False)
+class CachedHTTPResponse(RichMixin, HTTPResponse):
+    """A wrapper class that emulates :py:class:`~urllib3.response.HTTPResponse`.
+
+    This enables consistent behavior for streaming requests and generator usage in the following
+    cases:
+
+    * On an original response, after reading its content to write to the cache
+    * On a cached response
     """
 
     decode_content: bool = field(default=None)
-    # These headers are redundant and not serialized; copied in init and CachedResponse post-init
-    headers: HTTPHeaderDict = None  # type: ignore
+    headers: HTTPHeaderDict = field(factory=HTTPHeaderDict)
     reason: str = field(default=None)
     request_url: str = field(default=None)
     status: int = field(default=0)
     strict: int = field(default=0)
     version: int = field(default=0)
 
-    def __init__(self, *args, body: bytes = None, headers: Mapping = None, **kwargs):
+    def __init__(self, body: Optional[bytes] = None, **kwargs):
         """First initialize via HTTPResponse, then via attrs"""
         kwargs = {k: v for k, v in kwargs.items() if v is not None}
         super().__init__(body=BytesIO(body or b''), preload_content=False, **kwargs)
-
         self._body = body
-        self.headers = HTTPHeaderDict(headers)
-        self.__attrs_init__(*args, **kwargs)  # type: ignore # False positive in mypy 0.920+?
+        self.__attrs_init__(**kwargs)  # type: ignore # False positive in mypy 0.920+?
 
     @classmethod
-    def from_response(cls, original_response: Response):
+    def from_response(cls, response: Response) -> 'CachedHTTPResponse':
         """Create a CachedHTTPResponse based on an original response"""
         # Copy basic attributes
-        raw = original_response.raw
-        copy_attrs = list(fields_dict(cls).keys()) + ['headers']
-        kwargs = {k: getattr(raw, k, None) for k in copy_attrs}
-
-        # Note: _request_url is not available in urllib <=1.21
-        kwargs['request_url'] = getattr(raw, '_request_url', None)
+        raw = response.raw
+        kwargs = {k: getattr(raw, k, None) for k in fields_dict(cls).keys()}
+        kwargs['request_url'] = raw._request_url
 
         # Copy response data and restore response object to its original state
-        if hasattr(raw, '_fp') and not is_fp_closed(raw._fp):
+        if getattr(raw, '_fp', None) and not is_fp_closed(raw._fp):
             body = raw.read(decode_content=False)
             kwargs['body'] = body
             raw._fp = BytesIO(body)
-            original_response.content  # This property reads, decodes, and stores response content
+            response.content  # This property reads, decodes, and stores response content
 
             # After reading, reset file pointer on original raw response
             raw._fp = BytesIO(body)
@@ -65,6 +66,18 @@ class CachedHTTPResponse(HTTPResponse):
 
         return cls(**kwargs)  # type: ignore  # False positive in mypy 0.920+?
 
+    @classmethod
+    def from_cached_response(cls, response: 'CachedResponse'):
+        """Create a CachedHTTPResponse based on a cached response"""
+        obj = cls(
+            headers=HTTPHeaderDict(response.headers),
+            reason=response.reason,
+            status=response.status_code,
+            request_url=response.request.url,
+        )
+        obj.reset(response._content)
+        return obj
+
     def release_conn(self):
         """No-op for compatibility"""
 
@@ -72,8 +85,8 @@ class CachedHTTPResponse(HTTPResponse):
         """Simplified reader for cached content that emulates
         :py:meth:`urllib3.response.HTTPResponse.read()`
         """
-        if 'content-encoding' in self.headers and decode_content is False:
-            logger.warning('read() returns decoded data, even with decode_content=False')
+        if 'Content-Encoding' in self.headers and decode_content is False:
+            logger.warning('read(decode_content=False) is not supported for cached responses')
 
         data = self._fp.read(amt)
         # "close" the file to inform consumers to stop reading from it
@@ -81,7 +94,7 @@ class CachedHTTPResponse(HTTPResponse):
             self._fp.close()
         return data
 
-    def reset(self, body: bytes = None):
+    def reset(self, body: Optional[bytes] = None):
         """Reset raw response file pointer, and optionally update content"""
         if body is not None:
             self._body = body
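
The net effect of the CachedHTTPResponse changes is that streaming usage behaves the same on the first (network) request and on later cache hits. A short usage sketch, assuming httpbin.org is reachable:

    from requests_cache import CachedSession

    session = CachedSession('http_cache')
    # Identical iteration behavior whether this hits the network or the cache
    response = session.get('https://httpbin.org/stream/3', stream=True)
    for line in response.iter_lines():
        print(line)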
diff --git a/requests_cache/models/request.py b/requests_cache/models/request.py
index 870a336..acef134 100644
--- a/requests_cache/models/request.py
+++ b/requests_cache/models/request.py
@@ -7,12 +7,13 @@ from requests.cookies import RequestsCookieJar
 from requests.structures import CaseInsensitiveDict
 
 from ..cache_keys import encode
+from . import RichMixin
 
 logger = getLogger(__name__)
 
 
-@define(auto_attribs=False)
-class CachedRequest:
+@define(repr=False)
+class CachedRequest(RichMixin):
     """A serializable dataclass that emulates :py:class:`requests.PreparedResponse`"""
 
     body: bytes = field(default=None, converter=encode)
diff --git a/requests_cache/models/response.py b/requests_cache/models/response.py
index a3b364f..2cd6047 100755
--- a/requests_cache/models/response.py
+++ b/requests_cache/models/response.py
@@ -1,85 +1,121 @@
+from __future__ import annotations
+
 from datetime import datetime, timedelta, timezone
 from logging import getLogger
-from typing import TYPE_CHECKING, List, Optional, Tuple, Union
+from time import time
+from typing import TYPE_CHECKING, Dict, List, Optional, Union
 
 import attr
 from attr import define, field
 from requests import PreparedRequest, Response
 from requests.cookies import RequestsCookieJar
 from requests.structures import CaseInsensitiveDict
-from urllib3._collections import HTTPHeaderDict
 
 from ..policy import ExpirationTime, get_expiration_datetime
-from . import CachedHTTPResponse, CachedRequest
+from . import CachedHTTPResponse, CachedRequest, RichMixin
+
+if TYPE_CHECKING:
+    from ..policy.actions import CacheActions
 
 DATETIME_FORMAT = '%Y-%m-%d %H:%M:%S %Z'  # Format used for __str__ only
-HeaderList = List[Tuple[str, str]]
+DecodedContent = Union[Dict, str, None]
 logger = getLogger(__name__)
 
 
-@define(auto_attribs=False, slots=False)
-class CachedResponse(Response):
-    """A class that emulates :py:class:`requests.Response`, with some additional optimizations
-    for serialization.
+@define(auto_attribs=False, repr=False, slots=False)
+class BaseResponse(Response):
+    """Wrapper class for responses returned by :py:class:`.CachedSession`. This mainly exists to
+    provide type hints for extra cache-related attributes that are added to non-cached responses.
     """
 
+    created_at: datetime = field(factory=datetime.utcnow)
+    expires: Optional[datetime] = field(default=None)
+    cache_key: str = ''  # Not serialized; set by BaseCache.get_response()
+    revalidated: bool = False  # Not serialized; set by CacheActions.update_revalidated_response()
+
+    @property
+    def from_cache(self) -> bool:
+        return False
+
+    @property
+    def is_expired(self) -> bool:
+        return False
+
+
+@define(auto_attribs=False, repr=False, slots=False)
+class OriginalResponse(BaseResponse):
+    """Wrapper class for non-cached responses returned by :py:class:`.CachedSession`"""
+
+    @classmethod
+    def wrap_response(cls, response: Response, actions: 'CacheActions') -> 'OriginalResponse':
+        """Modify a response object in-place and add extra cache-related attributes"""
+        if not isinstance(response, cls):
+            response.__class__ = cls
+            # Add expires and cache_key only if the response was written to the cache
+            response.expires = None if actions.skip_write else actions.expires  # type: ignore
+            response.cache_key = None if actions.skip_write else actions.cache_key  # type: ignore
+            response.created_at = datetime.utcnow()  # type: ignore
+        return response  # type: ignore
+
+
+@define(auto_attribs=False, repr=False, slots=False)
+class CachedResponse(RichMixin, BaseResponse):
+    """A class that emulates :py:class:`requests.Response`, optimized for serialization"""
+
     _content: bytes = field(default=None)
+    _decoded_content: DecodedContent = field(default=None)
     _next: Optional[CachedRequest] = field(default=None)
-    cache_key: Optional[str] = None  # Not serialized; set by BaseCache.get_response()
     cookies: RequestsCookieJar = field(factory=RequestsCookieJar)
-    created_at: datetime = field(factory=datetime.utcnow)
+    created_at: datetime = field(default=None)
     elapsed: timedelta = field(factory=timedelta)
     encoding: str = field(default=None)
     expires: Optional[datetime] = field(default=None)
     headers: CaseInsensitiveDict = field(factory=CaseInsensitiveDict)
     history: List['CachedResponse'] = field(factory=list)  # type: ignore
-    raw: CachedHTTPResponse = field(factory=CachedHTTPResponse, repr=False)
+    raw: CachedHTTPResponse = None  # type: ignore  # Not serialized; populated from CachedResponse attrs
     reason: str = field(default=None)
     request: CachedRequest = field(factory=CachedRequest)  # type: ignore
     status_code: int = field(default=0)
     url: str = field(default=None)
 
     def __attrs_post_init__(self):
-        """Re-initialize raw response body after deserialization"""
-        if self.raw._body is None and self._content is not None:
-            self.raw.reset(self._content)
-        if not self.raw.headers:
-            self.raw.headers = HTTPHeaderDict(self.headers)
+        # Not using created_at field default due to possible bug on Windows with omit_if_default
+        self.created_at = self.created_at or datetime.utcnow()
+        # Re-initialize raw (urllib3) response after deserialization
+        self.raw = self.raw or CachedHTTPResponse.from_cached_response(self)
 
     @classmethod
-    def from_response(
-        cls,
-        original_response: Union[Response, 'CachedResponse'],
-        expires: datetime = None,
-        **kwargs,
-    ):
+    def from_response(cls, response: Response, **kwargs) -> 'CachedResponse':
         """Create a CachedResponse based on an original Response or another CachedResponse object"""
-        if isinstance(original_response, CachedResponse):
-            return attr.evolve(original_response, expires=expires)
-        obj = cls(expires=expires, **kwargs)
+        if isinstance(response, CachedResponse):
+            obj = attr.evolve(response, **kwargs)
+            obj._convert_redirects()
+            return obj
+
+        obj = cls(**kwargs)
 
         # Copy basic attributes
         for k in Response.__attrs__:
-            setattr(obj, k, getattr(original_response, k, None))
+            setattr(obj, k, getattr(response, k, None))
 
         # Store request, raw response, and next response (if it's a redirect response)
-        obj.request = CachedRequest.from_request(original_response.request)
-        obj.raw = CachedHTTPResponse.from_response(original_response)
-        obj._next = (
-            CachedRequest.from_request(original_response.next) if original_response.next else None
-        )
+        obj.raw = CachedHTTPResponse.from_response(response)
+        obj.request = CachedRequest.from_request(response.request)
+        obj._next = CachedRequest.from_request(response.next) if response.next else None
 
         # Store response body, which will have been read & decoded by requests.Response by now
-        obj._content = original_response.content
-
-        # Copy redirect history, if any; avoid recursion by not copying redirects of redirects
-        obj.history = []
-        if not obj.is_redirect:
-            for redirect in original_response.history:
-                obj.history.append(cls.from_response(redirect))
+        obj._content = response.content
 
+        obj._convert_redirects()
         return obj
 
+    def _convert_redirects(self):
+        """Convert redirect history, if any; avoid recursion by not copying redirects of redirects"""
+        if self.is_redirect:
+            self.history = []
+            return
+        self.history = [self.from_response(redirect) for redirect in self.history]
+
     @property
     def _content_consumed(self) -> bool:
         """For compatibility with requests.Response; will always be True for a cached response"""
@@ -89,6 +125,20 @@ class CachedResponse(Response):
     def _content_consumed(self, value: bool):
         pass
 
+    @property
+    def expires_delta(self) -> Optional[int]:
+        """Get time to expiration in seconds (rounded to the nearest second)"""
+        if self.expires is None:
+            return None
+        delta = self.expires - datetime.utcnow()
+        return round(delta.total_seconds())
+
+    @property
+    def expires_unix(self) -> Optional[int]:
+        """Get expiration time as a Unix timestamp"""
+        seconds = self.expires_delta
+        return round(time() + seconds) if seconds is not None else None
+
     @property
     def from_cache(self) -> bool:
         return True
@@ -98,21 +148,18 @@ class CachedResponse(Response):
         """Determine if this cached response is expired"""
         return self.expires is not None and datetime.utcnow() >= self.expires
 
-    @property
-    def ttl(self) -> Optional[int]:
-        """Get time to expiration in seconds"""
-        if self.expires is None or self.is_expired:
-            return None
-        delta = self.expires - datetime.utcnow()
-        return int(delta.total_seconds())
+    def is_older_than(self, older_than: ExpirationTime) -> bool:
+        """Determine if this cached response is older than the given time"""
+        older_than = get_expiration_datetime(older_than, negative_delta=True)
+        return older_than is not None and self.created_at < older_than
 
     @property
     def next(self) -> Optional[PreparedRequest]:
         """Returns a PreparedRequest for the next request in a redirect chain, if there is one."""
         return self._next.prepare() if self._next else None
 
-    def revalidate(self, expire_after: ExpirationTime) -> bool:
-        """Set a new expiration for this response, and determine if it is now expired"""
+    def reset_expiration(self, expire_after: ExpirationTime):
+        """Set a new expiration for this response"""
         self.expires = get_expiration_datetime(expire_after)
         return self.is_expired
 
@@ -132,9 +179,10 @@ class CachedResponse(Response):
 
     def __str__(self):
         return (
-            f'request: {self.request}, response: {self.status_code} '
-            f'({format_file_size(self.size)}), created: {format_datetime(self.created_at)}, '
-            f'expires: {format_datetime(self.expires)} ({"stale" if self.is_expired else "fresh"})'
+            f'<CachedResponse [{self.status_code}]: '
+            f'created: {format_datetime(self.created_at)}, '
+            f'expires: {format_datetime(self.expires)} ({"stale" if self.is_expired else "fresh"}), '
+            f'size: {format_file_size(self.size)}, request: {self.request}>'
         )
 
 
@@ -161,18 +209,3 @@ def format_file_size(n_bytes: int) -> str:
 
     if TYPE_CHECKING:
         return _format(unit)
-
-
-def set_response_defaults(
-    response: Union[Response, CachedResponse], cache_key: str = None
-) -> Union[Response, CachedResponse]:
-    """Set some default CachedResponse values on a requests.Response object, so they can be
-    expected to always be present
-    """
-    if not isinstance(response, CachedResponse):
-        response.cache_key = cache_key  # type: ignore
-        response.created_at = None  # type: ignore
-        response.expires = None  # type: ignore
-        response.from_cache = False  # type: ignore
-        response.is_expired = False  # type: ignore
-    return response
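
A quick sketch of the new expiration properties in use (``expires_delta`` replaces the removed ``ttl`` property; the values shown are approximate):

    from requests_cache import CachedSession

    session = CachedSession('http_cache', expire_after=60)
    session.get('https://httpbin.org/get')             # fetched and cached
    response = session.get('https://httpbin.org/get')  # served from the cache
    if response.from_cache:
        print(response.expires_delta)  # ~60: seconds until expiration (negative once stale)
        print(response.expires_unix)   # the same moment as a Unix timestamp
        print(response.is_expired)     # False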
diff --git a/requests_cache/patcher.py b/requests_cache/patcher.py
index 9730ba8..406ee45 100644
--- a/requests_cache/patcher.py
+++ b/requests_cache/patcher.py
@@ -1,4 +1,4 @@
-"""Utilities for patching ``requests``.
+"""Utilities for patching ``requests``. See :ref:`patching` for general usage info.
 
 .. warning:: These functions are not thread-safe. Use :py:class:`.CachedSession` if you want to use
     caching in a multi-threaded environment.
@@ -9,12 +9,12 @@
 """
 from contextlib import contextmanager
 from logging import getLogger
-from typing import Callable, Dict, Iterable, Optional, Type
+from typing import Optional, Type
+from warnings import warn
 
 import requests
 
 from .backends import BackendSpecifier, BaseCache, init_backend
-from .policy import ExpirationTime
 from .session import CachedSession, OriginalSession
 
 logger = getLogger(__name__)
@@ -22,13 +22,7 @@ logger = getLogger(__name__)
 
 def install_cache(
     cache_name: str = 'http_cache',
-    backend: BackendSpecifier = None,
-    expire_after: ExpirationTime = -1,
-    urls_expire_after: Dict[str, ExpirationTime] = None,
-    allowable_codes: Iterable[int] = (200,),
-    allowable_methods: Iterable['str'] = ('GET', 'HEAD'),
-    filter_fn: Callable = None,
-    stale_if_error: bool = False,
+    backend: Optional[BackendSpecifier] = None,
     session_factory: Type[OriginalSession] = CachedSession,
     **kwargs,
 ):
@@ -49,17 +43,7 @@ def install_cache(
 
     class _ConfiguredCachedSession(session_factory):  # type: ignore  # See mypy issue #5865
         def __init__(self):
-            super().__init__(
-                cache_name=cache_name,
-                backend=backend,
-                expire_after=expire_after,
-                urls_expire_after=urls_expire_after,
-                allowable_codes=allowable_codes,
-                allowable_methods=allowable_methods,
-                filter_fn=filter_fn,
-                stale_if_error=stale_if_error,
-                **kwargs,
-            )
+            super().__init__(cache_name=cache_name, backend=backend, **kwargs)
 
     _patch_session_factory(_ConfiguredCachedSession)
 
@@ -77,7 +61,7 @@ def disabled():
     Example:
 
         >>> with requests_cache.disabled():
-        ...     requests.get('http://httpbin.org/get')
+        ...     requests.get('https://httpbin.org/get')
 
     """
     previous = requests.Session
@@ -93,13 +77,12 @@ def enabled(*args, **kwargs):
     """
     Context manager for temporarily enabling caching for all ``requests`` functions
 
-    Accepts the same arguments as :py:func:`.install_cache`.
-
     Example:
 
-        >>> with requests_cache.enabled('cache_db'):
-        ...     requests.get('http://httpbin.org/get')
+        >>> with requests_cache.enabled('cache.db'):
+        ...     requests.get('https://httpbin.org/get')
 
+    Accepts the same arguments as :py:class:`.CachedSession` and :py:func:`.install_cache`.
     """
     install_cache(*args, **kwargs)
     try:
@@ -124,15 +107,22 @@ def clear():
         get_cache().clear()
 
 
-def remove_expired_responses(expire_after: ExpirationTime = None):
-    """Remove expired responses from the cache, optionally with revalidation
-
-    Args:
-        expire_after: A new expiration time used to revalidate the cache
+def delete(*args, **kwargs):
+    """Remove responses from the cache according one or more conditions.
+    See :py:meth:`.BaseCache.delete for usage details.
     """
     session = requests.Session()
     if isinstance(session, CachedSession):
-        session.remove_expired_responses(expire_after)
+        session.cache.delete(*args, **kwargs)
+
+
+def remove_expired_responses():
+    """Remove expired responses from the cache"""
+    warn(
+        'remove_expired_responses() is deprecated; please use delete() instead',
+        DeprecationWarning,
+    )
+    delete(expired=True)
 
 
 def _patch_session_factory(session_factory: Type[OriginalSession] = CachedSession):
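
For callers of the patched global API, the migration implied by the deprecation above looks roughly like this (assuming ``delete()`` is re-exported at the top level alongside ``install_cache()``):

    import requests
    import requests_cache

    requests_cache.install_cache('http_cache', expire_after=360)
    requests.get('https://httpbin.org/get')

    # Replaces the deprecated requests_cache.remove_expired_responses()
    requests_cache.delete(expired=True)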
diff --git a/requests_cache/policy/__init__.py b/requests_cache/policy/__init__.py
index 965ff65..48b384a 100644
--- a/requests_cache/policy/__init__.py
+++ b/requests_cache/policy/__init__.py
@@ -1,2 +1,25 @@
-# flake8: noqa: F401, F403
-from .actions import *
+"""Modules that implement cache policy, based on a combination of standard HTTP headers and
+additional settings and features specific to requests-cache.
+"""
+# flake8: noqa: E402,F401
+# isort: skip_file
+from datetime import datetime, timedelta
+from typing import Callable, Dict, Pattern as RegexPattern, Union, MutableMapping
+
+from requests import Response
+
+ExpirationTime = Union[None, int, float, str, datetime, timedelta]
+ExpirationPattern = Union[  # Either a glob expression as str or a compiled regex pattern
+    str,
+    RegexPattern,
+]
+ExpirationPatterns = Dict[ExpirationPattern, ExpirationTime]
+FilterCallback = Callable[[Response], bool]
+KeyCallback = Callable[..., str]
+HeaderDict = MutableMapping[str, str]
+
+
+from .expiration import *
+from .settings import *
+from .directives import CacheDirectives, set_request_headers
+from .actions import CacheActions
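
The ``ExpirationPattern`` alias above means ``urls_expire_after`` keys may be compiled regex patterns as well as glob strings. An illustrative sketch (the URLs are hypothetical):

    import re
    from requests_cache import CachedSession

    session = CachedSession(
        'http_cache',
        urls_expire_after={
            '*.example.com/static/*': -1,       # glob string: never expire
            re.compile(r'.*/api/v\d+/'): 60,    # compiled regex: 60 seconds
        },
    )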
diff --git a/requests_cache/policy/actions.py b/requests_cache/policy/actions.py
index 6482bb0..034cb72 100644
--- a/requests_cache/policy/actions.py
+++ b/requests_cache/policy/actions.py
@@ -1,279 +1,330 @@
-"""Internal utilities for determining cache expiration and other cache actions.
-
-.. automodsumm:: requests_cache.cache_control
-   :classes-only:
-   :nosignatures:
-
-.. automodsumm:: requests_cache.cache_control
-   :functions-only:
-   :nosignatures:
-"""
-from __future__ import annotations
-
-from datetime import datetime, timedelta, timezone
-from email.utils import parsedate_to_datetime
-from fnmatch import fnmatch
-from logging import getLogger
-from math import ceil
-from typing import TYPE_CHECKING, Any, Dict, Mapping, Optional, Tuple, Union
+from datetime import datetime, timedelta
+from logging import DEBUG, getLogger
+from typing import TYPE_CHECKING, Dict, List, MutableMapping, Optional, Union
 
 from attr import define, field
 from requests import PreparedRequest, Response
 
 from .._utils import coalesce
+from ..cache_keys import normalize_headers
+from ..models import RichMixin
+from . import (
+    DO_NOT_CACHE,
+    EXPIRE_IMMEDIATELY,
+    NEVER_EXPIRE,
+    CacheDirectives,
+    ExpirationTime,
+    KeyCallback,
+    get_expiration_datetime,
+    get_expiration_seconds,
+    get_url_expiration,
+)
+from .settings import CacheSettings
 
 if TYPE_CHECKING:
-    from .models import CachedResponse
-
-
-# May be set by either headers or expire_after param to disable caching or disable expiration
-DO_NOT_CACHE = 0
-NEVER_EXPIRE = -1
-# Supported Cache-Control directives
-CACHE_DIRECTIVES = ['immutable', 'max-age', 'no-cache', 'no-store']
-
-CacheDirective = Tuple[str, Union[None, int, bool]]
-ExpirationTime = Union[None, int, float, str, datetime, timedelta]
-ExpirationPatterns = Dict[str, ExpirationTime]
+    from ..models import CachedResponse
 
 logger = getLogger(__name__)
 
 
-@define
-class CacheActions:
-    """A class that translates cache settings and headers into specific actions to take for a
-    given cache item. Actions include:
+@define(repr=False)
+class CacheActions(RichMixin):
+    """Translates cache settings and headers into specific actions to take for a given cache item.
+    The resulting actions are then handled in :py:meth:`.CachedSession.send`.
 
-    * Read from the cache
-    * Write to the cache
-    * Set cache expiration
-    * Add headers for conditional requests
+    .. rubric:: Notes
 
-    If multiple sources provide an expiration time, they will be used in the following order of
-    precedence:
+    * See :ref:`precedence` for behavior if multiple sources provide an expiration
+    * See :ref:`headers` for more details about header behavior
+    * The following arguments/properties are the outputs of this class:
 
-    1. Cache-Control request headers
-    2. Cache-Control response headers (if enabled)
-    3. Per-request expiration
-    4. Per-URL expiration
-    5. Per-session expiration
-
-    See :ref:`headers` for more details about behavior.
+    Args:
+        cache_key: The cache key created based on the initial request
+        error_504: Indicates the request cannot be fulfilled based on cache settings
+        expire_after: User or header-provided expiration value
+        send_request: Send a new request
+        resend_request: Send a new request to refresh a stale cache item
+        resend_async: Return a stale cache item, and send a non-blocking request to refresh it
+        skip_read: Skip reading from the cache
+        skip_write: Skip writing to the cache
     """
 
-    cache_control: bool = field(default=False)
-    cache_key: str = field(default=None)
+    # Outputs
+    cache_key: str = field(default=None, repr=False)
+    error_504: bool = field(default=False)
     expire_after: ExpirationTime = field(default=None)
-    request_directives: Dict[str, str] = field(factory=dict)
+    send_request: bool = field(default=False)
+    resend_request: bool = field(default=False)
+    resend_async: bool = field(default=False)
     skip_read: bool = field(default=False)
     skip_write: bool = field(default=False)
-    validation_headers: Dict[str, str] = field(factory=dict)
+
+    # Inputs
+    _directives: CacheDirectives = field(default=None, repr=False)
+    _settings: CacheSettings = field(default=None, repr=False)
+
+    # Temporary attributes
+    _only_if_cached: bool = field(default=False, repr=False)
+    _refresh: bool = field(default=False, repr=False)
+    _request: PreparedRequest = field(default=None, repr=False)
+    _stale_if_error: Union[bool, ExpirationTime] = field(default=None, repr=False)
+    _stale_while_revalidate: Union[bool, ExpirationTime] = field(default=None, repr=False)
+    _validation_headers: Dict[str, str] = field(factory=dict, repr=False)
 
     @classmethod
     def from_request(
-        cls,
-        cache_key: str,
-        request: PreparedRequest,
-        cache_control: bool = False,
-        session_expire_after: ExpirationTime = None,
-        urls_expire_after: ExpirationPatterns = None,
-        request_expire_after: ExpirationTime = None,
-        **kwargs,
+        cls, cache_key: str, request: PreparedRequest, settings: Optional[CacheSettings] = None
     ):
-        """Initialize from request info and cache settings"""
-        directives = get_cache_directives(request.headers)
+        """Initialize from request info and cache settings.
+
+        Note on refreshing: `must-revalidate` isn't a standard request header, but is used here to
+        indicate a user-requested refresh. Typically that's only used in response headers, and
+        `max-age=0` would be used by a client to request a refresh. However, this would conflict
+        with the `expire_after` option provided in :py:meth:`.CachedSession.request`.
+
+        Args:
+            request: The outgoing request
+            settings: Session-level cache settings
+        """
+        settings = settings or CacheSettings()
+        directives = CacheDirectives.from_headers(request.headers)
         logger.debug(f'Cache directives from request headers: {directives}')
 
+        # Merge values that may come from either settings or headers
+        only_if_cached = settings.only_if_cached or directives.only_if_cached
+        refresh = directives.max_age == EXPIRE_IMMEDIATELY or directives.must_revalidate
+        stale_if_error = settings.stale_if_error or directives.stale_if_error
+        stale_while_revalidate = (
+            settings.stale_while_revalidate or directives.stale_while_revalidate
+        )
+
         # Check expiration values in order of precedence
         expire_after = coalesce(
-            directives.get('max-age'),
-            request_expire_after,
-            get_url_expiration(request.url, urls_expire_after),
-            session_expire_after,
+            directives.max_age,
+            get_url_expiration(request.url, settings.urls_expire_after),
+            settings.expire_after,
         )
 
-        # Check conditions for caching based on request headers. Also check expire_after options
-        # unless cache_control=True, in which case these may be overridden by response headers.
-        check_expiration = directives.get('max-age') if cache_control else expire_after
-        skip_write = check_expiration == DO_NOT_CACHE or 'no-store' in directives
+        # Check and log conditions for reading from the cache
+        read_criteria = {
+            'disabled cache': settings.disabled,
+            'disabled method': str(request.method) not in settings.allowable_methods,
+            'disabled by headers or refresh': directives.no_cache or directives.no_store,
+            'disabled by expiration': expire_after == DO_NOT_CACHE,
+        }
+        _log_cache_criteria('read', read_criteria)
 
-        return cls(
-            cache_control=cache_control,
+        actions = cls(
             cache_key=cache_key,
+            directives=directives,
             expire_after=expire_after,
-            request_directives=directives,
-            skip_read=skip_write or 'no-cache' in directives,
-            skip_write=skip_write,
+            only_if_cached=only_if_cached,
+            refresh=refresh,
+            request=request,
+            settings=settings,
+            skip_read=any(read_criteria.values()),
+            skip_write=directives.no_store,
+            stale_if_error=stale_if_error,
+            stale_while_revalidate=stale_while_revalidate,
         )
+        return actions
 
     @property
     def expires(self) -> Optional[datetime]:
-        """Convert the user/header-provided expiration value to a datetime"""
+        """Convert the user/header-provided expiration value to a datetime. Applies to new cached
+        responses, and previously cached responses that are being revalidated.
+        """
         return get_expiration_datetime(self.expire_after)
 
-    def update_from_cached_response(self, response: CachedResponse):
-        """Check for relevant cache headers from a cached response, and set headers for a
-        conditional request, if possible.
+    def is_usable(self, cached_response: Optional['CachedResponse'], error: bool = False):
+        """Determine whether a given cached response is "fresh enough" to satisfy the request,
+        based on:
 
-        Used after fetching a cached response, but before potentially sending a new request
-        (if expired).
+        * min-fresh
+        * max-stale
+    * stale-if-error (if an error has occurred)
+        * stale-while-revalidate
         """
-        if not response or not response.is_expired:
-            return
+        if cached_response is None:
+            return False
+        elif (
+            cached_response.expires is None
+            or (cached_response.is_expired and self._stale_while_revalidate is True)
+            or (error and self._stale_if_error is True)
+        ):
+            return True
+        # Handle stale_if_error as a time value
+        elif error and self._stale_if_error:
+            offset = timedelta(seconds=get_expiration_seconds(self._stale_if_error))
+        # Handle stale_while_revalidate as a time value
+        elif cached_response.is_expired and self._stale_while_revalidate:
+            offset = timedelta(seconds=get_expiration_seconds(self._stale_while_revalidate))
+        # Handle min-fresh and max-stale
+        else:
+            offset = self._directives.get_expire_offset()
 
-        if response.headers.get('ETag'):
-            self.validation_headers['If-None-Match'] = response.headers['ETag']
-        if response.headers.get('Last-Modified'):
-            self.validation_headers['If-Modified-Since'] = response.headers['Last-Modified']
+        return datetime.utcnow() < cached_response.expires + offset
 
-    def update_from_response(self, response: Response):
-        """Update expiration + actions based on headers from a new response.
+    def update_from_cached_response(
+        self,
+        cached_response: Optional['CachedResponse'],
+        create_key: Optional[KeyCallback] = None,
+        **key_kwargs,
+    ):
+        """Determine if we can reuse a cached response, or set headers for a conditional request
+        if possible.
+
+        Used after fetching a cached response, but before potentially sending a new request.
 
-        Used after receiving a new response but before saving it to the cache.
+        Args:
+            cached_response: Cached response to examine
+            create_key: Cache key function, used for validating ``Vary`` headers
+            key_kwargs: Additional keyword arguments for ``create_key``.
         """
-        if not response or not self.cache_control:
-            return
+        usable_response = self.is_usable(cached_response)
+        usable_if_error = self.is_usable(cached_response, error=True)
+
+        # Can't satisfy the request
+        if not usable_response and self._only_if_cached and not usable_if_error:
+            self.error_504 = True
+        # Send the request for the first time
+        elif cached_response is None:
+            self.send_request = True
+        # If response contains Vary and doesn't match, consider it a cache miss
+        elif create_key and not self._validate_vary(cached_response, create_key, **key_kwargs):
+            self.send_request = True
+        # Resend the request, unless settings permit a stale response
+        elif not usable_response and not (self._only_if_cached and usable_if_error):
+            self.resend_request = True
+        # Resend the request in the background; meanwhile return stale response
+        elif cached_response.is_expired and usable_response and self._stale_while_revalidate:
+            self.resend_async = True
+
+        if cached_response is not None and not self._only_if_cached:
+            self._update_validation_headers(cached_response)
+        logger.debug(f'Post-read cache actions: {self}')
 
-        directives = get_cache_directives(response.headers)
+    def update_from_response(self, response: Response):
+        """Update expiration + actions based on headers and other details from a new response.
+
+        Used after receiving a new response, but before saving it to the cache.
+        """
+        directives = CacheDirectives.from_headers(response.headers)
+        if self._settings.cache_control:
+            self._update_from_response_headers(directives)
+
+        # If "expired" but there's a validator, save it to the cache and revalidate on use
+        skip_stale = self.expire_after == EXPIRE_IMMEDIATELY and not directives.has_validator
+        do_not_cache = self.expire_after == DO_NOT_CACHE
+
+        # Apply filter callback, if any
+        callback = self._settings.filter_fn
+        filtered_out = callback is not None and not callback(response)
+
+        # Check and log conditions for writing to the cache
+        write_criteria = {
+            'disabled cache': self._settings.disabled,
+            'disabled method': str(response.request.method) not in self._settings.allowable_methods,
+            'disabled status': response.status_code not in self._settings.allowable_codes,
+            'disabled by filter': filtered_out,
+            'disabled by headers': self.skip_write,
+            'disabled by expiration': do_not_cache or skip_stale,
+        }
+        self.skip_write = any(write_criteria.values())
+        _log_cache_criteria('write', write_criteria)
+
+    def update_request(self, request: PreparedRequest) -> PreparedRequest:
+        """Apply validation headers (if any) before sending a request"""
+        request.headers.update(self._validation_headers)
+        return request
+
+    def update_revalidated_response(
+        self, response: Response, cached_response: 'CachedResponse'
+    ) -> 'CachedResponse':
+        """After revalidation, update the cached response's expiration and headers"""
+        logger.debug(f'Response for URL {response.request.url} has not been modified')
+        cached_response.expires = self.expires
+        cached_response.headers.update(response.headers)
+        cached_response.revalidated = True
+        return cached_response
+
+    def _update_from_response_headers(self, directives: CacheDirectives):
+        """Check response headers for expiration and other cache directives"""
         logger.debug(f'Cache directives from response headers: {directives}')
 
-        # Check headers for expiration, validators, and other cache directives
-        if directives.get('immutable'):
+        self._stale_if_error = self._stale_if_error or directives.stale_if_error
+        if directives.immutable:
             self.expire_after = NEVER_EXPIRE
         else:
             self.expire_after = coalesce(
-                directives.get('max-age'), directives.get('expires'), self.expire_after
+                directives.max_age,
+                directives.expires,
+                self.expire_after,
             )
-        has_validator = response.headers.get('ETag') or response.headers.get('Last-Modified')
-        no_store = 'no-store' in directives or 'no-store' in self.request_directives
-
-        # If expiration is 0 and there's a validator, save it to the cache and revalidate on use
-        # Otherwise, skip writing to the cache if specified by expiration or other headers
-        expire_immediately = _try_int(self.expire_after) == DO_NOT_CACHE
-        self.skip_write = (expire_immediately or no_store) and not has_validator
-
-
-def get_expiration_datetime(
-    expire_after: ExpirationTime, ignore_invalid_httpdate: bool = False
-) -> Optional[datetime]:
-    """Convert an expiration value in any supported format to an absolute datetime"""
-    # Never expire
-    if expire_after is None or expire_after == NEVER_EXPIRE:
-        return None
-    # Expire immediately
-    elif _try_int(expire_after) == DO_NOT_CACHE:
-        return datetime.utcnow()
-    # Httpdate str (allowed for headers only)
-    if isinstance(expire_after, str):
-        expire_after_dt = _parse_http_date(expire_after)
-        if not expire_after_dt and not ignore_invalid_httpdate:
-            raise ValueError(f'Invalid HTTP date: {expire_after}')
-        return expire_after_dt
-    # Already a datetime
-    elif isinstance(expire_after, datetime):
-        return _to_utc(expire_after)
-
-    # Otherwise, it must be a timedelta or time in seconds
-    if not isinstance(expire_after, timedelta):
-        expire_after = timedelta(seconds=expire_after)
-    return datetime.utcnow() + expire_after
-
-
-def get_expiration_seconds(expire_after: ExpirationTime) -> int:
-    """Convert an expiration value in any supported format to an expiration time in seconds"""
-    expires = get_expiration_datetime(expire_after)
-    return ceil((expires - datetime.utcnow()).total_seconds()) if expires else NEVER_EXPIRE
-
-
-def get_cache_directives(headers: Mapping) -> Dict:
-    """Get all Cache-Control directives, and handle multiple headers and comma-separated lists"""
-    if not headers:
-        return {}
-
-    kv_directives = {}
-    if headers.get('Cache-Control'):
-        cache_control = (
-            headers['Cache-Control'].decode()
-            if isinstance(headers['Cache-Control'], bytes)
-            else headers['Cache-Control']
-        )
-        cache_directives = cache_control.split(',')
-        kv_directives = dict([_split_kv_directive(value) for value in cache_directives])
-
-    if 'Expires' in headers:
-        kv_directives['expires'] = headers['Expires']
-    return kv_directives
-
-
-def get_url_expiration(
-    url: Optional[str], urls_expire_after: ExpirationPatterns = None
-) -> ExpirationTime:
-    """Check for a matching per-URL expiration, if any"""
-    if not url:
-        return None
-
-    for pattern, expire_after in (urls_expire_after or {}).items():
-        if _url_match(url, pattern):
-            logger.debug(f'URL {url} matched pattern "{pattern}": {expire_after}')
-            return expire_after
-    return None
-
-
-def _parse_http_date(value: str) -> Optional[datetime]:
-    """Attempt to parse an HTTP (RFC 5322-compatible) timestamp"""
-    try:
-        expire_after = parsedate_to_datetime(value)
-        return _to_utc(expire_after)
-    except (TypeError, ValueError):
-        logger.debug(f'Failed to parse timestamp: {value}')
-        return None
+        self.skip_write = self.skip_write or directives.no_store
 
+    def _update_validation_headers(self, cached_response: 'CachedResponse'):
+        """If needed, get validation headers based on a cached response. Revalidation may be
+        triggered by a stale response, request headers, or cached response headers.
+        """
+        directives = CacheDirectives.from_headers(cached_response.headers)
+        # These conditions always apply
+        revalidate = directives.has_validator and (
+            cached_response.is_expired or self._refresh or self._settings.always_revalidate
+        )
+        # These conditions only apply if cache_control=True
+        cc_revalidate = self._settings.cache_control and (
+            directives.no_cache or directives.must_revalidate
+        )
 
-def _split_kv_directive(header_value: str) -> CacheDirective:
-    """Split a cache directive into a ``(header_value, int)`` key-value pair, if possible;
-    otherwise just ``(header_value, True)``.
-    """
-    header_value = header_value.strip()
-    if '=' in header_value:
-        k, v = header_value.split('=', 1)
-        return k, _try_int(v)
+        # Add the appropriate validation headers, if needed
+        if revalidate or cc_revalidate:
+            if directives.etag:
+                self._validation_headers['If-None-Match'] = directives.etag
+            if directives.last_modified:
+                self._validation_headers['If-Modified-Since'] = directives.last_modified
+            self.send_request = True
+            self.resend_request = False
+
+    def _validate_vary(
+        self, cached_response: 'CachedResponse', create_key: KeyCallback, **key_kwargs
+    ) -> bool:
+        """If the cached response contains Vary, check that the specified request headers match"""
+        vary = cached_response.headers.get('Vary')
+        if not vary:
+            return True
+        elif vary == '*':
+            return False
+
+        # Generate a secondary cache key based on Vary for both the cached request and new request
+        key_kwargs['match_headers'] = [k.strip() for k in vary.split(',')]
+        vary_cache_key = create_key(cached_response.request, **key_kwargs)
+        headers_match = create_key(self._request, **key_kwargs) == vary_cache_key
+        if not headers_match:
+            _log_vary_diff(
+                self._request.headers, cached_response.request.headers, key_kwargs['match_headers']
+            )
+        return headers_match
+
+
+def _log_vary_diff(
+    headers_1: MutableMapping[str, str], headers_2: MutableMapping[str, str], vary: List[str]
+):
+    """Log which specific headers specified by Vary did not match"""
+    if logger.level > DEBUG:
+        return
+    headers_1 = normalize_headers(headers_1)
+    headers_2 = normalize_headers(headers_2)
+    nonmatching = [k for k in vary if headers_1.get(k) != headers_2.get(k)]
+    logger.debug(f'Failed Vary check. Non-matching headers: {", ".join(nonmatching)}')
+
+
+def _log_cache_criteria(operation: str, criteria: Dict):
+    """Log details on any failed checks for cache read or write"""
+    if logger.level > DEBUG:
+        return
+    if any(criteria.values()):
+        status = ', '.join([k for k, v in criteria.items() if v])
     else:
-        return header_value, True
-
-
-def _to_utc(dt: datetime):
-    """All internal datetimes are UTC and timezone-naive. Convert any user/header-provided
-    datetimes to the same format.
-    """
-    if dt.tzinfo:
-        dt = dt.astimezone(timezone.utc)
-        dt = dt.replace(tzinfo=None)
-    return dt
-
-
-def _try_int(value: Any) -> Optional[int]:
-    """Convert a value to an int, if possible, otherwise ``None``"""
-    try:
-        return int(value)
-    except (TypeError, ValueError):
-        return None
-
-
-def _url_match(url: str, pattern: str) -> bool:
-    """Determine if a URL matches a pattern
-
-    Args:
-        url: URL to test. Its base URL (without protocol) will be used.
-        pattern: Glob pattern to match against. A recursive wildcard will be added if not present
-
-    Example:
-        >>> url_match('https://httpbin.org/delay/1', 'httpbin.org/delay')
-        True
-        >>> url_match('https://httpbin.org/stream/1', 'httpbin.org/*/1')
-        True
-        >>> url_match('https://httpbin.org/stream/2', 'httpbin.org/*/1')
-        False
-    """
-    url = url.split('://')[-1]
-    pattern = pattern.split('://')[-1].rstrip('*') + '**'
-    return fnmatch(url, pattern)
+        status = 'Passed'
+    logger.debug(f'Pre-{operation} cache checks: {status}')
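
Although CacheActions is internal, its behavior can be sanity-checked directly. A rough sketch, assuming the import paths introduced in this diff:

    from requests import Request, Session
    from requests_cache.policy import CacheActions, CacheSettings

    request = Session().prepare_request(Request('GET', 'https://httpbin.org/get'))
    actions = CacheActions.from_request(
        'example-cache-key', request, CacheSettings(expire_after=60)
    )
    print(actions.skip_read)  # False: nothing disables reading for a plain GET
    print(actions.expires)    # roughly utcnow() + 60 seconds
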
diff --git a/requests_cache/policy/directives.py b/requests_cache/policy/directives.py
new file mode 100644
index 0000000..3be50c4
--- /dev/null
+++ b/requests_cache/policy/directives.py
@@ -0,0 +1,85 @@
+from datetime import timedelta
+from typing import Optional
+
+from attr import define, field
+from requests.models import CaseInsensitiveDict
+
+from .._utils import decode, get_valid_kwargs, try_int
+from ..models import RichMixin
+from . import HeaderDict, get_expiration_seconds
+
+
+@define(repr=False)
+class CacheDirectives(RichMixin):
+    """Parses Cache-Control directives and other relevant cache settings from either request or
+    response headers
+    """
+
+    expires: str = field(default=None)
+    immutable: bool = field(default=False)
+    max_age: int = field(default=None, converter=try_int)
+    max_stale: int = field(default=None, converter=try_int)
+    min_fresh: int = field(default=None, converter=try_int)
+    must_revalidate: bool = field(default=False)
+    no_cache: bool = field(default=False)
+    no_store: bool = field(default=False)
+    only_if_cached: bool = field(default=False)
+    stale_if_error: int = field(default=None, converter=try_int)
+    stale_while_revalidate: int = field(default=None, converter=try_int)
+    etag: str = field(default=None)
+    last_modified: str = field(default=None)
+
+    @classmethod
+    def from_headers(cls, headers: HeaderDict):
+        """Parse cache directives and other settings from request or response headers"""
+        headers = CaseInsensitiveDict(headers)
+        directives = decode(headers.get('Cache-Control', '')).split(',')
+        kv_directives = dict(_split_kv_directive(value) for value in directives)
+        kwargs = get_valid_kwargs(
+            cls.__init__, {k.replace('-', '_'): v for k, v in kv_directives.items()}
+        )
+
+        kwargs['expires'] = headers.get('Expires')
+        kwargs['etag'] = headers.get('ETag')
+        kwargs['last_modified'] = headers.get('Last-Modified')
+        return cls(**kwargs)
+
+    def get_expire_offset(self) -> timedelta:
+        """Return the time offset to use for expiration, if either min-fresh or max-stale is set"""
+        offset_seconds = 0
+        if self.max_stale:
+            offset_seconds = self.max_stale
+        elif self.min_fresh:
+            offset_seconds = -self.min_fresh
+        return timedelta(seconds=offset_seconds)
+
+    @property
+    def has_validator(self) -> bool:
+        return bool(self.etag or self.last_modified)
+
+
+def _split_kv_directive(directive: str):
+    """Split a cache directive into a `(key, value)` pair, or `(key, True)` if value-only"""
+    directive = directive.strip().lower()
+    return directive.split('=', 1) if '=' in directive else (directive, True)
+
+
+def set_request_headers(
+    headers: Optional[HeaderDict], expire_after, only_if_cached, refresh, force_refresh
+):
+    """Translate keyword arguments into equivalent request headers"""
+    headers = CaseInsensitiveDict(headers)
+    directives = headers['Cache-Control'].split(',') if headers.get('Cache-Control') else []
+
+    if expire_after is not None:
+        directives.append(f'max-age={get_expiration_seconds(expire_after)}')
+    if only_if_cached:
+        directives.append('only-if-cached')
+    if refresh:
+        directives.append('must-revalidate')
+    if force_refresh:
+        directives.append('no-cache')
+
+    if directives:
+        headers['Cache-Control'] = ','.join(directives)
+    return headers
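
A quick sketch of the new CacheDirectives parser applied to a hand-written header dict:

    from requests_cache.policy.directives import CacheDirectives

    directives = CacheDirectives.from_headers(
        {'Cache-Control': 'max-age=60, must-revalidate', 'ETag': '"abc123"'}
    )
    print(directives.max_age)          # 60 (converted to int)
    print(directives.must_revalidate)  # True
    print(directives.has_validator)    # True, since an ETag is present
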
diff --git a/requests_cache/policy/expiration.py b/requests_cache/policy/expiration.py
new file mode 100644
index 0000000..1350951
--- /dev/null
+++ b/requests_cache/policy/expiration.py
@@ -0,0 +1,118 @@
+"""Utility functions for parsing and converting expiration values"""
+from datetime import datetime, timedelta, timezone
+from email.utils import parsedate_to_datetime
+from fnmatch import fnmatch
+from logging import getLogger
+from math import ceil
+from typing import Optional
+from typing import Pattern as RegexPattern
+
+from .._utils import try_int
+from . import ExpirationPattern, ExpirationPatterns, ExpirationTime
+
+# Special expiration values that may be set by either headers or keyword args
+DO_NOT_CACHE = 0x0D0E0200020704  # Per RFC 4824
+EXPIRE_IMMEDIATELY = 0
+NEVER_EXPIRE = -1
+
+logger = getLogger(__name__)
+
+
+def get_expiration_datetime(
+    expire_after: ExpirationTime,
+    start_time: Optional[datetime] = None,
+    negative_delta: bool = False,
+    ignore_invalid_httpdate: bool = False,
+) -> Optional[datetime]:
+    """Convert an expiration value in any supported format to an absolute datetime"""
+    # Never expire (or do not cache, in which case expiration won't be used)
+    if expire_after is None or expire_after in [NEVER_EXPIRE, DO_NOT_CACHE]:
+        return None
+    # Expire immediately
+    elif try_int(expire_after) == EXPIRE_IMMEDIATELY:
+        return start_time or datetime.utcnow()
+    # Already a datetime or httpdate str (allowed for headers only)
+    if isinstance(expire_after, str):
+        expire_after_dt = _parse_http_date(expire_after)
+        if not expire_after_dt and not ignore_invalid_httpdate:
+            raise ValueError(f'Invalid HTTP date: {expire_after}')
+        return expire_after_dt
+    elif isinstance(expire_after, datetime):
+        return _to_utc(expire_after)
+
+    # Otherwise, it must be a timedelta or time in seconds
+    if not isinstance(expire_after, timedelta):
+        expire_after = timedelta(seconds=expire_after)
+    if negative_delta:
+        expire_after = -expire_after
+    return (start_time or datetime.utcnow()) + expire_after
+
+
+def get_expiration_seconds(expire_after: ExpirationTime) -> int:
+    """Convert an expiration value in any supported format to an expiration time in seconds"""
+    if expire_after == DO_NOT_CACHE:
+        return DO_NOT_CACHE
+    expires = get_expiration_datetime(expire_after, ignore_invalid_httpdate=True)
+    return ceil((expires - datetime.utcnow()).total_seconds()) if expires else NEVER_EXPIRE
+
+
+def get_url_expiration(
+    url: Optional[str], urls_expire_after: Optional[ExpirationPatterns] = None
+) -> ExpirationTime:
+    """Check for a matching per-URL expiration, if any"""
+    if not url:
+        return None
+
+    for pattern, expire_after in (urls_expire_after or {}).items():
+        if _url_match(url, pattern):
+            logger.debug(f'URL {url} matched pattern "{pattern}": {expire_after}')
+            return expire_after
+    return None
+
+
+def _parse_http_date(value: str) -> Optional[datetime]:
+    """Attempt to parse an HTTP (RFC 5322-compatible) timestamp"""
+    try:
+        expire_after = parsedate_to_datetime(value)
+        return _to_utc(expire_after)
+    except (TypeError, ValueError):
+        logger.debug(f'Failed to parse timestamp: {value}')
+        return None
+
+
+def _to_utc(dt: datetime):
+    """All internal datetimes are UTC and timezone-naive. Convert any user/header-provided
+    datetimes to the same format.
+    """
+    if dt.tzinfo:
+        dt = dt.astimezone(timezone.utc)
+        dt = dt.replace(tzinfo=None)
+    return dt
+
+
+def _url_match(url: str, pattern: ExpirationPattern) -> bool:
+    """Determine if a URL matches a pattern
+
+    Args:
+        url: URL to test. Its base URL (without protocol) will be used.
+        pattern: Glob pattern to match against. A recursive wildcard will be added if not present
+
+    Example:
+        >>> _url_match('https://httpbin.org/delay/1', 'httpbin.org/delay')
+        True
+        >>> _url_match('https://httpbin.org/stream/1', 'httpbin.org/*/1')
+        True
+        >>> _url_match('https://httpbin.org/stream/2', 'httpbin.org/*/1')
+        False
+        >>> _url_match('https://httpbin.org/stream/2', re.compile('httpbin.org/.*/\\d+'))
+        True
+        >>> _url_match('https://httpbin.org/stream/x', re.compile('httpbin.org/.*/\\d+'))
+        False
+    """
+    if isinstance(pattern, RegexPattern):
+        match = pattern.search(url)
+        return match is not None
+    else:
+        url = url.split('://')[-1]
+        pattern = pattern.split('://')[-1].rstrip('*') + '**'
+        return fnmatch(url, pattern)
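
The expiration helpers accept several input formats (int/float seconds, timedelta, datetime, or an HTTP date string). A brief sketch with example values:

    from datetime import timedelta
    from requests_cache.policy.expiration import (
        NEVER_EXPIRE,
        get_expiration_datetime,
        get_expiration_seconds,
    )

    print(get_expiration_datetime(NEVER_EXPIRE))        # None (never expires)
    print(get_expiration_datetime(timedelta(hours=1)))  # about utcnow() + 1 hour
    print(get_expiration_seconds('Fri, 01 Jan 2100 00:00:00 GMT'))  # seconds until that date
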
diff --git a/requests_cache/policy/settings.py b/requests_cache/policy/settings.py
new file mode 100644
index 0000000..7c4dce8
--- /dev/null
+++ b/requests_cache/policy/settings.py
@@ -0,0 +1,58 @@
+from typing import Dict, Iterable, Union
+
+from attr import define, field
+
+from .._utils import get_valid_kwargs
+from ..models import RichMixin
+from . import ExpirationPattern, ExpirationTime, FilterCallback, KeyCallback
+
+ALL_METHODS = ('GET', 'HEAD', 'OPTIONS', 'POST', 'PUT', 'PATCH', 'DELETE')
+DEFAULT_CACHE_NAME = 'http_cache'
+DEFAULT_METHODS = ('GET', 'HEAD')
+DEFAULT_STATUS_CODES = (200,)
+
+# Default params and/or headers that are excluded from cache keys and redacted from cached responses
+DEFAULT_IGNORED_PARAMS = ('Authorization', 'X-API-KEY', 'access_token', 'api_key')
+
+
+@define(repr=False)
+class CacheSettings(RichMixin):
+    """Class used internally to store settings that affect caching behavior. This allows settings
+    to be used across multiple modules, but exposed to the user in a single property
+    (:py:attr:`.CachedSession.settings`). These values can safely be modified after initialization.
+    See :py:class:`.CachedSession` and :ref:`user-guide` for usage details.
+    """
+
+    allowable_codes: Iterable[int] = field(default=DEFAULT_STATUS_CODES)
+    allowable_methods: Iterable[str] = field(default=DEFAULT_METHODS)
+    always_revalidate: bool = field(default=None)
+    cache_control: bool = field(default=False)
+    disabled: bool = field(default=False)
+    expire_after: ExpirationTime = field(default=None)
+    filter_fn: FilterCallback = field(default=None)
+    ignored_parameters: Iterable[str] = field(default=DEFAULT_IGNORED_PARAMS)
+    key_fn: KeyCallback = field(default=None)
+    match_headers: Union[Iterable[str], bool] = field(default=False)
+    only_if_cached: bool = field(default=False)
+    stale_if_error: Union[bool, ExpirationTime] = field(default=False)
+    stale_while_revalidate: Union[bool, ExpirationTime] = field(default=False)
+    urls_expire_after: Dict[ExpirationPattern, ExpirationTime] = field(factory=dict)
+
+    @classmethod
+    def from_kwargs(cls, **kwargs):
+        """Constructor with some additional steps:
+
+        * Handle some deprecated argument names
+        * Ignore invalid settings, for easier initialization from mixed ``**kwargs``
+        """
+        kwargs = cls._rename_kwargs(kwargs)
+        kwargs = get_valid_kwargs(cls.__init__, kwargs)
+        return cls(**kwargs)
+
+    @staticmethod
+    def _rename_kwargs(kwargs):
+        if 'old_data_on_error' in kwargs:
+            kwargs['stale_if_error'] = kwargs.pop('old_data_on_error')
+        if 'include_get_headers' in kwargs:
+            kwargs['match_headers'] = kwargs.pop('include_get_headers')
+        return kwargs
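
from_kwargs() makes session construction tolerant of legacy and unrelated keyword arguments. A sketch of both renaming paths:

    from requests_cache.policy.settings import CacheSettings

    settings = CacheSettings.from_kwargs(
        old_data_on_error=True,          # renamed to stale_if_error
        include_get_headers=['Accept'],  # renamed to match_headers
        not_a_real_setting=42,           # silently dropped by get_valid_kwargs()
    )
    print(settings.stale_if_error, settings.match_headers)  # True ['Accept']
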
diff --git a/requests_cache/serializers/__init__.py b/requests_cache/serializers/__init__.py
index 08ac11c..c477787 100644
--- a/requests_cache/serializers/__init__.py
+++ b/requests_cache/serializers/__init__.py
@@ -1,11 +1,33 @@
 """Response serialization utilities. See :ref:`serializers` for general usage info.
+
+**Summary:**
+
+The ``cattrs`` library includes a number of `pre-configured converters
+<https://cattrs.readthedocs.io/en/latest/preconf.html>`_ that perform some pre-serialization steps
+required for specific serialization formats.
+
+The module :py:mod:`requests_cache.serializers.preconf` wraps those converters as serializer
+:py:class:`.Stage` objects, which are then combined into a :py:class:`.SerializerPipeline`. Preconf
+converters run after the base converter and before the format's ``dumps()`` (or equivalent) method.
+For example, for JSON:
+
+* Run base converter (:py:class:`.CattrStage`) to convert :py:class:`.CachedResponse` to a dict
+* Run the JSON preconf converter to convert the binary response body to base85
+* Run ``json.dumps()``
+
+For any optional libraries that aren't installed, the corresponding serializer will be a placeholder
+class that raises an ``ImportError`` at initialization time instead of at import time.
 """
 # flake8: noqa: F401
+from typing import Optional, Union
+
 from .cattrs import CattrStage
 from .pipeline import SerializerPipeline, Stage
 from .preconf import (
+    bson_document_serializer,
     bson_serializer,
     dict_serializer,
+    dynamodb_document_serializer,
     json_serializer,
     pickle_serializer,
     safe_pickle_serializer,
@@ -17,13 +39,17 @@ __all__ = [
     'SERIALIZERS',
     'CattrStage',
     'SerializerPipeline',
+    'SerializerType',
     'Stage',
+    'init_serializer',
     'bson_serializer',
+    'bson_document_serializer',
+    'dynamodb_document_serializer',
+    'dict_serializer',
     'json_serializer',
     'pickle_serializer',
     'safe_pickle_serializer',
     'yaml_serializer',
-    'init_serializer',
     'utf8_encoder',
 ]
 
@@ -34,10 +60,23 @@ SERIALIZERS = {
     'yaml': yaml_serializer,
 }
 
+SerializerType = Union[str, SerializerPipeline, Stage]
+
 
-def init_serializer(serializer=None, **kwargs):
-    """Initialize a serializer from a name, class, or instance"""
-    serializer = serializer or 'pickle'
+def init_serializer(
+    serializer: Optional[SerializerType], decode_content: bool
+) -> Optional[SerializerPipeline]:
+    """Intitialze a serializer by name or instance"""
+    if not serializer:
+        return None
+
+    # Look up a serializer by name, if needed
     if isinstance(serializer, str):
         serializer = SERIALIZERS[serializer]
+
+    # Wrap in a SerializerPipeline, if needed
+    if not isinstance(serializer, SerializerPipeline):
+        serializer = SerializerPipeline([serializer], name=str(serializer))
+    serializer.set_decode_content(decode_content)
+
     return serializer
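
A sketch of init_serializer() resolving a registered name to a pipeline (assuming 'json' is registered in SERIALIZERS, as the imports above suggest):

    from requests_cache.serializers import init_serializer

    pipeline = init_serializer('json', decode_content=True)
    print(pipeline)  # SerializerPipeline(name=json, n_stages=2)
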
diff --git a/requests_cache/serializers/cattrs.py b/requests_cache/serializers/cattrs.py
index b880512..bf45c2d 100644
--- a/requests_cache/serializers/cattrs.py
+++ b/requests_cache/serializers/cattrs.py
@@ -1,7 +1,7 @@
 """
 Utilities to break down :py:class:`.CachedResponse` objects into a dict of python builtin types
-using `cattrs <https://cattrs.readthedocs.io>`_. This does the majority of the work needed for any
-serialization format.
+using `cattrs <https://cattrs.readthedocs.io>`_. This does the majority of the work needed for all
+serialization formats.
 
 .. automodsumm:: requests_cache.serializers.cattrs
    :classes-only:
@@ -12,38 +12,77 @@ serialization format.
    :nosignatures:
 """
 from datetime import datetime, timedelta
-from typing import Callable, Dict, ForwardRef, MutableMapping
+from decimal import Decimal
+from json import JSONDecodeError
+from typing import Callable, Dict, ForwardRef, MutableMapping, Optional
 
 from cattr import Converter
 from requests.cookies import RequestsCookieJar, cookiejar_from_dict
+from requests.exceptions import RequestException
 from requests.structures import CaseInsensitiveDict
-from urllib3._collections import HTTPHeaderDict
 
-from ..models import CachedResponse
+from ..models import CachedResponse, DecodedContent
 from .pipeline import Stage
 
+try:
+    import ujson as json
+except ImportError:
+    import json  # type: ignore
+
 
 class CattrStage(Stage):
     """Base serializer class that does pre/post-processing with  ``cattrs``. This can be used either
     on its own, or as a stage within a :py:class:`.SerializerPipeline`.
+
+    Args:
+        factory: A callable that returns a ``cattrs`` converter to start from instead of a new
+            ``Converter``. Mainly useful for preconf converters.
+        decode_content: Save response body in human-readable format, if possible
+
+    Notes on ``decode_content`` option:
+
+    * Response body will be decoded into a human-readable format (if possible) during serialization,
+      and re-encoded during deserialization to reconstruct the original response.
+    * Supported Content-Types are ``application/json`` and ``text/*``. All other types will be saved as-is.
+    * Decoded responses are saved in a separate ``_decoded_content`` attribute, to ensure that
+      ``_content`` is always binary.
+    * This is the default behavior for Filesystem, DynamoDB, and MongoDB backends.
     """
 
-    def __init__(self, factory: Callable[..., Converter] = None):
-        self.converter = init_converter(factory)
+    def __init__(
+        self,
+        factory: Optional[Callable[..., Converter]] = None,
+        decode_content: bool = False,
+        **kwargs
+    ):
+        self.converter = init_converter(factory, **kwargs)
+        self.decode_content = decode_content
 
     def dumps(self, value: CachedResponse) -> Dict:
         if not isinstance(value, CachedResponse):
             return value
-        return self.converter.unstructure(value)
+        response_dict = self.converter.unstructure(value)
+        return _decode_content(value, response_dict) if self.decode_content else response_dict
 
     def loads(self, value: Dict) -> CachedResponse:
         if not isinstance(value, MutableMapping):
             return value
-        return self.converter.structure(value, cl=CachedResponse)
+        return _encode_content(self.converter.structure(value, cl=CachedResponse))
+
 
+def init_converter(
+    factory: Optional[Callable[..., Converter]] = None,
+    convert_datetime: bool = True,
+    convert_timedelta: bool = True,
+) -> Converter:
+    """Make a converter to structure and unstructure nested objects within a
+    :py:class:`.CachedResponse`
 
-def init_converter(factory: Callable[..., Converter] = None):
-    """Make a converter to structure and unstructure nested objects within a :py:class:`.CachedResponse`"""
+    Args:
+        factory: An optional factory function that returns a ``cattrs`` converter
+        convert_datetime: May be set to ``False`` for pre-configured converters that already have
+            datetime support
+        convert_timedelta: May be set to ``False`` for pre-configured converters that already
+            have timedelta support
+    """
     factory = factory or Converter
     try:
         converter = factory(omit_if_default=True)
@@ -52,24 +91,34 @@ def init_converter(factory: Callable[..., Converter] = None):
         converter = factory()
 
     # Convert datetimes to and from iso-formatted strings
-    converter.register_unstructure_hook(datetime, lambda obj: obj.isoformat() if obj else None)  # type: ignore
-    converter.register_structure_hook(datetime, _to_datetime)
+    if convert_datetime:
+        converter.register_unstructure_hook(datetime, lambda obj: obj.isoformat() if obj else None)
+        converter.register_structure_hook(datetime, _to_datetime)
 
     # Convert timedeltas to and from float values in seconds
-    converter.register_unstructure_hook(timedelta, lambda obj: obj.total_seconds() if obj else None)  # type: ignore
-    converter.register_structure_hook(timedelta, _to_timedelta)
+    if convert_timedelta:
+        converter.register_unstructure_hook(
+            timedelta, lambda obj: obj.total_seconds() if obj else None
+        )
+        converter.register_structure_hook(timedelta, _to_timedelta)
 
     # Convert dict-like objects to and from plain dicts
-    converter.register_unstructure_hook(RequestsCookieJar, lambda obj: dict(obj.items()))  # type: ignore
+    converter.register_unstructure_hook(RequestsCookieJar, lambda obj: dict(obj.items()))
     converter.register_structure_hook(RequestsCookieJar, lambda obj, cls: cookiejar_from_dict(obj))
     converter.register_unstructure_hook(CaseInsensitiveDict, dict)
     converter.register_structure_hook(
         CaseInsensitiveDict, lambda obj, cls: CaseInsensitiveDict(obj)
     )
-    converter.register_unstructure_hook(HTTPHeaderDict, dict)
-    converter.register_structure_hook(HTTPHeaderDict, lambda obj, cls: HTTPHeaderDict(obj))
+    # Convert decoded JSON body back to string
+    converter.register_structure_hook(
+        DecodedContent, lambda obj, cls: json.dumps(obj) if isinstance(obj, dict) else obj
+    )
 
-    # Tell cattrs to resolve forward references (required for CachedResponse.history)
+    # Resolve forward references (required for CachedResponse.history)
+    converter.register_unstructure_hook_func(
+        lambda cls: cls.__class__ is ForwardRef,
+        lambda obj, cls=None: converter.unstructure(obj, cls.__forward_value__ if cls else None),
+    )
     converter.register_structure_hook_func(
         lambda cls: cls.__class__ is ForwardRef,
         lambda obj, cls: converter.structure(obj, cls.__forward_value__),
@@ -78,6 +127,45 @@ def init_converter(factory: Callable[..., Converter] = None):
     return converter
 
 
+def make_decimal_timedelta_converter(**kwargs) -> Converter:
+    """Make a converter that uses Decimals instead of floats to represent timedelta objects"""
+    converter = Converter(**kwargs)
+    converter.register_unstructure_hook(
+        timedelta, lambda obj: Decimal(str(obj.total_seconds())) if obj else None
+    )
+    converter.register_structure_hook(timedelta, _to_timedelta)
+    return converter
+
+
+def _decode_content(response: CachedResponse, response_dict: Dict) -> Dict:
+    """Decode response body into a human-readable format, if possible"""
+    # Decode body as JSON
+    if response.headers.get('Content-Type') == 'application/json':
+        try:
+            response_dict['_decoded_content'] = response.json()
+            response_dict.pop('_content', None)
+        except (JSONDecodeError, RequestException):
+            pass
+
+    # Decode body as text
+    if response.headers.get('Content-Type', '').startswith('text/'):
+        response_dict['_decoded_content'] = response.text
+        response_dict.pop('_content', None)
+
+    # Otherwise, it is most likely a binary body
+    return response_dict
+
+
+def _encode_content(response: CachedResponse) -> CachedResponse:
+    """Re-encode response body if saved as JSON or text; has no effect for a binary response body"""
+    if isinstance(response._decoded_content, str):
+        response._content = response._decoded_content.encode('utf-8')
+        response._decoded_content = None
+        response.encoding = 'utf-8'  # Set encoding explicitly so requests doesn't have to guess
+        response.headers['Content-Length'] = str(len(response._content))  # Size may have changed
+    return response
+
+
 def _to_datetime(obj, cls) -> datetime:
     if isinstance(obj, str):
         obj = datetime.fromisoformat(obj)
@@ -87,4 +175,6 @@ def _to_datetime(obj, cls) -> datetime:
 def _to_timedelta(obj, cls) -> timedelta:
     if isinstance(obj, (int, float)):
         obj = timedelta(seconds=obj)
+    elif isinstance(obj, Decimal):
+        obj = timedelta(seconds=float(obj))
     return obj
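
make_decimal_timedelta_converter() exists because DynamoDB represents numbers as Decimal. A small round-trip sketch:

    from datetime import timedelta
    from requests_cache.serializers.cattrs import make_decimal_timedelta_converter

    converter = make_decimal_timedelta_converter()
    value = converter.unstructure(timedelta(seconds=90))
    print(value)                                  # Decimal('90.0')
    print(converter.structure(value, timedelta))  # 0:01:30
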
diff --git a/requests_cache/serializers/pipeline.py b/requests_cache/serializers/pipeline.py
index 2a22761..320e498 100644
--- a/requests_cache/serializers/pipeline.py
+++ b/requests_cache/serializers/pipeline.py
@@ -1,15 +1,17 @@
-"""
+"""Classes for building complex serializers from a sequence of stages.
+
 .. automodsumm:: requests_cache.serializers.pipeline
    :classes-only:
    :nosignatures:
 """
-from typing import Any, Callable, Sequence, Union
+from typing import Any, Callable, Optional, Sequence, Union
 
 from ..models import CachedResponse
 
 
 class Stage:
-    """Generic class to wrap serialization steps with consistent ``dumps()`` and ``loads()`` methods
+    """A single stage in a serializer pipeline. This wraps serialization steps with consistent
+    ``dumps()`` and ``loads()`` methods
 
     Args:
         obj: Serializer object or module, if applicable
@@ -31,17 +33,22 @@ class Stage:
 class SerializerPipeline:
     """A pipeline of stages chained together to serialize and deserialize response objects.
 
+    Note: Typically, the first stage should be a :py:class:`.CattrStage`, since this does the
+    majority of the non-format-specific work to unstructure a response object into a dict (and
+    vice versa).
+
     Args:
         stages: A sequence of :py:class:`Stage` objects, or any objects with ``dumps()`` and
             ``loads()`` methods
+        name: An optional name used to identify the serializer
         is_binary: Indicates whether the serialized content is binary
     """
 
-    def __init__(self, stages: Sequence, is_binary: bool = False):
+    def __init__(self, stages: Sequence, name: Optional[str] = None, is_binary: bool = False):
         self.is_binary = is_binary
         self.stages = stages
         self.dump_stages = [stage.dumps for stage in stages]
         self.load_stages = [stage.loads for stage in reversed(stages)]
+        self.name = name
 
     def dumps(self, value) -> Union[str, bytes]:
         for step in self.dump_stages:
@@ -52,3 +59,12 @@ class SerializerPipeline:
         for step in self.load_stages:
             value = step(value)
         return value
+
+    def set_decode_content(self, decode_content: bool):
+        """Set decode_content, if the pipeline contains a CattrStage or compatible object"""
+        for stage in self.stages:
+            if hasattr(stage, 'decode_content'):
+                stage.decode_content = decode_content
+
+    def __str__(self) -> str:
+        return f'SerializerPipeline(name={self.name}, n_stages={len(self.dump_stages)})'
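
Building a custom pipeline from the classes above; a hypothetical example combining the base converter with the stdlib json module:

    import json
    from requests_cache.serializers import CattrStage, SerializerPipeline, Stage

    # Stage(json) picks up json.dumps/json.loads; CattrStage converts response <-> dict
    custom_serializer = SerializerPipeline(
        [CattrStage(), Stage(json)],
        name='custom_json',
        is_binary=False,
    )
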
diff --git a/requests_cache/serializers/preconf.py b/requests_cache/serializers/preconf.py
index 73bc838..a5516b5 100644
--- a/requests_cache/serializers/preconf.py
+++ b/requests_cache/serializers/preconf.py
@@ -1,26 +1,15 @@
 # flake8: noqa: F841
-"""The ``cattrs`` library includes a number of `pre-configured converters
-<https://cattrs.readthedocs.io/en/latest/preconf.html>`_ that perform some pre-serialization steps
-required for specific serialization formats.
-
-This module wraps those converters as serializer :py:class:`.Stage` objects. These are then used as
-stages in a :py:class:`.SerializerPipeline`, which runs after the base converter and before the
-format's ``dumps()`` (or equivalent) method.
-
-For any optional libraries that aren't installed, the corresponding serializer will be a placeholder
-class that raises an ``ImportError`` at initialization time instead of at import time.
+"""Stages and serializers for supported serialization formats.
 
 .. automodsumm:: requests_cache.serializers.preconf
    :nosignatures:
 """
 import pickle
-from datetime import timedelta
-from decimal import Decimal
 from functools import partial
 from importlib import import_module
 
 from .._utils import get_placeholder_class
-from .cattrs import CattrStage
+from .cattrs import CattrStage, make_decimal_timedelta_converter
 from .pipeline import SerializerPipeline, Stage
 
 
@@ -36,7 +25,9 @@ def make_stage(preconf_module: str, **kwargs):
 # Pre-serialization stages
 base_stage = CattrStage()  #: Base stage for all serializer pipelines
 utf8_encoder = Stage(dumps=str.encode, loads=lambda x: x.decode())  #: Encode to bytes
-bson_preconf_stage = make_stage('cattr.preconf.bson')  #: Pre-serialization steps for BSON
+bson_preconf_stage = make_stage(
+    'cattr.preconf.bson', convert_datetime=False
+)  #: Pre-serialization steps for BSON
 json_preconf_stage = make_stage('cattr.preconf.json')  #: Pre-serialization steps for JSON
 msgpack_preconf_stage = make_stage('cattr.preconf.msgpack')  #: Pre-serialization steps for msgpack
 orjson_preconf_stage = make_stage('cattr.preconf.orjson')  #: Pre-serialization steps for orjson
@@ -46,12 +37,13 @@ yaml_preconf_stage = make_stage('cattr.preconf.pyyaml')  #: Pre-serialization st
 
 # Basic serializers with no additional dependencies
 dict_serializer = SerializerPipeline(
-    [base_stage], is_binary=False
+    [base_stage], name='dict', is_binary=False
 )  #: Partial serializer that unstructures responses into dicts
 pickle_serializer = SerializerPipeline(
-    [base_stage, Stage(pickle)], is_binary=True
+    [base_stage, Stage(pickle)], name='pickle', is_binary=True
 )  #: Pickle serializer
 
+
 # Safe pickle serializer
 def signer_stage(secret_key=None, salt='requests-cache') -> Stage:
     """Create a stage that uses ``itsdangerous`` to add a signature to responses on write, and
@@ -73,6 +65,7 @@ def safe_pickle_serializer(secret_key=None, salt='requests-cache', **kwargs) ->
     """
     return SerializerPipeline(
         [base_stage, Stage(pickle), signer_stage(secret_key, salt)],
+        name='safe_pickle',
         is_binary=True,
     )
 
@@ -84,7 +77,7 @@ except ImportError as e:
     safe_pickle_serializer = get_placeholder_class(e)
 
 
-# BSON serializer
+# BSON/MongoDB document serializers
 def _get_bson_functions():
     """Handle different function names between pymongo's bson and standalone bson"""
     try:
@@ -100,10 +93,17 @@ try:
 
     bson_serializer = SerializerPipeline(
         [bson_preconf_stage, Stage(bson, **_get_bson_functions())],
+        name='bson',
         is_binary=True,
     )  #: Complete BSON serializer; uses pymongo's ``bson`` if installed, otherwise standalone ``bson`` codec
+    bson_document_serializer = SerializerPipeline(
+        [bson_preconf_stage],
+        name='bson_document',
+        is_binary=False,
+    )  #: BSON partial serializer that produces a MongoDB-compatible document
 except ImportError as e:
     bson_serializer = get_placeholder_class(e)
+    bson_document_serializer = get_placeholder_class(e)
 
 
 # JSON serializer
@@ -119,6 +119,7 @@ except ImportError:
 _json_stage = Stage(dumps=partial(json.dumps, indent=2), loads=json.loads)
 json_serializer = SerializerPipeline(
     [_json_preconf_stage, _json_stage],
+    name='json',
     is_binary=False,
 )  #: Complete JSON serializer; uses ultrajson if available
 
@@ -130,7 +131,19 @@ try:
     _yaml_stage = Stage(yaml, loads='safe_load', dumps='safe_dump')
     yaml_serializer = SerializerPipeline(
         [yaml_preconf_stage, _yaml_stage],
+        name='yaml',
         is_binary=False,
     )  #: Complete YAML serializer
 except ImportError as e:
     yaml_serializer = get_placeholder_class(e)
+
+
+# DynamoDB document serializer
+dynamodb_preconf_stage = CattrStage(
+    factory=make_decimal_timedelta_converter, convert_timedelta=False
+)  #: Pre-serialization steps for DynamoDB
+dynamodb_document_serializer = SerializerPipeline(
+    [dynamodb_preconf_stage],
+    name='dynamodb_document',
+    is_binary=False,
+)  #: DynamoDB-compatible document serializer
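
The new document serializers stop before the final encoding step, so backends can store native documents rather than opaque blobs. A hedged usage sketch (requires pymongo; the cache name and backend are examples):

    from requests_cache import CachedSession
    from requests_cache.serializers import bson_document_serializer

    session = CachedSession(
        'http_cache',
        backend='mongodb',
        serializer=bson_document_serializer,
    )
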
diff --git a/requests_cache/session.py b/requests_cache/session.py
index 4c33e7e..c2eaa74 100644
--- a/requests_cache/session.py
+++ b/requests_cache/session.py
@@ -1,43 +1,40 @@
-"""Main classes to add caching features to ``requests.Session``
-
-.. autosummary::
-   :nosignatures:
-
-   CachedSession
-   CacheMixin
-
-.. Explicitly show inherited method docs on CachedSession instead of CachedMixin
-.. autoclass:: requests_cache.session.CachedSession
-    :show-inheritance:
-    :inherited-members:
-
-.. autoclass:: requests_cache.session.CacheMixin
-"""
-from contextlib import contextmanager
+"""Main classes to add caching features to :py:class:`requests.Session`"""
+from contextlib import contextmanager, nullcontext
 from logging import getLogger
-from threading import RLock
-from typing import TYPE_CHECKING, Callable, Dict, Iterable, Optional, Union
+from threading import RLock, Thread
+from typing import TYPE_CHECKING, Iterable, MutableMapping, Optional, Union
 
-from requests import PreparedRequest, Response
+from requests import PreparedRequest
 from requests import Session as OriginalSession
 from requests.hooks import dispatch_hook
 from urllib3 import filepost
 
 from ._utils import get_valid_kwargs
-from .backends import KEY_FN, BackendSpecifier, init_backend
-from .models import AnyResponse, CachedResponse, set_response_defaults
-from .policy import CacheActions, ExpirationTime, get_expiration_seconds
-
-__all__ = ['ALL_METHODS', 'CachedSession', 'CacheMixin']
-ALL_METHODS = ['GET', 'HEAD', 'OPTIONS', 'POST', 'PUT', 'PATCH', 'DELETE']
-FILTER_FN = Callable[[AnyResponse], bool]
-
-logger = getLogger(__name__)
+from .backends import BackendSpecifier, init_backend
+from .models import AnyResponse, CachedResponse, OriginalResponse
+from .policy import (
+    DEFAULT_CACHE_NAME,
+    DEFAULT_IGNORED_PARAMS,
+    DEFAULT_METHODS,
+    DEFAULT_STATUS_CODES,
+    CacheActions,
+    CacheSettings,
+    ExpirationPatterns,
+    ExpirationTime,
+    FilterCallback,
+    KeyCallback,
+    set_request_headers,
+)
+from .serializers import SerializerType
+
+__all__ = ['CachedSession', 'CacheMixin']
 if TYPE_CHECKING:
     MIXIN_BASE = OriginalSession
 else:
     MIXIN_BASE = object
 
+logger = getLogger(__name__)
+
 
 class CacheMixin(MIXIN_BASE):
     """Mixin class that extends :py:class:`requests.Session` with caching features.
@@ -46,156 +43,202 @@ class CacheMixin(MIXIN_BASE):
 
     def __init__(
         self,
-        cache_name: str = 'http_cache',
-        backend: BackendSpecifier = None,
+        cache_name: str = DEFAULT_CACHE_NAME,
+        backend: Optional[BackendSpecifier] = None,
+        serializer: Optional[SerializerType] = None,
         expire_after: ExpirationTime = -1,
-        urls_expire_after: Dict[str, ExpirationTime] = None,
+        urls_expire_after: Optional[ExpirationPatterns] = None,
         cache_control: bool = False,
-        allowable_codes: Iterable[int] = (200,),
-        allowable_methods: Iterable[str] = ('GET', 'HEAD'),
-        filter_fn: FILTER_FN = None,
-        stale_if_error: bool = False,
+        allowable_codes: Iterable[int] = DEFAULT_STATUS_CODES,
+        allowable_methods: Iterable[str] = DEFAULT_METHODS,
+        always_revalidate: bool = False,
+        ignored_parameters: Iterable[str] = DEFAULT_IGNORED_PARAMS,
+        match_headers: Union[Iterable[str], bool] = False,
+        filter_fn: Optional[FilterCallback] = None,
+        key_fn: Optional[KeyCallback] = None,
+        stale_if_error: Union[bool, int] = False,
         **kwargs,
     ):
-        self.cache = init_backend(cache_name, backend, **kwargs)
-        self.allowable_codes = allowable_codes
-        self.allowable_methods = allowable_methods
-        self.expire_after = expire_after
-        self.urls_expire_after = urls_expire_after
-        self.cache_control = cache_control
-        self.filter_fn = filter_fn or (lambda r: True)
-        self.stale_if_error = stale_if_error or kwargs.pop('old_data_on_error', False)
-
-        self._disabled = False
+        self.cache = init_backend(cache_name, backend, serializer=serializer, **kwargs)
+        self.settings = CacheSettings.from_kwargs(
+            expire_after=expire_after,
+            urls_expire_after=urls_expire_after,
+            cache_control=cache_control,
+            allowable_codes=allowable_codes,
+            allowable_methods=allowable_methods,
+            always_revalidate=always_revalidate,
+            ignored_parameters=ignored_parameters,
+            match_headers=match_headers,
+            filter_fn=filter_fn,
+            key_fn=key_fn,
+            stale_if_error=stale_if_error,
+            **kwargs,
+        )
         self._lock = RLock()
 
-        # If the superclass is custom Session, pass along any valid kwargs
-        session_kwargs = get_valid_kwargs(super().__init__, kwargs)
-        super().__init__(**session_kwargs)  # type: ignore
+        # If the mixin superclass is a custom Session, pass along any valid kwargs
+        super().__init__(**get_valid_kwargs(super().__init__, kwargs))  # type: ignore
+
+    @property
+    def settings(self) -> CacheSettings:
+        """Settings that affect cache behavior"""
+        return self.cache._settings
+
+    @settings.setter
+    def settings(self, value: CacheSettings):
+        self.cache._settings = value
+
+    # For backwards-compatibility
+    @property
+    def expire_after(self) -> ExpirationTime:
+        return self.settings.expire_after
+
+    @expire_after.setter
+    def expire_after(self, value: ExpirationTime):
+        self.settings.expire_after = value
+
+    # Wrapper methods to add return type hints
+    def get(self, url: str, params=None, **kwargs) -> AnyResponse:  # type: ignore
+        kwargs.setdefault('allow_redirects', True)
+        return self.request('GET', url, params=params, **kwargs)
+
+    def options(self, url: str, **kwargs) -> AnyResponse:  # type: ignore
+        kwargs.setdefault('allow_redirects', True)
+        return self.request('OPTIONS', url, **kwargs)
+
+    def head(self, url: str, **kwargs) -> AnyResponse:  # type: ignore
+        kwargs.setdefault('allow_redirects', False)
+        return self.request('HEAD', url, **kwargs)
 
-    def request(  # type: ignore  # Note: An extra param (expire_after) is added here
+    def post(self, url: str, data=None, **kwargs) -> AnyResponse:  # type: ignore
+        return self.request('POST', url, data=data, **kwargs)
+
+    def put(self, url: str, data=None, **kwargs) -> AnyResponse:  # type: ignore
+        return self.request('PUT', url, data=data, **kwargs)
+
+    def patch(self, url: str, data=None, **kwargs) -> AnyResponse:  # type: ignore
+        return self.request('PATCH', url, data=data, **kwargs)
+
+    def delete(self, url: str, **kwargs) -> AnyResponse:  # type: ignore
+        return self.request('DELETE', url, **kwargs)
+
+    def request(  # type: ignore
         self,
         method: str,
         url: str,
         *args,
+        headers: Optional[MutableMapping[str, str]] = None,
         expire_after: ExpirationTime = None,
+        only_if_cached: bool = False,
+        refresh: bool = False,
+        force_refresh: bool = False,
         **kwargs,
     ) -> AnyResponse:
         """This method prepares and sends a request while automatically performing any necessary
         caching operations. This will be called by any other method-specific ``requests`` functions
-        (get, post, etc.). This does not include prepared requests, which will still be cached via
-        ``send()``.
+        (get, post, etc.). This is not used by :py:class:`~requests.PreparedRequest` objects, which
+        are handled by :py:meth:`send()`.
 
-        See :py:meth:`requests.Session.request` for parameters. Additional parameters:
+        See :py:meth:`requests.Session.request` for base parameters. Additional parameters:
 
         Args:
-            expire_after: Expiration time to set only for this request; see details below.
-                Overrides ``CachedSession.expire_after``. Accepts all the same values as
-                ``CachedSession.expire_after``. Use ``-1`` to disable expiration.
+            expire_after: Expiration time to set only for this request. See :ref:`expiration` for
+                details.
+            only_if_cached: Only return results from the cache. If not cached, return a 504 response
+                instead of sending a new request.
+            refresh: Revalidate with the server before using a cached response, and refresh if needed
+                (e.g., a "soft refresh," like F5 in a browser)
+            force_refresh: Always make a new request, and overwrite any previously cached response
+                (e.g., a "hard refresh", like Ctrl-F5 in a browser))
 
         Returns:
             Either a new or cached response
+        """
+        headers = set_request_headers(headers, expire_after, only_if_cached, refresh, force_refresh)
+        with patch_form_boundary() if kwargs.get('files') else nullcontext():
+            return super().request(method, url, *args, headers=headers, **kwargs)  # type: ignore
+
+    def send(
+        self,
+        request: PreparedRequest,
+        expire_after: ExpirationTime = None,
+        only_if_cached: bool = False,
+        refresh: bool = False,
+        force_refresh: bool = False,
+        **kwargs,
+    ) -> AnyResponse:
+        """Send a prepared request, with caching. See :py:meth:`requests.Session.send` for base
+        parameters, and see :py:meth:`.request` for extra parameters.
 
         **Order of operations:** For reference, a request will pass through the following methods:
 
-        1. :py:func:`requests.get`/:py:meth:`requests.Session.get` or other method-specific functions (optional)
+        1. :py:func:`requests.get`, :py:meth:`CachedSession.get`, etc. (optional)
         2. :py:meth:`.CachedSession.request`
         3. :py:meth:`requests.Session.request`
         4. :py:meth:`.CachedSession.send`
         5. :py:meth:`.BaseCache.get_response`
-        6. :py:meth:`requests.Session.send` (if not previously cached)
-        7. :py:meth:`.BaseCache.save_response` (if not previously cached)
-        """
-        # If present, set per-request expiration as a request header, to be handled in send()
-        if expire_after is not None:
-            kwargs.setdefault('headers', {})
-            kwargs['headers']['Cache-Control'] = f'max-age={get_expiration_seconds(expire_after)}'
-
-        with patch_form_boundary(**kwargs):
-            return super().request(method, url, *args, **kwargs)
-
-    def send(
-        self, request: PreparedRequest, expire_after: ExpirationTime = None, **kwargs
-    ) -> AnyResponse:
-        """Send a prepared request, with caching. See :py:meth:`.request` for notes on behavior, and
-        see :py:meth:`requests.Session.send` for parameters. Additional parameters:
-
-        Args:
-            expire_after: Expiration time to set only for this request
+        6. :py:meth:`requests.Session.send` (if not using a cached response)
+        7. :py:meth:`.BaseCache.save_response` (if not using a cached response)
         """
-        # Determine which actions to take based on request info and cache settings
-        cache_key = self.cache.create_key(request, **kwargs)
+        # Determine which actions to take based on settings and request info
+        request.headers = set_request_headers(
+            request.headers, expire_after, only_if_cached, refresh, force_refresh
+        )
         actions = CacheActions.from_request(
-            cache_key=cache_key,
-            request=request,
-            request_expire_after=expire_after,
-            session_expire_after=self.expire_after,
-            urls_expire_after=self.urls_expire_after,
-            cache_control=self.cache_control,
-            **kwargs,
+            self.cache.create_key(request, **kwargs), request, self.settings
         )
 
         # Attempt to fetch a cached response
         cached_response: Optional[CachedResponse] = None
-        if not (self._disabled or actions.skip_read):
-            cached_response = self.cache.get_response(cache_key)
-            actions.update_from_cached_response(cached_response)
-        is_expired = getattr(cached_response, 'is_expired', False)
-
-        # If the response is expired or missing, or the cache is disabled, then fetch a new response
-        if cached_response is None:
-            response = self._send_and_cache(request, actions, **kwargs)
-        elif is_expired and self.stale_if_error:
-            response = self._resend_and_ignore(request, actions, cached_response, **kwargs)
-        elif is_expired:
-            response = self._resend(request, actions, cached_response, **kwargs)
+        if not actions.skip_read:
+            cached_response = self.cache.get_response(actions.cache_key)
+        actions.update_from_cached_response(cached_response, self.cache.create_key, **kwargs)
+
+        # Handle missing and expired responses based on settings and headers
+        if actions.error_504:
+            response: AnyResponse = get_504_response(request)
+        elif actions.resend_async:
+            self._resend_async(request, actions, cached_response, **kwargs)
+            response = cached_response  # type: ignore
+        elif actions.resend_request:
+            response = self._resend(request, actions, cached_response, **kwargs)  # type: ignore
+        elif actions.send_request:
+            response = self._send_and_cache(request, actions, cached_response, **kwargs)
         else:
-            response = cached_response
+            response = cached_response  # type: ignore  # Guaranteed to be non-None by this point
 
         # If the request has been filtered out and was previously cached, delete it
-        if not self.filter_fn(response):
+        if self.settings.filter_fn is not None and not self.settings.filter_fn(response):
             logger.debug(f'Deleting filtered response for URL: {response.url}')
-            self.cache.delete(cache_key)
+            self.cache.delete(actions.cache_key)
             return response
 
-        # Dispatch any hooks here, because they are removed before pickling
+        # Dispatch any hooks here, because they are removed during serialization
         return dispatch_hook('response', request.hooks, response, **kwargs)
 
-    def _is_cacheable(self, response: Response, actions: CacheActions) -> bool:
-        """Perform all checks needed to determine if the given response should be saved to the cache"""
-        cache_criteria = {
-            'disabled cache': self._disabled,
-            'disabled method': str(response.request.method) not in self.allowable_methods,
-            'disabled status': response.status_code not in self.allowable_codes,
-            'disabled by filter': not self.filter_fn(response),
-            'disabled by headers or expiration params': actions.skip_write,
-        }
-        logger.debug(f'Pre-cache checks for response from {response.url}: {cache_criteria}')
-        return not any(cache_criteria.values())
-
     def _send_and_cache(
         self,
         request: PreparedRequest,
         actions: CacheActions,
-        cached_response: CachedResponse = None,
+        cached_response: Optional[CachedResponse] = None,
         **kwargs,
     ) -> AnyResponse:
-        """Send the request and cache the response, unless disabled by settings or headers.
-
-        If applicable, also add headers to make a conditional request. If we get a 304 Not Modified
-        response, return the stale cache item.
+        """Send a request and cache the response, unless disabled by settings or headers.
+        If applicable, also handle conditional requests.
         """
-        request.headers.update(actions.validation_headers)
+        request = actions.update_request(request)
         response = super().send(request, **kwargs)
         actions.update_from_response(response)
 
-        if self._is_cacheable(response, actions):
+        if not actions.skip_write:
             self.cache.save_response(response, actions.cache_key, actions.expires)
-        elif cached_response and response.status_code == 304:
-            return self._update_revalidated_response(actions, response, cached_response)
+        elif cached_response is not None and response.status_code == 304:
+            cached_response = actions.update_revalidated_response(response, cached_response)
+            self.cache.save_response(cached_response, actions.cache_key, actions.expires)
+            return cached_response
         else:
             logger.debug(f'Skipping cache write for URL: {request.url}')
-        return set_response_defaults(response, actions.cache_key)
+        return OriginalResponse.wrap_response(response, actions)
 
     def _resend(
         self,
@@ -204,50 +247,40 @@ class CacheMixin(MIXIN_BASE):
         cached_response: CachedResponse,
         **kwargs,
     ) -> AnyResponse:
-        """Attempt to resend the request and cache the new response. If the request fails, delete
-        the stale cache item.
-        """
-        logger.debug('Stale response; attempting to re-send request')
-        try:
-            return self._send_and_cache(request, actions, cached_response, **kwargs)
-        except Exception:
-            self.cache.delete(actions.cache_key)
-            raise
-
-    def _resend_and_ignore(
-        self,
-        request: PreparedRequest,
-        actions: CacheActions,
-        cached_response: CachedResponse,
-        **kwargs,
-    ) -> AnyResponse:
-        """Attempt to resend the request and cache the new response. If there are any errors, ignore
-        them and and return the stale cache item.
+        """Handle a stale cached response by attempting to resend the request and cache a fresh
+        response
         """
-        # Attempt to send the request and cache the new response
         logger.debug('Stale response; attempting to re-send request')
         try:
             response = self._send_and_cache(request, actions, cached_response, **kwargs)
-            response.raise_for_status()
+            if (
+                self.settings.stale_if_error
+                and response.status_code not in self.settings.allowable_codes
+            ):
+                response.raise_for_status()
             return response
         except Exception:
+            return self._handle_error(cached_response, actions)
+
+    def _resend_async(self, *args, **kwargs):
+        """Send a non-blocking request to refresh a cached response"""
+        logger.debug('Using stale response while revalidating')
+        thread = Thread(target=self._send_and_cache, args=args, kwargs=kwargs)
+        thread.start()
+
+    def _handle_error(self, cached_response: CachedResponse, actions: CacheActions) -> AnyResponse:
+        """Handle a request error based on settings:
+        * Default behavior: re-raise the error
+    * stale-if-error: Ignore the error and return the stale cache item
+        """
+        if actions.is_usable(cached_response, error=True):
             logger.warning(
-                f'Request for URL {request.url} failed; using cached response', exc_info=True
+                f'Request for URL {cached_response.request.url} failed; using cached response',
+                exc_info=True,
             )
             return cached_response
-
-    def _update_revalidated_response(
-        self, actions: CacheActions, response: Response, cached_response: CachedResponse
-    ) -> CachedResponse:
-        """After revalidation, update the cached response's headers and reset its expiration"""
-        logger.debug(
-            f'Response for URL {response.request.url} has not been modified; updating and using cached response'
-        )
-        cached_response.headers.update(response.headers)
-        actions.update_from_response(cached_response)
-        cached_response.expires = actions.expires
-        self.cache.save_response(cached_response, actions.cache_key, actions.expires)
-        return cached_response
+        else:
+            raise
 
     @contextmanager
     def cache_disabled(self):
@@ -260,17 +293,22 @@ class CacheMixin(MIXIN_BASE):
 
             >>> s = CachedSession()
             >>> with s.cache_disabled():
-            ...     s.get('http://httpbin.org/ip')
+            ...     s.get('https://httpbin.org/ip')
 
         """
-        if self._disabled:
+        if self.settings.disabled:
             yield
         else:
-            self._disabled = True
+            self.settings.disabled = True
             try:
                 yield
             finally:
-                self._disabled = False
+                self.settings.disabled = False
+
+    def close(self):
+        """Close the session and any open backend connections"""
+        super().close()
+        self.cache.close()
 
     def remove_expired_responses(self, expire_after: ExpirationTime = None):
         # Deprecated; will be replaced by CachedSession.cache.delete(expired=True)
@@ -283,89 +321,61 @@ class CacheMixin(MIXIN_BASE):
         raise NotImplementedError('CachedSession cannot be pickled')
 
     def __repr__(self):
-        repr_attrs = [
-            'cache',
-            'expire_after',
-            'urls_expire_after',
-            'allowable_codes',
-            'allowable_methods',
-            'stale_if_error',
-            'cache_control',
-        ]
-        attr_strs = [f'{k}={repr(getattr(self, k))}' for k in repr_attrs]
-        return f'<CachedSession({", ".join(attr_strs)})>'
-
-    # The following properties exist for partial forwards-compatibility with CacheSettings in 1.0
-    # All settings will be settable via CachedSession.settings, instead of being split between
-    # CachedSession and BaseCache.
-    @property
-    def settings(self):
-        return self
-
-    @property
-    def ignored_parameters(self) -> Iterable[str]:
-        return self.cache.ignored_parameters or []
-
-    @ignored_parameters.setter
-    def ignored_parameters(self, value: Iterable[str]):
-        self.cache.ignored_parameters = value
-
-    @property
-    def match_headers(self) -> Union[Iterable[str], bool]:
-        return self.cache.match_headers or []
-
-    @match_headers.setter
-    def match_headers(self, value: Union[Iterable[str], bool]):
-        self.cache.match_headers = value
-
-    @property
-    def key_fn(self) -> KEY_FN:
-        return self.cache.key_fn or []
-
-    @key_fn.setter
-    def key_fn(self, value: KEY_FN):
-        self.cache.key_fn = value
+        return f'<CachedSession(cache={repr(self.cache)}, settings={self.settings})>'
 
 
 class CachedSession(CacheMixin, OriginalSession):
     """Session class that extends :py:class:`requests.Session` with caching features.
 
-    See individual :py:mod:`backend classes <requests_cache.backends>` for additional backend-specific arguments.
-    Also see :ref:`user-guide` for more details and examples on how the following arguments
-    affect cache behavior.
+    See individual :py:mod:`backend classes <requests_cache.backends>` for additional
+    backend-specific arguments. Also see :ref:`user-guide` for more details and examples on how the
+    following arguments affect cache behavior.
 
     Args:
-        cache_name: Cache prefix or namespace, depending on backend
+        cache_name: Used as a cache path, prefix, or namespace, depending on the backend
         backend: Cache backend name or instance; name may be one of
             ``['sqlite', 'filesystem', 'mongodb', 'gridfs', 'redis', 'dynamodb', 'memory']``
         serializer: Serializer name or instance; name may be one of
             ``['pickle', 'json', 'yaml', 'bson']``.
-        expire_after: Time after which cached items will expire
+        expire_after: Time after which cached items will expire. See :ref:`expiration` for details.
         urls_expire_after: Expiration times to apply for different URL patterns
-        cache_control: Use Cache-Control headers to set expiration
+        cache_control: Use Cache-Control and other response headers to set expiration
         allowable_codes: Only cache responses with one of these status codes
         allowable_methods: Cache only responses for one of these HTTP methods
-        match_headers: Match request headers when reading from the cache; may be either a boolean
-            or a list of specific headers to match
-        ignored_parameters: List of request parameters to not match against, and exclude from the cache
-        filter_fn: Function that takes a :py:class:`~requests.Response` object and returns a boolean
-            indicating whether or not that response should be cached. Will be applied to both new
-            and previously cached responses.
-        key_fn: Function for generating custom cache keys based on request info
-        stale_if_error: Return stale cache data if a new request raises an exception
+        always_revalidate: Revalidate with the server for every request, even if the cached response
+            is not expired
+        match_headers: Request headers to match, when the `Vary` response header is not
+            available. May be a list of headers, or ``True`` to match all.
+        ignored_parameters: Request parameters, headers, and/or JSON body params to exclude from both
+            request matching and cached request data
+        stale_if_error: Return a stale response if a new request raises an exception. Optionally
+            accepts a time value representing the maximum staleness to allow.
+        stale_while_revalidate: Return a stale response initially, while a non-blocking request is
+            sent to refresh the response for the next time it's requested
+        filter_fn: Response filtering function that indicates whether or not a given response should
+            be cached. See :ref:`custom-filtering` for details.
+        key_fn: Request matching function for generating custom cache keys. See
+            :ref:`custom-matching` for details.
     """
 
 
+def get_504_response(request: PreparedRequest) -> CachedResponse:
+    """Get a 504: Not Cached error response, for use with only-if-cached option"""
+    return CachedResponse(
+        url=request.url or '',
+        status_code=504,
+        reason='Not Cached',
+        request=request,  # type: ignore
+    )
+
+
 @contextmanager
-def patch_form_boundary(**request_kwargs):
+def patch_form_boundary():
     """If the ``files`` param is present, patch the form boundary used to separate multipart
     uploads. ``requests`` does not provide a way to pass a custom boundary to urllib3, so this just
     monkey-patches it instead.
     """
-    if request_kwargs.get('files'):
-        original_boundary = filepost.choose_boundary
-        filepost.choose_boundary = lambda: '##requests-cache-form-boundary##'
-        yield
-        filepost.choose_boundary = original_boundary
-    else:
-        yield
+    original_boundary = filepost.choose_boundary
+    filepost.choose_boundary = lambda: '##requests-cache-form-boundary##'
+    yield
+    filepost.choose_boundary = original_boundary
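
For reference, the per-request options added to request() and send() above can be used like
this (a minimal sketch; the cache name and URL are arbitrary):

    from requests_cache import CachedSession

    session = CachedSession('demo_cache')
    url = 'https://httpbin.org/get'

    session.get(url, expire_after=360)          # expiration for this request only
    session.get(url, refresh=True)              # revalidate first ("soft refresh")
    session.get(url, force_refresh=True)        # always re-fetch ("hard refresh")
    r = session.get(url, only_if_cached=True)   # 504 response if not already cached
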
diff --git a/setup.cfg b/setup.cfg
index a831e1f..843f922 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -4,13 +4,13 @@ select = E,F,W,C4,C90
 exclude = __pycache__,.tox,.venv,build,dist
 # Defer these rules to black:
 ignore =
-    E203  # whitespace before ':'
-    E225  # missing whitespace around operator
-    E501  # line too long
-    W503  # line break before binary operator
-    W504  # line break after  binary operator
-
-# Tell mypy to ignore external libraries without type annotations
-# TODO: Next release may add support for pyproject.toml config
-[mypy]
-ignore_missing_imports = True
+    # whitespace before ':'
+    E203
+    # missing whitespace around operator
+    E225
+    # line too long
+    E501
+    # line break before binary operator
+    W503
+    # line break after binary operator
+    W504
diff --git a/tests/benchmark_serializers.py b/tests/benchmark_serializers.py
index 96950a9..6354026 100644
--- a/tests/benchmark_serializers.py
+++ b/tests/benchmark_serializers.py
@@ -30,6 +30,8 @@ from time import perf_counter as time
 import ujson
 from cattr.preconf.json import make_converter
 
+from requests_cache.backends.sqlite import SQLiteCache
+
 try:
     from rich import print
 except ImportError:
@@ -42,18 +44,14 @@ except ImportError:
 sys.path.insert(0, os.path.abspath('..'))
 
 from requests_cache import CachedSession
-from requests_cache.serializers import (
-    CattrStage,
-    bson_serializer,
-    json_serializer,
-    pickle_serializer,
-)
+from requests_cache.serializers import CattrStage, bson_serializer, pickle_serializer
 
 ITERATIONS = 10000
 
-session = CachedSession()
-r = session.get('https://httpbin.org/get?x=y')
-r = session.get('https://httpbin.org/get?x=y')
+# Get an initial cached response
+session = CachedSession(SQLiteCache(use_temp=True))
+r = session.get('https://httpbin.org/json')
+r = session.get('https://httpbin.org/json')
 
 
 # def run_jsonpickle():
@@ -65,7 +63,7 @@ def run_pickle():
 
 
 def run_cattrs():
-    run_serialize_deserialize('cattrs', CattrStage)
+    run_serialize_deserialize('cattrs', CattrStage())
 
 
 def run_cattrs_pickle():
@@ -82,8 +80,8 @@ def run_cattrs_pickle():
 
 
 def run_cattrs_ujson():
-    s = CattrStage(converter_factory=make_converter)
-    run_serialize_deserialize('cattrs+ujson', json_serializer)
+    s = CattrStage(factory=make_converter)
+    run_serialize_deserialize('cattrs+ujson', s)
 
 
 def run_cattrs_bson():
diff --git a/tests/compat/test_requests_mock_load_cache.py b/tests/compat/test_requests_mock_load_cache.py
index 2455965..d8d7cc0 100644
--- a/tests/compat/test_requests_mock_load_cache.py
+++ b/tests/compat/test_requests_mock_load_cache.py
@@ -24,7 +24,7 @@ def mock_session():
     adapter = Adapter()
     cache = CachedSession(TEST_DB).cache
 
-    for response in cache.values():
+    for response in cache.responses.values():
         adapter.register_uri(
             response.request.method,
             response.request.url,
diff --git a/tests/compat/test_responses_load_cache.py b/tests/compat/test_responses_load_cache.py
index 793fb0a..4bd72aa 100644
--- a/tests/compat/test_responses_load_cache.py
+++ b/tests/compat/test_responses_load_cache.py
@@ -27,7 +27,7 @@ def get_responses():
     """
     with RequestsMock() as mocker:
         cache = CachedSession(TEST_DB).cache
-        for response in cache.values():
+        for response in cache.responses.values():
             mocker.add(
                 Response(
                     response.request.method,
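
Both compat-test changes above reflect the same 1.0 API change: cached responses are now
iterated via the cache's responses attribute. A minimal sketch of the pattern (the cache name
is hypothetical):

    from requests_cache import CachedSession

    cache = CachedSession('demo_cache').cache
    for response in cache.responses.values():
        print(response.request.method, response.request.url)
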
diff --git a/tests/conftest.py b/tests/conftest.py
index 325fa77..c53d866 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -9,25 +9,41 @@ Note: The protocol ``http(s)+mock://`` helps :py:class:`requests_mock.Adapter` p
 https://requests-mock.readthedocs.io/en/latest/adapter.html
 """
 import os
+import platform
+import warnings
+from contextlib import contextmanager
 from datetime import datetime, timedelta
 from functools import wraps
+from importlib import import_module
 from logging import basicConfig, getLogger
-from os.path import abspath, dirname, join
+from pathlib import Path
+from unittest.mock import MagicMock, patch
 from uuid import uuid4
 
 import pytest
 import requests
+from requests import Request
 from requests_mock import ANY as ANY_METHOD
 from requests_mock import Adapter
+from rich.logging import RichHandler
 from timeout_decorator import timeout
 
 from requests_cache import ALL_METHODS, CachedSession, install_cache, uninstall_cache
 
-CACHE_NAME = 'pytest_cache'
+# Configure logging to show log output when tests fail (or with pytest -s)
+basicConfig(
+    level='INFO',
+    format='%(message)s',
+    datefmt='[%m-%d %H:%M:%S]',
+    handlers=[RichHandler(rich_tracebacks=True, markup=True)],
+)
+# getLogger('requests_cache').setLevel('DEBUG')
+logger = getLogger(__name__)
+
 
 # Allow running longer stress tests with an environment variable
 STRESS_TEST_MULTIPLIER = int(os.getenv('STRESS_TEST_MULTIPLIER', '1'))
-N_WORKERS = 2 * STRESS_TEST_MULTIPLIER
+N_WORKERS = 5 * STRESS_TEST_MULTIPLIER
 N_ITERATIONS = 4 * STRESS_TEST_MULTIPLIER
 N_REQUESTS_PER_ITERATION = 10 + 10 * STRESS_TEST_MULTIPLIER
 
@@ -47,37 +63,28 @@ HTTPBIN_FORMATS = [
     'robots.txt',
     'xml',
 ]
-
 HTTPDATE_STR = 'Fri, 16 APR 2021 21:13:00 GMT'
 HTTPDATE_DATETIME = datetime(2021, 4, 16, 21, 13)
-EXPIRED_DT = datetime.now() - timedelta(1)
+EXPIRED_DT = datetime.utcnow() - timedelta(1)
 ETAG = '"644b5b0155e6404a9cc4bd9d8b1ae730"'
 LAST_MODIFIED = 'Thu, 05 Jul 2012 15:31:30 GMT'
 
 MOCKED_URL = 'http+mock://requests-cache.com/text'
+MOCKED_URL_ETAG = 'http+mock://requests-cache.com/etag'
 MOCKED_URL_HTTPS = 'https+mock://requests-cache.com/text'
 MOCKED_URL_JSON = 'http+mock://requests-cache.com/json'
 MOCKED_URL_REDIRECT = 'http+mock://requests-cache.com/redirect'
 MOCKED_URL_REDIRECT_TARGET = 'http+mock://requests-cache.com/redirect_target'
+MOCKED_URL_VARY = 'http+mock://requests-cache.com/vary'
 MOCKED_URL_404 = 'http+mock://requests-cache.com/nonexistent'
+MOCKED_URL_500 = 'http+mock://requests-cache.com/answer?q=this-statement-is-false'
+MOCKED_URL_200_404 = 'http+mock://requests-cache.com/200-404'
 MOCK_PROTOCOLS = ['mock://', 'http+mock://', 'https+mock://']
 
-PROJECT_DIR = abspath(dirname(dirname(__file__)))
-SAMPLE_DATA_DIR = join(PROJECT_DIR, 'tests', 'sample_data')
-SAMPLE_CACHE_FILES = [join(SAMPLE_DATA_DIR, path) for path in os.listdir(SAMPLE_DATA_DIR)]
-
-AWS_OPTIONS = {
-    'endpoint_url': 'http://localhost:8000',
-    'region_name': 'us-east-1',
-    'aws_access_key_id': 'placeholder',
-    'aws_secret_access_key': 'placeholder',
-}
-
-
-# Configure logging to show log output when tests fail (or with pytest -s)
-basicConfig(level='INFO')
-# getLogger('requests_cache').setLevel('DEBUG')
-logger = getLogger(__name__)
+CACHE_NAME = 'pytest_cache'
+PROJECT_DIR = Path(__file__).parent.parent.absolute()
+SAMPLE_DATA_DIR = PROJECT_DIR / 'tests' / 'sample_data'
+SAMPLE_CACHE_FILES = list(SAMPLE_DATA_DIR.glob('sample.db.*'))
 
 
 def httpbin(path):
@@ -170,6 +177,12 @@ def get_mock_adapter() -> Adapter:
         text='mock response',
         status_code=200,
     )
+    adapter.register_uri(
+        ANY_METHOD,
+        MOCKED_URL_ETAG,
+        headers={'ETag': ETAG},
+        status_code=200,
+    )
     adapter.register_uri(
         ANY_METHOD,
         MOCKED_URL_HTTPS,
@@ -200,12 +213,40 @@ def get_mock_adapter() -> Adapter:
     )
     adapter.register_uri(
         ANY_METHOD,
-        MOCKED_URL_404,
-        status_code=404,
+        MOCKED_URL_VARY,
+        headers={'Content-Type': 'text/plain', 'Vary': 'Accept'},
+        text='mock response with Vary header',
+        status_code=200,
+    )
+    adapter.register_uri(ANY_METHOD, MOCKED_URL_404, status_code=404)
+    adapter.register_uri(ANY_METHOD, MOCKED_URL_500, status_code=500)
+    adapter.register_uri(
+        ANY_METHOD, MOCKED_URL_200_404, [{"status_code": 200}, {"status_code": 404}]
     )
     return adapter
 
 
+def get_mock_response(
+    method='GET',
+    url='https://img.site.com/base/img.jpg',
+    status_code=200,
+    headers={},
+    request_headers={},
+):
+    return MagicMock(
+        url=url,
+        status_code=status_code,
+        headers=headers,
+        request=Request(method=method, url=url, headers=request_headers),
+    )
+
+
+def assert_delta_approx_equal(dt1: datetime, dt2: datetime, target_delta, threshold_seconds=2):
+    """Assert that the given datetimes are approximately ``target_delta`` seconds apart"""
+    diff_in_seconds = (dt2 - dt1).total_seconds()
+    assert abs(diff_in_seconds - target_delta) <= threshold_seconds
+
+
 def fail_if_no_connection(connect_timeout: float = 1.0) -> bool:
     """Decorator for testing a backend connection. This will intentionally cause a test failure if
     the wrapped function doesn't have dependencies installed, doesn't connect after a short timeout,
@@ -229,7 +270,34 @@ def fail_if_no_connection(connect_timeout: float = 1.0) -> bool:
     return decorator
 
 
-def assert_delta_approx_equal(dt1: datetime, dt2: datetime, target_delta, threshold_seconds=2):
-    """Assert that the given datetimes are approximately ``target_delta`` seconds apart"""
-    diff_in_seconds = (dt2 - dt1).total_seconds()
-    assert abs(diff_in_seconds - target_delta) <= threshold_seconds
+def is_installed(module_name: str) -> bool:
+    """Check if a given dependency is installed"""
+    try:
+        import_module(module_name)
+        return True
+    except ImportError:
+        return False
+
+
+def skip_missing_deps(module_name: str) -> pytest.Mark:
+    return pytest.mark.skipif(
+        not is_installed(module_name), reason=f'{module_name} is not installed'
+    )
+
+
+@contextmanager
+def ignore_deprecation():
+    """Temporarily ilence deprecation warnings"""
+    with warnings.catch_warnings():
+        warnings.simplefilter('ignore', category=DeprecationWarning)
+        yield
+
+
+# Some tests must disable url normalization to retain the custom `http+mock://` protocol
+patch_normalize_url = patch('requests_cache.cache_keys.normalize_url', side_effect=lambda x, y: x)
+
+# TODO: Debug OperationalErrors with pypy
+skip_pypy = pytest.mark.skipif(
+    platform.python_implementation() == 'PyPy',
+    reason='pypy-specific database locking issue',
+)
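
The new conftest helpers above are meant to be used roughly like this in test modules (a
hypothetical test function for illustration):

    from tests.conftest import ignore_deprecation, skip_missing_deps

    @skip_missing_deps('boto3')
    def test_requires_boto3():
        with ignore_deprecation():
            ...  # exercise a deprecated API without warning noise
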
diff --git a/tests/generate_test_db.py b/tests/generate_test_db.py
index 2ad1438..bd7f09a 100755
--- a/tests/generate_test_db.py
+++ b/tests/generate_test_db.py
@@ -17,7 +17,7 @@ def make_sample_db():
 
     for format in HTTPBIN_FORMATS:
         session.get(f'https://httpbin.org/{format}')
-    print(list(session.cache.urls))
+    print(session.cache.urls())
 
 
 if __name__ == '__main__':
diff --git a/tests/integration/base_cache_test.py b/tests/integration/base_cache_test.py
index 7065a12..d37bfa7 100644
--- a/tests/integration/base_cache_test.py
+++ b/tests/integration/base_cache_test.py
@@ -13,11 +13,10 @@ from urllib.parse import parse_qs, urlparse
 
 import pytest
 import requests
-from requests import PreparedRequest, Session
+from requests import ConnectionError, PreparedRequest, Session
 
 from requests_cache import ALL_METHODS, CachedResponse, CachedSession
-from requests_cache.backends.base import BaseCache
-from requests_cache.serializers import SERIALIZERS, SerializerPipeline, safe_pickle_serializer
+from requests_cache.backends import BaseCache
 from tests.conftest import (
     CACHE_NAME,
     ETAG,
@@ -32,16 +31,13 @@ from tests.conftest import (
     USE_PYTEST_HTTPBIN,
     assert_delta_approx_equal,
     httpbin,
+    skip_pypy,
 )
 
 logger = getLogger(__name__)
 
-# Handle optional dependencies if they're not installed; if so, skips will be shown in pytest output
-TEST_SERIALIZERS = SERIALIZERS.copy()
-try:
-    TEST_SERIALIZERS['safe_pickle'] = safe_pickle_serializer(secret_key='hunter2')
-except ImportError:
-    TEST_SERIALIZERS['safe_pickle'] = 'safe_pickle_placeholder'
+
+VALIDATOR_HEADERS = [{'ETag': ETAG}, {'Last-Modified': LAST_MODIFIED}]
 
 
 class BaseCacheTest:
@@ -51,51 +47,51 @@ class BaseCacheTest:
     init_kwargs: Dict = {}
 
     def init_session(self, cache_name=CACHE_NAME, clear=True, **kwargs) -> CachedSession:
+        kwargs = {**self.init_kwargs, **kwargs}
         kwargs.setdefault('allowable_methods', ALL_METHODS)
-        kwargs.setdefault('serializer', 'pickle')
-        backend = self.backend_class(cache_name, **self.init_kwargs, **kwargs)
+        backend = self.backend_class(cache_name, **kwargs)
         if clear:
             backend.clear()
 
-        return CachedSession(backend=backend, **self.init_kwargs, **kwargs)
+        return CachedSession(backend=backend, **kwargs)
 
     @classmethod
     def teardown_class(cls):
-        cls().init_session(clear=True)
+        session = cls().init_session(clear=True)
+        session.close()
 
-    @pytest.mark.parametrize('serializer', TEST_SERIALIZERS.values())
     @pytest.mark.parametrize('method', HTTPBIN_METHODS)
     @pytest.mark.parametrize('field', ['params', 'data', 'json'])
-    def test_all_methods(self, field, method, serializer):
-        """Test all relevant combinations of methods X data fields X serializers.
+    def test_all_methods(self, field, method, serializer=None):
+        """Test all relevant combinations of methods X data fields.
         Requests with different request params, data, or json should be cached under different keys.
-        """
-        if not isinstance(serializer, SerializerPipeline):
-            pytest.skip(f'Dependencies not installed for {serializer}')
 
+        Note: Serializer combinations are only tested for the filesystem backend.
+        """
         url = httpbin(method.lower())
         session = self.init_session(serializer=serializer)
         for params in [{'param_1': 1}, {'param_1': 2}, {'param_2': 2}]:
             assert session.request(method, url, **{field: params}).from_cache is False
             assert session.request(method, url, **{field: params}).from_cache is True
 
-    @pytest.mark.parametrize('serializer', TEST_SERIALIZERS.values())
     @pytest.mark.parametrize('response_format', HTTPBIN_FORMATS)
-    def test_all_response_formats(self, response_format, serializer):
-        """Test that all relevant combinations of response formats X serializers are cached correctly"""
-        if not isinstance(serializer, SerializerPipeline):
-            pytest.skip(f'Dependencies not installed for {serializer}')
-
+    def test_all_response_formats(self, response_format, serializer=None):
+        """Test all relevant combinations of response formats X serializers"""
         session = self.init_session(serializer=serializer)
         # Workaround for this issue: https://github.com/kevin1024/pytest-httpbin/issues/60
         if response_format == 'json' and USE_PYTEST_HTTPBIN:
-            session.allowable_codes = (200, 404)
+            session.settings.allowable_codes = (200, 404)
 
         r1 = session.get(httpbin(response_format))
         r2 = session.get(httpbin(response_format))
         assert r1.from_cache is False
         assert r2.from_cache is True
-        assert r1.content == r2.content
+
+        # For JSON responses, variations like whitespace won't be preserved
+        if r1.text.startswith('{'):
+            assert r1.json() == r2.json()
+        else:
+            assert r1.content == r2.content
 
     def test_response_no_duplicate_read(self):
         """Ensure that response data is read only once per request, whether it's cached or not"""
@@ -104,7 +100,7 @@ class BaseCacheTest:
 
         # Patch storage class to track number of times getitem is called, without changing behavior
         with patch.object(
-            storage_class, '__getitem__', side_effect=storage_class.__getitem__
+            storage_class, '__getitem__', side_effect=lambda k: CachedResponse()
         ) as getitem:
             session.get(httpbin('get'))
             assert getitem.call_count == 1
@@ -115,8 +111,8 @@ class BaseCacheTest:
     @pytest.mark.parametrize('n_redirects', range(1, 5))
     @pytest.mark.parametrize('endpoint', ['redirect', 'absolute-redirect', 'relative-redirect'])
     def test_redirect_history(self, endpoint, n_redirects):
-        """Test redirect caching (in separate `redirects` cache) with all types of redirect endpoints,
-        using different numbers of consecutive redirects
+        """Test redirect caching (in separate `redirects` cache) with all types of redirect
+        endpoints, using different numbers of consecutive redirects
         """
         session = self.init_session()
         session.get(httpbin(f'{endpoint}/{n_redirects}'))
@@ -146,9 +142,11 @@ class BaseCacheTest:
         response_1 = get_json(httpbin('cookies/set/test1/test2'))
         with session.cache_disabled():
             assert get_json(httpbin('cookies')) == response_1
+
         # From cache
         response_2 = get_json(httpbin('cookies'))
         assert response_2 == get_json(httpbin('cookies'))
+
         # Not from cache
         with session.cache_disabled():
             response_3 = get_json(httpbin('cookies/set/test3/test4'))
@@ -206,14 +204,18 @@ class BaseCacheTest:
         response = session.get(httpbin('cache'))
         assert response.from_cache == expected_from_cache
 
+    @pytest.mark.parametrize('validator_headers', VALIDATOR_HEADERS)
     @pytest.mark.parametrize(
-        'validator_headers', [{'ETag': ETAG}, {'Last-Modified': LAST_MODIFIED}]
+        'cache_headers',
+        [
+            {'Cache-Control': 'max-age=0'},
+            {'Cache-Control': 'max-age=0,must-revalidate'},
+            {'Cache-Control': 'no-cache'},
+            {'Expires': '0'},
+        ],
     )
-    @pytest.mark.parametrize('cache_headers', [{'Cache-Control': 'max-age=0'}, {'Expires': '0'}])
-    def test_conditional_request__max_age_0(self, cache_headers, validator_headers):
-        """With both max-age=0 and a validator, the response should be saved and revalidated on next
-        request
-        """
+    def test_conditional_request__response_headers(self, cache_headers, validator_headers):
+        """Test response headers that can initiate revalidation before a cached response expires"""
         url = httpbin('response-headers')
         response_headers = {**cache_headers, **validator_headers}
         session = self.init_session(cache_control=True)
@@ -226,13 +228,10 @@ class BaseCacheTest:
             response = session.get(url, params=response_headers)
 
         assert response.from_cache is True
-        assert response.is_expired is True
 
-    @pytest.mark.parametrize(
-        'validator_headers', [{'ETag': ETAG}, {'Last-Modified': LAST_MODIFIED}]
-    )
+    @pytest.mark.parametrize('validator_headers', VALIDATOR_HEADERS)
     @pytest.mark.parametrize('cache_headers', [{'Cache-Control': 'max-age=0'}])
-    def test_conditional_request_refreshes_expire_date(self, cache_headers, validator_headers):
+    def test_conditional_request__refreshes_expire_date(self, cache_headers, validator_headers):
         """Test that revalidation attempt with 304 responses causes stale entry to become fresh again considering
         Cache-Control header of the 304 response."""
         url = httpbin('response-headers')
@@ -287,7 +286,7 @@ class BaseCacheTest:
         for i in range(5):
             assert session.post(httpbin('post'), files={'file1': BytesIO(b'10' * 1024)}).from_cache
 
-    def test_remove_expired_responses(self):
+    def test_delete__expired(self):
         session = self.init_session(expire_after=1)
 
         # Populate the cache with several responses that should expire immediately
@@ -296,15 +295,16 @@ class BaseCacheTest:
         session.get(httpbin('redirect/1'))
         sleep(1)
 
-        # Cache a response + redirects, which should be the only non-expired cache items
+        # Cache a response and some redirects, which should be the only non-expired cache items
         session.get(httpbin('get'), expire_after=-1)
         session.get(httpbin('redirect/3'), expire_after=-1)
-        session.cache.remove_expired_responses()
+        assert len(session.cache.redirects.keys()) == 4
+        session.cache.delete(expired=True)
 
         assert len(session.cache.responses.keys()) == 2
         assert len(session.cache.redirects.keys()) == 3
-        assert not session.cache.has_url(httpbin('redirect/1'))
-        assert not any([session.cache.has_url(httpbin(f)) for f in HTTPBIN_FORMATS])
+        assert not session.cache.contains(url=httpbin('redirect/1'))
+        assert not any([session.cache.contains(url=httpbin(f)) for f in HTTPBIN_FORMATS])
 
     @pytest.mark.parametrize('method', HTTPBIN_METHODS)
     def test_filter_request_headers(self, method):
@@ -314,7 +314,7 @@ class BaseCacheTest:
         assert response.from_cache is False
         response = session.request(method, url, headers={"Authorization": "<Secret Key>"})
         assert response.from_cache is True
-        assert response.request.headers.get('Authorization') is None
+        assert response.request.headers.get('Authorization') == 'REDACTED'
 
     @pytest.mark.parametrize('method', HTTPBIN_METHODS)
     def test_filter_request_query_parameters(self, method):
@@ -326,23 +326,26 @@ class BaseCacheTest:
         assert response.from_cache is True
         query = urlparse(response.request.url).query
         query_dict = parse_qs(query)
-        assert 'api_key' not in query_dict
+        assert query_dict['api_key'] == ['REDACTED']
 
+    @skip_pypy
     @pytest.mark.parametrize('post_type', ['data', 'json'])
     def test_filter_request_post_data(self, post_type):
         method = 'POST'
         url = httpbin(method.lower())
+        body = {"api_key": "<Secret Key>"}
+        headers = {}
+        if post_type == 'data':
+            body = json.dumps(body)
+            headers = {'Content-Type': 'application/json'}
         session = self.init_session(ignored_parameters=['api_key'])
-        response = session.request(method, url, **{post_type: {"api_key": "<Secret Key>"}})
-        assert response.from_cache is False
-        response = session.request(method, url, **{post_type: {"api_key": "<Secret Key>"}})
+
+        response = session.request(method, url, headers=headers, **{post_type: body})
+        response = session.request(method, url, headers=headers, **{post_type: body})
         assert response.from_cache is True
-        if post_type == 'data':
-            body = parse_qs(response.request.body)
-            assert "api_key" not in body
-        elif post_type == 'json':
-            body = json.loads(response.request.body)
-            assert "api_key" not in body
+
+        parsed_body = json.loads(response.request.body)
+        assert parsed_body['api_key'] == 'REDACTED'
 
     @pytest.mark.parametrize('executor_class', [ThreadPoolExecutor, ProcessPoolExecutor])
     @pytest.mark.parametrize('iteration', range(N_ITERATIONS))
@@ -353,11 +356,15 @@ class BaseCacheTest:
         """
         start = time()
         url = httpbin('anything')
-        self.init_session(clear=True)
 
-        session_factory = partial(self.init_session, clear=False)
-        request_func = partial(_send_request, session_factory, url)
-        with ProcessPoolExecutor(max_workers=N_WORKERS) as executor:
+        # For multithreading, we can share a session object, but we can't for multiprocessing
+        session = self.init_session(clear=True, expire_after=1)
+        if executor_class is ProcessPoolExecutor:
+            session = None
+        session_factory = partial(self.init_session, clear=False, expire_after=1)
+
+        request_func = partial(_send_request, session, session_factory, url)
+        with executor_class(max_workers=N_WORKERS) as executor:
             _ = list(executor.map(request_func, range(N_REQUESTS_PER_ITERATION)))
 
         # Some logging for debug purposes
@@ -371,7 +378,7 @@ class BaseCacheTest:
         )
 
 
-def _send_request(session_factory, url, _=None):
+def _send_request(session, session_factory, url, _=None):
     """Concurrent request function for stress tests. Defined in module scope so it can be serialized
     to multiple processes.
     """
@@ -379,5 +386,15 @@ def _send_request(session_factory, url, _=None):
     n_unique_responses = int(N_REQUESTS_PER_ITERATION / 4)
     i = randint(1, n_unique_responses)
 
-    session = session_factory()
-    return session.get(url, params={f'key_{i}': f'value_{i}'})
+    # Threads can share a session object, but processes will create their own session because it
+    # can't be serialized
+    if session is None:
+        session = session_factory()
+
+    sleep(0.01)
+    try:
+        return session.get(url, params={f'key_{i}': f'value_{i}'})
+    # Sometimes the local http server is the bottleneck here; just retry once
+    except ConnectionError:
+        sleep(0.1)
+        return session.get(url, params={f'key_{i}': f'value_{i}'})
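
The assertions above use the new BaseCache methods that replace remove_expired_responses()
and has_url(). A minimal sketch of the same calls outside the test suite (the cache name is
hypothetical):

    from requests_cache import CachedSession

    session = CachedSession('demo_cache')
    session.get('https://httpbin.org/get')

    session.cache.delete(expired=True)  # drop expired responses
    if session.cache.contains(url='https://httpbin.org/get'):
        print(session.cache.urls())     # list cached URLs
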
diff --git a/tests/integration/base_storage_test.py b/tests/integration/base_storage_test.py
index e89931c..4870c02 100644
--- a/tests/integration/base_storage_test.py
+++ b/tests/integration/base_storage_test.py
@@ -1,24 +1,26 @@
 """Common tests to run for all backends (BaseStorage subclasses)"""
+from concurrent.futures import ThreadPoolExecutor
+from datetime import datetime
 from typing import Dict, Type
 
 import pytest
+from attrs import define, field
 
 from requests_cache.backends import BaseStorage
-from tests.conftest import CACHE_NAME
+from requests_cache.models import CachedResponse
+from tests.conftest import CACHE_NAME, N_ITERATIONS, N_REQUESTS_PER_ITERATION, N_WORKERS
 
 
-# TODO: Parameterize tests for all serializers?
 class BaseStorageTest:
     """Base class for testing cache storage dict-like interfaces"""
 
     storage_class: Type[BaseStorage] = None
     init_kwargs: Dict = {}
-    picklable: bool = False
     num_instances: int = 10  # Max number of cache instances to test
 
     def init_cache(self, cache_name=CACHE_NAME, index=0, clear=True, **kwargs):
-        kwargs.setdefault('serializer', 'pickle')
-        cache = self.storage_class(cache_name, f'table_{index}', **self.init_kwargs, **kwargs)
+        kwargs = {**self.init_kwargs, **kwargs}
+        cache = self.storage_class(cache_name, f'table_{index}', **kwargs)
         if clear:
             cache.clear()
         return cache
@@ -61,6 +63,16 @@ class BaseStorageTest:
             assert list(cache.items()) == [(f'key_{i}', f'value_{i}')]
             assert dict(cache) == {f'key_{i}': f'value_{i}'}
 
+    def test_cache_key(self):
+        """The cache_key attribute should be available on responses returned from all
+        mapping/collection methods
+        """
+        cache = self.init_cache()
+        cache['key'] = CachedResponse()
+        assert cache['key'].cache_key == 'key'
+        assert list(cache.values())[0].cache_key == 'key'
+        assert list(cache.items())[0][1].cache_key == 'key'
+
     def test_del(self):
         """Some more tests to ensure ``delitem`` deletes only the expected items"""
         cache = self.init_cache()
@@ -84,6 +96,14 @@ class BaseStorageTest:
         assert set(cache.keys()) == {f'key_{i}' for i in range(5, 20)}
         assert set(cache.values()) == {f'value_{i}' for i in range(5, 20)}
 
+    def test_bulk_delete__noop(self):
+        """Just make sure bulk_delete doesn't do anything unexpected if no keys are provided"""
+        cache = self.init_cache()
+        for i in range(20):
+            cache[f'key_{i}'] = f'value_{i}'
+        cache.bulk_delete([])
+        assert len(cache) == 20
+
     def test_keyerrors(self):
         """Accessing or deleting a deleted item should raise a KeyError"""
         cache = self.init_cache()
@@ -96,19 +116,28 @@ class BaseStorageTest:
             cache['key']
 
     def test_picklable_dict(self):
-        if self.picklable:
-            cache = self.init_cache()
-            cache['key_1'] = Picklable()
-            assert cache['key_1'].attr_1 == 'value_1'
-            assert cache['key_1'].attr_2 == 'value_2'
+        cache = self.init_cache(serializer='pickle')
+        original_obj = BasicDataclass(
+            bool_attr=True,
+            datetime_attr=datetime(2022, 2, 2),
+            int_attr=2,
+            str_attr='value',
+        )
+        cache['key_1'] = original_obj
+
+        obj = cache['key_1']
+        assert obj.bool_attr == original_obj.bool_attr
+        assert obj.datetime_attr == original_obj.datetime_attr
+        assert obj.int_attr == original_obj.int_attr
+        assert obj.str_attr == original_obj.str_attr
 
     def test_clear_and_work_again(self):
         cache_1 = self.init_cache()
         cache_2 = self.init_cache(connection=getattr(cache_1, 'connection', None))
 
         for i in range(5):
-            cache_1[i] = i
-            cache_2[i] = i
+            cache_1[f'key_{i}'] = f'value_{i}'
+            cache_2[f'key_{i}'] = f'value_{i}'
 
         assert len(cache_1) == len(cache_2) == 5
         cache_1.clear()
@@ -118,8 +147,8 @@ class BaseStorageTest:
     def test_same_settings(self):
         cache_1 = self.init_cache()
         cache_2 = self.init_cache(connection=getattr(cache_1, 'connection', None))
-        cache_1['key_1'] = 1
-        cache_2['key_2'] = 2
+        cache_1['key_1'] = 'value_1'
+        cache_2['key_2'] = 'value_2'
         assert cache_1 == cache_2
 
     def test_str(self):
@@ -130,7 +159,21 @@ class BaseStorageTest:
         for i in range(10):
             assert f'key_{i}' in str(cache)
 
+    def test_concurrency(self):
+        """Test a large number of concurrent write operations for each backend"""
+        cache = self.init_cache()
+
+        def write(i):
+            cache[f'key_{i}'] = f'value_{i}'
+
+        n_iterations = N_ITERATIONS * N_REQUESTS_PER_ITERATION * 10
+        with ThreadPoolExecutor(max_workers=N_WORKERS * 2) as executor:
+            _ = list(executor.map(write, range(n_iterations)))
+
 
-class Picklable:
-    attr_1 = 'value_1'
-    attr_2 = 'value_2'
+@define
+class BasicDataclass:
+    bool_attr: bool = field(default=None)
+    datetime_attr: datetime = field(default=None)
+    int_attr: int = field(default=None)
+    str_attr: str = field(default=None)
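
The storage tests above treat every backend as a plain mapping; a minimal sketch of that
contract using the filesystem backend (the directory name is hypothetical):

    from requests_cache.backends import FileDict
    from requests_cache.models import CachedResponse

    cache = FileDict('demo_cache', serializer='pickle', use_temp=True)
    cache['key'] = CachedResponse()
    assert cache['key'].cache_key == 'key'  # responses know their own cache key
    cache.bulk_delete([])                   # no-op when no keys are given
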
diff --git a/tests/integration/test_dynamodb.py b/tests/integration/test_dynamodb.py
index 2b55339..af54e67 100644
--- a/tests/integration/test_dynamodb.py
+++ b/tests/integration/test_dynamodb.py
@@ -1,12 +1,21 @@
+from collections import OrderedDict
+from decimal import Decimal
 from unittest.mock import patch
 
 import pytest
 
 from requests_cache.backends import DynamoDbCache, DynamoDbDict
-from tests.conftest import AWS_OPTIONS, fail_if_no_connection
+from tests.conftest import CACHE_NAME, fail_if_no_connection
 from tests.integration.base_cache_test import BaseCacheTest
 from tests.integration.base_storage_test import BaseStorageTest
 
+AWS_OPTIONS = {
+    'endpoint_url': 'http://localhost:8000',
+    'region_name': 'us-east-1',
+    'aws_access_key_id': 'placeholder',
+    'aws_secret_access_key': 'placeholder',
+}
+
 
 @pytest.fixture(scope='module', autouse=True)
 @fail_if_no_connection(connect_timeout=5)
@@ -21,14 +30,64 @@ def ensure_connection():
 class TestDynamoDbDict(BaseStorageTest):
     storage_class = DynamoDbDict
     init_kwargs = AWS_OPTIONS
-    picklable = True
+
+    def init_cache(self, cache_name=CACHE_NAME, index=0, clear=True, **kwargs):
+        """For tests that use multiple tables, make index part of the table name"""
+        kwargs = {**self.init_kwargs, **kwargs}
+        cache = self.storage_class(f'{cache_name}_{index}', **kwargs)
+        if clear:
+            cache.clear()
+        return cache
 
     @patch('requests_cache.backends.dynamodb.boto3.resource')
     def test_connection_kwargs(self, mock_resource):
         """A spot check to make sure optional connection kwargs gets passed to connection"""
-        DynamoDbDict('test', region_name='us-east-2', invalid_kwarg='???')
+        DynamoDbDict('test_table', region_name='us-east-2', invalid_kwarg='???')
         mock_resource.assert_called_with('dynamodb', region_name='us-east-2')
 
+    def test_create_table_error(self):
+        """An error other than 'table already exists' should be reraised"""
+        from botocore.exceptions import ClientError
+
+        cache = self.init_cache()
+        error = ClientError({'Error': {'Code': 'NullPointerException'}}, 'CreateTable')
+        with patch.object(cache.connection, 'create_table', side_effect=error):
+            with pytest.raises(ClientError):
+                cache._create_table()
+
+    def test_enable_ttl_error(self):
+        """An error other than 'ttl already enabled' should be reraised"""
+        from botocore.exceptions import ClientError
+
+        cache = self.init_cache()
+        error = ClientError({'Error': {'Code': 'NullPointerException'}}, 'CreateTable')
+        with patch.object(cache.connection.meta.client, 'update_time_to_live', side_effect=error):
+            with pytest.raises(ClientError):
+                cache._enable_ttl()
+
+    @pytest.mark.parametrize('ttl_enabled', [True, False])
+    def test_ttl(self, ttl_enabled):
+        """DynamoDB's TTL removal process can take up to 48 hours to run, so just test if the
+        'ttl' attribute is set correctly if enabled, and not set if disabled.
+        """
+        cache = self.init_cache(ttl=ttl_enabled)
+        item = OrderedDict(foo='bar')
+        item.expires_unix = 60
+        cache['key'] = item
+
+        # 'ttl' is a reserved word, so to retrieve it we need to alias it
+        item = cache._table.get_item(
+            Key={'key': 'key'},
+            ProjectionExpression='#t',
+            ExpressionAttributeNames={'#t': 'ttl'},
+        )
+        ttl_value = item['Item'].get('ttl')
+
+        if ttl_enabled:
+            assert isinstance(ttl_value, Decimal)
+        else:
+            assert ttl_value is None
+
 
 class TestDynamoDbCache(BaseCacheTest):
     backend_class = DynamoDbCache
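 
The new `test_ttl` above has to alias the attribute name, since `ttl` is a reserved word in DynamoDB expressions. A hedged boto3 sketch of the same query; the endpoint, table, and key names are placeholders mirroring the test options:

    import boto3

    dynamodb = boto3.resource(
        'dynamodb',
        endpoint_url='http://localhost:8000',
        region_name='us-east-1',
        aws_access_key_id='placeholder',
        aws_secret_access_key='placeholder',
    )
    table = dynamodb.Table('test_table_0')

    # 'ttl' can't appear directly in a ProjectionExpression, so alias it as '#t'
    item = table.get_item(
        Key={'key': 'key'},
        ProjectionExpression='#t',
        ExpressionAttributeNames={'#t': 'ttl'},
    )
    print(item['Item'].get('ttl'))  # A Decimal (epoch seconds) when TTL is enabled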
diff --git a/tests/integration/test_filesystem.py b/tests/integration/test_filesystem.py
index b651186..99625c4 100644
--- a/tests/integration/test_filesystem.py
+++ b/tests/integration/test_filesystem.py
@@ -2,13 +2,33 @@ from shutil import rmtree
 from tempfile import gettempdir
 
 import pytest
-from appdirs import user_cache_dir
+from platformdirs import user_cache_dir
 
 from requests_cache.backends import FileCache, FileDict
-from requests_cache.serializers import SERIALIZERS, SerializerPipeline
+from requests_cache.serializers import (
+    SERIALIZERS,
+    SerializerPipeline,
+    Stage,
+    json_serializer,
+    safe_pickle_serializer,
+    yaml_serializer,
+)
+from tests.conftest import HTTPBIN_FORMATS, HTTPBIN_METHODS
 from tests.integration.base_cache_test import BaseCacheTest
 from tests.integration.base_storage_test import CACHE_NAME, BaseStorageTest
 
+# Handle optional dependencies if they're not installed,
+# so any skipped tests will explicitly be shown in pytest output
+TEST_SERIALIZERS = SERIALIZERS.copy()
+try:
+    TEST_SERIALIZERS['safe_pickle'] = safe_pickle_serializer(secret_key='hunter2')
+except ImportError:
+    TEST_SERIALIZERS['safe_pickle'] = 'safe_pickle_placeholder'
+
+
+def _valid_serializer(serializer) -> bool:
+    return isinstance(serializer, (SerializerPipeline, Stage))
+
 
 class TestFileDict(BaseStorageTest):
     storage_class = FileDict
@@ -19,7 +39,7 @@ class TestFileDict(BaseStorageTest):
         rmtree(CACHE_NAME, ignore_errors=True)
 
     def init_cache(self, index=0, clear=True, **kwargs):
-        cache = FileDict(f'{CACHE_NAME}_{index}', serializer='pickle', use_temp=True, **kwargs)
+        cache = FileDict(f'{CACHE_NAME}_{index}', use_temp=True, **kwargs)
         if clear:
             cache.clear()
         return cache
@@ -36,22 +56,43 @@ class TestFileDict(BaseStorageTest):
         assert not str(relative_path).startswith(gettempdir())
         assert str(temp_path).startswith(gettempdir())
 
-    def test_load_previous_binary_file(self):
-        """If we init a new cache and load a file previously saved in binary mode, the cache should
-        handle this and open future files in binary mode for the rest of the session.
-        """
-        cache = self.init_cache()
-        cache['foo'] = 'bar'
-
-        cache = self.init_cache(clear=False)
-        assert cache['foo'] == 'bar'
-        assert cache.is_binary is True
+    def test_custom_extension(self):
+        cache = self.init_cache(extension='dat')
+        cache['key'] = 'value'
+        assert cache._path('key').suffix == '.dat'
 
 
 class TestFileCache(BaseCacheTest):
     backend_class = FileCache
     init_kwargs = {'use_temp': True}
 
+    @pytest.mark.parametrize('serializer', TEST_SERIALIZERS.values())
+    @pytest.mark.parametrize('method', HTTPBIN_METHODS)
+    @pytest.mark.parametrize('field', ['params', 'data', 'json'])
+    def test_all_methods(self, field, method, serializer):
+        """Test all relevant combinations of methods X data fields X serializers"""
+        if not _valid_serializer(serializer):
+            pytest.skip(f'Dependencies not installed for {serializer}')
+        super().test_all_methods(field, method, serializer)
+
+    @pytest.mark.parametrize('serializer', TEST_SERIALIZERS.values())
+    @pytest.mark.parametrize('response_format', HTTPBIN_FORMATS)
+    def test_all_response_formats(self, response_format, serializer):
+        """Test all relevant combinations of response formats X serializers"""
+        if not _valid_serializer(serializer):
+            pytest.skip(f'Dependencies not installed for {serializer}')
+        serializer.set_decode_content(False)
+        super().test_all_response_formats(response_format, serializer)
+
+    @pytest.mark.parametrize('serializer', [json_serializer, yaml_serializer])
+    @pytest.mark.parametrize('response_format', HTTPBIN_FORMATS)
+    def test_all_response_formats__no_decode_content(self, response_format, serializer):
+        """Test with decode_content=True for text-based serialization formats"""
+        if not _valid_serializer(serializer):
+            pytest.skip(f'Dependencies not installed for {serializer}')
+        serializer.set_decode_content(True)
+        self.test_all_response_formats(response_format, serializer)
+
     @pytest.mark.parametrize('serializer_name', SERIALIZERS.keys())
     def test_paths(self, serializer_name):
         if not isinstance(SERIALIZERS[serializer_name], SerializerPipeline):
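 
The `TEST_SERIALIZERS` setup above keeps skipped tests visible in pytest output when optional dependencies are missing. A hedged sketch of the same guard in application code (the cache name and secret are placeholders; `safe_pickle` requires the itsdangerous package):

    from requests_cache import CachedSession
    from requests_cache.serializers import safe_pickle_serializer

    try:
        serializer = safe_pickle_serializer(secret_key='hunter2')
    except ImportError:
        serializer = 'pickle'  # Fall back if itsdangerous is not installed

    session = CachedSession('demo_cache', backend='filesystem', serializer=serializer, use_temp=True)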
diff --git a/tests/integration/test_mongodb.py b/tests/integration/test_mongodb.py
index 955152f..1dfae87 100644
--- a/tests/integration/test_mongodb.py
+++ b/tests/integration/test_mongodb.py
@@ -1,41 +1,40 @@
+from concurrent.futures import ProcessPoolExecutor, ThreadPoolExecutor
+from logging import getLogger
+from time import sleep
 from unittest.mock import patch
 
 import pytest
-from gridfs import GridFS
-from gridfs.errors import CorruptGridFile, FileExists
-
-from requests_cache.backends import (
-    GridFSCache,
-    GridFSPickleDict,
-    MongoCache,
-    MongoDict,
-    MongoPickleDict,
-)
-from tests.conftest import fail_if_no_connection
+from tenacity import retry, retry_if_exception_type, stop_after_attempt, wait_fixed
+
+from requests_cache.backends import GridFSCache, GridFSDict, MongoCache, MongoDict
+from requests_cache.policy import NEVER_EXPIRE
+from tests.conftest import N_ITERATIONS, fail_if_no_connection, httpbin
 from tests.integration.base_cache_test import BaseCacheTest
 from tests.integration.base_storage_test import BaseStorageTest
 
+try:
+    from pymongo.errors import ServerSelectionTimeoutError
+except ImportError:
+    pass
+
+logger = getLogger(__name__)
+
 
 @pytest.fixture(scope='module', autouse=True)
-@fail_if_no_connection()
+@fail_if_no_connection(connect_timeout=2)
 def ensure_connection():
     """Fail all tests in this module if MongoDB is not running"""
     from pymongo import MongoClient
 
-    client = MongoClient(serverSelectionTimeoutMS=200)
+    client = MongoClient(serverSelectionTimeoutMS=2000)
     client.server_info()
 
 
 class TestMongoDict(BaseStorageTest):
     storage_class = MongoDict
 
-
-class TestMongoPickleDict(BaseStorageTest):
-    storage_class = MongoPickleDict
-    picklable = True
-
     def test_connection_kwargs(self):
-        """A spot check to make sure optional connection kwargs get passed to connection"""
+        """A spot check to make sure optional connection kwargs gets passed to connection"""
         # MongoClient prevents direct access to private members like __init_kwargs;
         # need to test indirectly using its repr
         cache = MongoDict(
@@ -53,17 +52,67 @@ class TestMongoPickleDict(BaseStorageTest):
 class TestMongoCache(BaseCacheTest):
     backend_class = MongoCache
 
-
-class TestGridFSPickleDict(BaseStorageTest):
-    storage_class = GridFSPickleDict
+    def test_ttl(self):
+        session = self.init_session()
+        session.cache.set_ttl(1)
+
+        session.get(httpbin('get'))
+        response = session.get(httpbin('get'))
+        assert response.from_cache is True
+
+        # Wait for removal background process to run
+        # Unfortunately there doesn't seem to be a way to manually trigger it
+        for i in range(70):
+            if response.cache_key not in session.cache.responses:
+                logger.debug(f'Removed {response.cache_key} after {i} seconds')
+                break
+            sleep(1)
+
+        assert response.cache_key not in session.cache.responses
+
+    def test_ttl__overwrite(self):
+        session = self.init_session()
+        session.cache.set_ttl(60)
+
+        # Should have no effect
+        session.cache.set_ttl(360)
+        assert session.cache.get_ttl() == 60
+
+        # Should create new index
+        session.cache.set_ttl(360, overwrite=True)
+        assert session.cache.get_ttl() == 360
+
+        # Should drop index
+        session.cache.set_ttl(None, overwrite=True)
+        assert session.cache.get_ttl() is None
+
+        # Should attempt to drop non-existent index and ignore error
+        session.cache.set_ttl(NEVER_EXPIRE, overwrite=True)
+        assert session.cache.get_ttl() is None
+
+    @retry(
+        retry=retry_if_exception_type(ServerSelectionTimeoutError),
+        reraise=True,
+        stop=stop_after_attempt(5),
+        wait=wait_fixed(5),
+    )
+    @pytest.mark.parametrize('executor_class', [ThreadPoolExecutor, ProcessPoolExecutor])
+    @pytest.mark.parametrize('iteration', range(N_ITERATIONS))
+    def test_concurrency(self, iteration, executor_class):
+        """On GitHub runners, sometimes the MongoDB container is not ready yet by the time this,
+        runs, so some retries are added here.
+        """
+        super().test_concurrency(iteration, executor_class)
+
+
+class TestGridFSDict(BaseStorageTest):
+    storage_class = GridFSDict
     picklable = True
     num_instances = 1  # Only test a single collection instead of multiple
 
     def test_connection_kwargs(self):
-        """A spot check to make sure optional connection kwargs get passed to connection"""
-        # MongoClient prevents direct access to private members like __init_kwargs;
-        # need to test indirectly using its repr
-        cache = MongoDict(
+        """A spot check to make sure optional connection kwargs gets passed to connection"""
+        cache = GridFSDict(
             'test',
             host='mongodb://0.0.0.0',
             port=2222,
@@ -76,12 +125,18 @@ class TestGridFSPickleDict(BaseStorageTest):
 
     def test_corrupt_file(self):
         """A corrupted file should be handled and raise a KeyError instead"""
+        from gridfs import GridFS
+        from gridfs.errors import CorruptGridFile
+
         cache = self.init_cache()
         cache['key'] = 'value'
         with pytest.raises(KeyError), patch.object(GridFS, 'find_one', side_effect=CorruptGridFile):
             cache['key']
 
     def test_file_exists(self):
+        from gridfs import GridFS
+        from gridfs.errors import FileExists
+
         cache = self.init_cache()
 
         # This write should just quietly fail
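 
The `test_ttl` additions above poll for up to 70 seconds because MongoDB expires documents via a background task that runs roughly once per minute and cannot be triggered manually. A hedged sketch of the underlying mechanism (the collection and field names are illustrative, not requests-cache internals):

    from pymongo import MongoClient

    client = MongoClient()
    responses = client['demo_cache']['responses']

    # A TTL index deletes documents once the indexed field is older than
    # expireAfterSeconds; deletion happens on MongoDB's own schedule
    responses.create_index('created_at', expireAfterSeconds=1)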
diff --git a/tests/integration/test_redis.py b/tests/integration/test_redis.py
index 08f1ee0..2a34899 100644
--- a/tests/integration/test_redis.py
+++ b/tests/integration/test_redis.py
@@ -1,9 +1,10 @@
 from unittest.mock import patch
 
 import pytest
+from redis import StrictRedis
 
-from requests_cache.backends.redis import RedisCache, RedisDict, RedisHashDict
-from tests.conftest import fail_if_no_connection
+from requests_cache.backends import RedisCache, RedisDict, RedisHashDict
+from tests.conftest import fail_if_no_connection, httpbin
 from tests.integration.base_cache_test import BaseCacheTest
 from tests.integration.base_storage_test import BaseStorageTest
 
@@ -32,7 +33,30 @@ class TestRedisHashDict(TestRedisDict):
     storage_class = RedisHashDict
     num_instances: int = 10  # Supports multiple instances, since this stores items under hash keys
     picklable = True
+    init_kwargs = {'serializer': 'pickle'}
 
 
 class TestRedisCache(BaseCacheTest):
     backend_class = RedisCache
+
+    @patch.object(StrictRedis, 'setex')
+    def test_ttl(self, mock_setex):
+        session = self.init_session(expire_after=60)
+        session.get(httpbin('get'))
+        call_args = mock_setex.mock_calls[0][1]
+        assert call_args[1] == 3660  # Should be expiration + default offset
+
+    @patch.object(StrictRedis, 'setex')
+    def test_ttl__offset(self, mock_setex):
+        session = self.init_session(expire_after=60, ttl_offset=500)
+        session.get(httpbin('get'))
+        call_args = mock_setex.mock_calls[0][1]
+        assert call_args[1] == 560  # Should be expiration + custom offset
+
+    @patch.object(StrictRedis, 'setex')
+    @patch.object(StrictRedis, 'set')
+    def test_ttl__disabled(self, mock_set, mock_setex):
+        session = self.init_session(expire_after=60, ttl=False)
+        session.get(httpbin('get'))
+        mock_setex.assert_not_called()
+        mock_set.assert_called()
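 
The TTL assertions above imply that the Redis backend sets a key expiration equal to the cache expiration plus an offset, defaulting to 3600 seconds. A hedged sketch of that arithmetic; the key and value are placeholders, and the default offset is inferred from the asserted value 3660:

    from redis import StrictRedis

    DEFAULT_TTL_OFFSET = 3600  # Assumption inferred from the tests above

    redis = StrictRedis()
    expire_after = 60
    # SETEX stores the value with a TTL; the offset delays Redis eviction past
    # cache expiration, presumably so stale responses remain available for
    # features like revalidation
    redis.setex('demo_key', expire_after + DEFAULT_TTL_OFFSET, b'serialized response')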
diff --git a/tests/integration/test_sqlite.py b/tests/integration/test_sqlite.py
index 1840a78..04e4b61 100644
--- a/tests/integration/test_sqlite.py
+++ b/tests/integration/test_sqlite.py
@@ -1,19 +1,24 @@
 import os
+import pickle
+from datetime import datetime, timedelta
 from os.path import join
 from tempfile import NamedTemporaryFile, gettempdir
 from threading import Thread
 from unittest.mock import patch
 
 import pytest
-from appdirs import user_cache_dir
+from platformdirs import user_cache_dir
 
-from requests_cache.backends.base import BaseCache
-from requests_cache.backends.sqlite import MEMORY_URI, SQLiteCache, SQLiteDict, SQLitePickleDict
+from requests_cache.backends import BaseCache, SQLiteCache, SQLiteDict
+from requests_cache.backends.sqlite import MEMORY_URI
+from requests_cache.models import CachedResponse
+from tests.conftest import skip_pypy
 from tests.integration.base_cache_test import BaseCacheTest
 from tests.integration.base_storage_test import CACHE_NAME, BaseStorageTest
 
 
-class SQLiteTestCase(BaseStorageTest):
+class TestSQLiteDict(BaseStorageTest):
+    storage_class = SQLiteDict
     init_kwargs = {'use_temp': True}
 
     @classmethod
@@ -23,6 +28,12 @@ class SQLiteTestCase(BaseStorageTest):
         except Exception:
             pass
 
+    @patch('requests_cache.backends.sqlite.sqlite3')
+    def test_connection_kwargs(self, mock_sqlite):
+        """A spot check to make sure optional connection kwargs gets passed to connection"""
+        cache = self.storage_class('test', use_temp=True, timeout=0.5, invalid_kwarg='???')
+        mock_sqlite.connect.assert_called_with(cache.db_path, timeout=0.5, check_same_thread=False)
+
     def test_use_cache_dir(self):
         relative_path = self.storage_class(CACHE_NAME).db_path
         cache_dir_path = self.storage_class(CACHE_NAME, use_cache_dir=True).db_path
@@ -51,7 +62,7 @@ class SQLiteTestCase(BaseStorageTest):
         assert len(cache) == 0
 
     def test_use_memory__uri(self):
-        self.init_cache(':memory:').db_path == ':memory:'
+        assert self.init_cache(':memory:').db_path == ':memory:'
 
     def test_non_dir_parent_exists(self):
         """Expect a custom error message if a parent path already exists but isn't a directory"""
@@ -72,7 +83,7 @@ class SQLiteTestCase(BaseStorageTest):
         assert set(cache.keys()) == {f'key_{i}' for i in range(n_items)}
         assert set(cache.values()) == {f'value_{i}' for i in range(n_items)}
 
-    def test_chunked_bulk_delete(self):
+    def test_bulk_delete__chunked(self):
         """When deleting more items than SQLite can handle in a single statement, it should be
         chunked into multiple smaller statements
         """
@@ -94,9 +105,23 @@ class SQLiteTestCase(BaseStorageTest):
         cache.bulk_delete(keys)
         assert len(cache) == 0
 
+    def test_bulk_commit__noop(self):
+        def do_noop_bulk(cache):
+            with cache.bulk_commit():
+                pass
+            del cache
+
+        cache = self.init_cache()
+        thread = Thread(target=do_noop_bulk, args=(cache,))
+        thread.start()
+        thread.join()
+
+        # make sure connection is not closed by the thread
+        cache['key_1'] = 'value_1'
+        assert list(cache.keys()) == ['key_1']
+
     def test_switch_commit(self):
         cache = self.init_cache()
-        cache.clear()
         cache['key_1'] = 'value_1'
         cache = self.init_cache(clear=False)
         assert 'key_1' in cache
@@ -108,47 +133,138 @@ class SQLiteTestCase(BaseStorageTest):
         assert 2 not in cache
         assert cache._can_commit is True
 
-    def test_fast_save(self):
-        cache_1 = self.init_cache(1, fast_save=True)
-        cache_2 = self.init_cache(2, fast_save=True)
+    @skip_pypy
+    @pytest.mark.parametrize('kwargs', [{'fast_save': True}, {'wal': True}])
+    def test_pragma(self, kwargs):
+        """Test settings that make additional PRAGMA statements"""
+        cache_1 = self.init_cache('cache_1', **kwargs)
+        cache_2 = self.init_cache('cache_2', **kwargs)
 
-        n = 1000
+        n = 500
         for i in range(n):
-            cache_1[i] = i
-            cache_2[i * 2] = i
+            cache_1[f'key_{i}'] = f'value_{i}'
+            cache_2[f'key_{i*2}'] = f'value_{i}'
 
-        assert set(cache_1.keys()) == set(range(n))
-        assert set(cache_2.values()) == set(range(n))
+        assert set(cache_1.keys()) == {f'key_{i}' for i in range(n)}
+        assert set(cache_2.values()) == {f'value_{i}' for i in range(n)}
 
-    def test_noop(self):
-        def do_noop_bulk(cache):
-            with cache.bulk_commit():
-                pass
-            del cache
+    @skip_pypy
+    @pytest.mark.parametrize('limit', [None, 50])
+    def test_sorted__by_size(self, limit):
+        cache = self.init_cache()
+
+        # Insert items with decreasing size
+        for i in range(100):
+            suffix = 'padding' * (100 - i)
+            cache[f'key_{i}'] = f'value_{i}_{suffix}'
 
+        # Sorted items should be in ascending order by size
+        items = list(cache.sorted(key='size', limit=limit))
+        assert len(items) == (limit or 100)
+
+        prev_item = None
+        for item in items:
+            assert prev_item is None or len(prev_item) < len(item)
+            prev_item = item
+
+    @skip_pypy
+    def test_sorted__reversed(self):
         cache = self.init_cache()
-        thread = Thread(target=do_noop_bulk, args=(cache,))
-        thread.start()
-        thread.join()
 
-        # make sure connection is not closed by the thread
+        for i in range(100):
+            cache[f'key_{i+1:03}'] = f'value_{i+1}'
+
+        items = list(cache.sorted(key='key', reversed=True))
+        assert len(items) == 100
+        for i, item in enumerate(items):
+            assert item == f'value_{100-i}'
+
+    @skip_pypy
+    def test_sorted__invalid_sort_key(self):
+        cache = self.init_cache()
         cache['key_1'] = 'value_1'
-        assert list(cache.keys()) == ['key_1']
+        with pytest.raises(ValueError):
+            list(cache.sorted(key='invalid_key'))
 
-    @patch('requests_cache.backends.sqlite.sqlite3')
-    def test_connection_kwargs(self, mock_sqlite):
-        """A spot check to make sure optional connection kwargs gets passed to connection"""
-        cache = self.storage_class('test', use_temp=True, timeout=0.5, invalid_kwarg='???')
-        mock_sqlite.connect.assert_called_with(cache.db_path, timeout=0.5)
+    @skip_pypy
+    @pytest.mark.parametrize('limit', [None, 50])
+    def test_sorted__by_expires(self, limit):
+        cache = self.init_cache()
+        now = datetime.utcnow()
 
+        # Insert items with decreasing expiration time
+        for i in range(100):
+            response = CachedResponse(expires=now + timedelta(seconds=101 - i))
+            cache[f'key_{i}'] = response
 
-class TestSQLiteDict(SQLiteTestCase):
-    storage_class = SQLiteDict
+        # Sorted items should be in ascending order by expiration time
+        items = list(cache.sorted(key='expires', limit=limit))
+        assert len(items) == (limit or 100)
 
+        prev_item = None
+        for item in items:
+            assert prev_item is None or prev_item.expires < item.expires
+            prev_item = item
 
-class TestSQLitePickleDict(SQLiteTestCase):
-    storage_class = SQLitePickleDict
-    picklable = True
+    @skip_pypy
+    def test_sorted__exclude_expired(self):
+        cache = self.init_cache()
+        now = datetime.utcnow()
+
+        # Make only odd numbered items expired
+        for i in range(100):
+            delta = 101 - i
+            if i % 2 == 1:
+                delta -= 101
+
+            response = CachedResponse(status_code=i, expires=now + timedelta(seconds=delta))
+            cache[f'key_{i}'] = response
+
+        # Items should only include unexpired (even numbered) items, and still be in sorted order
+        items = list(cache.sorted(key='expires', expired=False))
+        assert len(items) == 50
+        prev_item = None
+
+        for item in items:
+            assert prev_item is None or prev_item.expires < item.expires
+            assert item.status_code % 2 == 0
+            prev_item = item
+
+    @skip_pypy
+    def test_sorted__error(self):
+        """sorted() should handle deserialization errors and not return invalid responses"""
+
+        class BadSerializer:
+            def loads(self, value):
+                response = pickle.loads(value)
+                if response.cache_key == 'key_42':
+                    raise pickle.PickleError()
+                return response
+
+            def dumps(self, value):
+                return pickle.dumps(value)
+
+        cache = self.init_cache(serializer=BadSerializer())
+
+        for i in range(100):
+            response = CachedResponse(status_code=i)
+            response.cache_key = f'key_{i}'
+            cache[f'key_{i}'] = response
+
+        # Items should exclude the one response that failed to deserialize
+        items = list(cache.sorted())
+        assert len(items) == 99
+
+    @pytest.mark.parametrize(
+        'db_path, use_temp',
+        [
+            ('filesize_test', True),
+            (':memory:', False),
+        ],
+    )
+    def test_size(self, db_path, use_temp):
+        """Test approximate expected size of a database, for both file-based and in-memory databases"""
+        cache = self.init_cache(db_path, use_temp=use_temp)
+        for i in range(100):
+            cache[f'key_{i}'] = f'value_{i}'
+        assert 10000 < cache.size() < 200000
 
 
 class TestSQLiteCache(BaseCacheTest):
@@ -188,3 +304,48 @@ class TestSQLiteCache(BaseCacheTest):
         """
         session = self.init_session()
         assert session.cache.db_path == session.cache.responses.db_path
+
+    def test_count(self):
+        """count() should work the same as len(), but with the option to exclude expired responses"""
+        session = self.init_session()
+        now = datetime.utcnow()
+        session.cache.responses['key_1'] = CachedResponse(expires=now + timedelta(1))
+        session.cache.responses['key_2'] = CachedResponse(expires=now - timedelta(1))
+
+        assert session.cache.count() == 2
+        assert session.cache.count(expired=False) == 1
+
+    @patch.object(SQLiteDict, 'sorted')
+    def test_filter__expired(self, mock_sorted):
+        """Filtering by expired should use a more efficient SQL query"""
+        session = self.init_session()
+
+        session.cache.filter()
+        mock_sorted.assert_called_with(expired=True)
+
+        session.cache.filter(expired=False)
+        mock_sorted.assert_called_with(expired=False)
+
+    def test_sorted(self):
+        """Test wrapper method for SQLiteDict.sorted(), with all arguments combined"""
+        session = self.init_session(clear=False)
+        now = datetime.utcnow()
+
+        # Insert items with decreasing expiration time
+        for i in range(500):
+            delta = 1000 - i
+            if i > 400:
+                delta -= 2000
+
+            response = CachedResponse(status_code=i, expires=now + timedelta(seconds=delta))
+            session.cache.responses[f'key_{i}'] = response
+
+        # With reversed=True, items should be in descending order by expiration time
+        items = list(session.cache.sorted(key='expires', expired=False, reversed=True, limit=100))
+        assert len(items) == 100
+
+        prev_item = None
+        for item in items:
+            assert prev_item is None or prev_item.expires > item.expires
+            assert item.cache_key
+            assert not item.is_expired
+            prev_item = item
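 
A hedged usage sketch for the `count()`, `filter()`, and `sorted()` wrappers exercised above; the session name and URL are placeholders:

    from requests_cache import CachedSession

    session = CachedSession('demo_cache', use_temp=True)
    session.get('https://example.com')

    print(session.cache.count())               # All cached responses
    print(session.cache.count(expired=False))  # Unexpired responses only
    for response in session.cache.sorted(key='expires', expired=False, limit=10):
        print(response.cache_key, response.expires)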
diff --git a/tests/integration/test_compat.py b/tests/integration/test_upgrade.py
similarity index 68%
rename from tests/integration/test_compat.py
rename to tests/integration/test_upgrade.py
index 381b675..162024e 100644
--- a/tests/integration/test_compat.py
+++ b/tests/integration/test_upgrade.py
@@ -3,11 +3,9 @@ from shutil import copyfile
 import pytest
 
 from requests_cache import CachedSession
-from tests.conftest import HTTPBIN_FORMATS, SAMPLE_CACHE_FILES
+from tests.conftest import HTTPBIN_FORMATS, SAMPLE_CACHE_FILES, httpbin
 
 
-# TODO: Debug why this sometimes fails (mostly just on GitHub Actions)
-@pytest.mark.flaky(reruns=3)
 @pytest.mark.parametrize('db_path', SAMPLE_CACHE_FILES)
 def test_version_upgrade(db_path, tempfile_path):
     """Load SQLite cache files created with older versions of requests-cache.
@@ -21,5 +19,5 @@ def test_version_upgrade(db_path, tempfile_path):
     session = CachedSession(tempfile_path)
 
     for response_format in HTTPBIN_FORMATS:
-        session.get(f'https://httpbin.org/{response_format}').from_cache
-        assert session.get(f'https://httpbin.org/{response_format}').from_cache is True
+        session.get(httpbin(response_format)).from_cache
+        assert session.get(httpbin(response_format)).from_cache is True
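 
The renamed upgrade test reduces to a simple pattern: copy a cache file written by an older release, open it with the current release, and expect cache hits. A hedged standalone sketch (the paths are placeholders; the sample file is one of those added below):

    from shutil import copyfile

    from requests_cache import CachedSession

    copyfile('tests/sample_data/sample.db.0.9.8', '/tmp/upgraded.db')
    session = CachedSession('/tmp/upgraded.db')

    session.get('https://httpbin.org/get')  # Warm/validate the entry
    assert session.get('https://httpbin.org/get').from_cache is True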
diff --git a/tests/sample_data/sample.db.0.7.5 b/tests/sample_data/sample.db.0.7.5
new file mode 100644
index 0000000..bc0d2ad
Binary files /dev/null and b/tests/sample_data/sample.db.0.7.5 differ
diff --git a/tests/sample_data/sample.db.0.8.1 b/tests/sample_data/sample.db.0.8.1
new file mode 100644
index 0000000..8da448d
Binary files /dev/null and b/tests/sample_data/sample.db.0.8.1 differ
diff --git a/tests/sample_data/sample.db.0.9.0 b/tests/sample_data/sample.db.0.9.0
new file mode 100644
index 0000000..4964b00
Binary files /dev/null and b/tests/sample_data/sample.db.0.9.0 differ
diff --git a/tests/sample_data/sample.db.0.9.1 b/tests/sample_data/sample.db.0.9.1
new file mode 100644
index 0000000..f4b8803
Binary files /dev/null and b/tests/sample_data/sample.db.0.9.1 differ
diff --git a/tests/sample_data/sample.db.0.9.2 b/tests/sample_data/sample.db.0.9.2
new file mode 100644
index 0000000..42c2965
Binary files /dev/null and b/tests/sample_data/sample.db.0.9.2 differ
diff --git a/tests/sample_data/sample.db.0.9.3 b/tests/sample_data/sample.db.0.9.3
new file mode 100644
index 0000000..9fd788b
Binary files /dev/null and b/tests/sample_data/sample.db.0.9.3 differ
diff --git a/tests/sample_data/sample.db.0.9.4 b/tests/sample_data/sample.db.0.9.4
new file mode 100644
index 0000000..e3611c4
Binary files /dev/null and b/tests/sample_data/sample.db.0.9.4 differ
diff --git a/tests/sample_data/sample.db.0.9.5 b/tests/sample_data/sample.db.0.9.5
new file mode 100644
index 0000000..b8aba8c
Binary files /dev/null and b/tests/sample_data/sample.db.0.9.5 differ
diff --git a/tests/sample_data/sample.db.0.9.6 b/tests/sample_data/sample.db.0.9.6
new file mode 100644
index 0000000..9e64314
Binary files /dev/null and b/tests/sample_data/sample.db.0.9.6 differ
diff --git a/tests/sample_data/sample.db.0.9.7 b/tests/sample_data/sample.db.0.9.7
new file mode 100644
index 0000000..a5488ea
Binary files /dev/null and b/tests/sample_data/sample.db.0.9.7 differ
diff --git a/tests/sample_data/sample.db.0.9.8 b/tests/sample_data/sample.db.0.9.8
new file mode 100644
index 0000000..e77941a
Binary files /dev/null and b/tests/sample_data/sample.db.0.9.8 differ
diff --git a/tests/sample_data/sample.db.1.0.0 b/tests/sample_data/sample.db.1.0.0
new file mode 100644
index 0000000..9d829c5
Binary files /dev/null and b/tests/sample_data/sample.db.1.0.0 differ
diff --git a/tests/sample_data/vcr_requests_cache.yaml b/tests/sample_data/vcr_requests_cache.yaml
new file mode 100644
index 0000000..a827b0e
--- /dev/null
+++ b/tests/sample_data/vcr_requests_cache.yaml
@@ -0,0 +1,46 @@
+http_interactions:
+- recorded_at: '2021-08-15T02:14:11.003635'
+  request:
+    body: !!binary |
+      Tm9uZQ==
+    headers:
+      Accept:
+      - '*/*'
+      Accept-Encoding:
+      - gzip, deflate
+      Connection:
+      - keep-alive
+      User-Agent:
+      - python-requests/2.26.0
+    method: GET
+    uri: https://httpbin.org/get
+  response:
+    body:
+      encoding: utf-8
+      string: !!binary |
+        ewogICJhcmdzIjoge30sIAogICJoZWFkZXJzIjogewogICAgIkFjY2VwdCI6ICIqLyoiLCAKICAg
+        ICJBY2NlcHQtRW5jb2RpbmciOiAiZ3ppcCwgZGVmbGF0ZSIsIAogICAgIkhvc3QiOiAiaHR0cGJp
+        bi5vcmciLCAKICAgICJVc2VyLUFnZW50IjogInB5dGhvbi1yZXF1ZXN0cy8yLjI2LjAiLCAKICAg
+        ICJYLUFtem4tVHJhY2UtSWQiOiAiUm9vdD0xLTYxMTg3ODcyLTM4YjE1NmZhMjliOTU3ODMzM2Nj
+        NmJmMCIKICB9LCAKICAib3JpZ2luIjogIjE3My4yMS4xMjYuMTQ1IiwgCiAgInVybCI6ICJodHRw
+        czovL2h0dHBiaW4ub3JnL2dldCIKfQo=
+    headers:
+      Access-Control-Allow-Credentials:
+      - 'true'
+      Access-Control-Allow-Origin:
+      - '*'
+      Connection:
+      - keep-alive
+      Content-Length:
+      - '308'
+      Content-Type:
+      - application/json
+      Date:
+      - Sun, 15 Aug 2021 02:14:10 GMT
+      Server:
+      - gunicorn/19.9.0
+    status:
+      code: 200
+      reason: OK
+    url: https://httpbin.org/get
+recorded_with: requests-cache 0.8.0
diff --git a/tests/sample_data/vcr_vcrpy.yaml b/tests/sample_data/vcr_vcrpy.yaml
new file mode 100644
index 0000000..bcf8836
--- /dev/null
+++ b/tests/sample_data/vcr_vcrpy.yaml
@@ -0,0 +1,76 @@
+interactions:
+- request:
+    body: null
+    headers:
+      Accept:
+      - '*/*'
+      Accept-Encoding:
+      - gzip, deflate
+      Connection:
+      - keep-alive
+      User-Agent:
+      - python-requests/2.26.0
+    method: GET
+    uri: https://httpbin.org/get
+  response:
+    body:
+      string: "{\n  \"args\": {}, \n  \"headers\": {\n    \"Accept\": \"*/*\", \n
+        \   \"Accept-Encoding\": \"gzip, deflate\", \n    \"Host\": \"httpbin.org\",
+        \n    \"User-Agent\": \"python-requests/2.26.0\", \n    \"X-Amzn-Trace-Id\":
+        \"Root=1-61269390-110f184a5a3b52ef31278574\"\n  }, \n  \"origin\": \"173.21.126.145\",
+        \n  \"url\": \"https://httpbin.org/get\"\n}\n"
+    headers:
+      Access-Control-Allow-Credentials:
+      - 'true'
+      Access-Control-Allow-Origin:
+      - '*'
+      Connection:
+      - keep-alive
+      Content-Length:
+      - '308'
+      Content-Type:
+      - application/json
+      Date:
+      - Wed, 25 Aug 2021 19:01:36 GMT
+      Server:
+      - gunicorn/19.9.0
+    status:
+      code: 200
+      message: OK
+- request:
+    body: null
+    headers:
+      Accept:
+      - '*/*'
+      Accept-Encoding:
+      - gzip, deflate
+      Connection:
+      - keep-alive
+      User-Agent:
+      - python-requests/2.26.0
+    method: GET
+    uri: https://httpbin.org/ip
+  response:
+    body:
+      string: "{\n  \"origin\": \"173.21.126.145\"\n}\n"
+    headers:
+      Access-Control-Allow-Credentials:
+      - 'true'
+      Access-Control-Allow-Origin:
+      - '*'
+      Connection:
+      - keep-alive
+      Content-Length:
+      - '33'
+      Content-Type:
+      - application/json
+      Date:
+      - Wed, 25 Aug 2021 19:09:56 GMT
+      Server:
+      - gunicorn/19.9.0
+      Foo:
+      - bar
+    status:
+      code: 200
+      message: OK
+version: 1
diff --git a/tests/unit/models/test_base.py b/tests/unit/models/test_base.py
new file mode 100644
index 0000000..d91ddc2
--- /dev/null
+++ b/tests/unit/models/test_base.py
@@ -0,0 +1,32 @@
+from typing import List
+
+from attr import define, field
+
+from requests_cache.models import RichMixin
+
+
+@define
+class DemoModel(RichMixin):
+    str_attr: str = field(default=None)
+    int_attr: int = field(default=None)
+    list_attr: List[str] = field(factory=list)
+    _private_attr: bool = field(default=False, repr=False)
+
+
+def test_rich_mixin():
+    """Test that RichMixin.__rich_repr__ informs rich about all public attributes, current values,
+    and defaults
+    """
+    model = DemoModel(str_attr='str', int_attr=1, list_attr=['a', 'b'])
+    repr_tokens = list(model.__rich_repr__())
+    assert repr_tokens == [
+        ('str_attr', 'str', None),
+        ('int_attr', 1, None),
+        ('list_attr', ['a', 'b'], []),
+    ]
+
+
+def test_repr():
+    """Test that regular __repr__ excludes default values"""
+    assert repr(DemoModel()) == 'DemoModel()'
+    assert repr(DemoModel(str_attr='str')) == "DemoModel(str_attr='str')"
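 
For reference, a hedged sketch of the `__rich_repr__` protocol that `RichMixin` is tested against; this illustrates rich's documented protocol, not the library's implementation:

    class Point:
        def __init__(self, x: int = 0, y: int = 0):
            self.x = x
            self.y = y

        def __rich_repr__(self):
            # Yield (name, value, default) tuples; rich omits values equal to the default
            yield 'x', self.x, 0
            yield 'y', self.y, 0

    assert list(Point(x=1).__rich_repr__()) == [('x', 1, 0), ('y', 0, 0)]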
diff --git a/tests/unit/models/test_raw_response.py b/tests/unit/models/test_raw_response.py
index 06acbf9..9ec1591 100644
--- a/tests/unit/models/test_raw_response.py
+++ b/tests/unit/models/test_raw_response.py
@@ -28,6 +28,12 @@ def test_read():
     assert raw._fp.closed is True
 
 
+def test_read__always_decode():
+    """read() with decode_content=False is not supported"""
+    raw = CachedHTTPResponse(body=b'mock response', headers={'Content-Encoding': 'gzip'})
+    assert raw.read(decode_content=False) == b'mock response'
+
+
 def test_close():
     raw = CachedHTTPResponse(body=b'mock response')
     raw.close()
diff --git a/tests/unit/models/test_request.py b/tests/unit/models/test_request.py
index aa3487a..3834103 100644
--- a/tests/unit/models/test_request.py
+++ b/tests/unit/models/test_request.py
@@ -14,3 +14,4 @@ def test_from_request(mock_session):
     assert response.request.method == request.method == 'GET'
     assert response.request.path_url == request.path_url == '/text'
     assert response.request.url == request.url == MOCKED_URL
+    assert response.request._cookies == request._cookies == request.cookies == {}
diff --git a/tests/unit/models/test_response.py b/tests/unit/models/test_response.py
index 4608bc1..d7f40b9 100644
--- a/tests/unit/models/test_response.py
+++ b/tests/unit/models/test_response.py
@@ -75,7 +75,7 @@ def test_iterator(mock_session):
         last_request_chunks = chunks
 
 
-def test_revalidate__extend_expiration(mock_session):
+def test_reset_expiration__extend_expiration(mock_session):
     # Start with an expired response
     response = CachedResponse.from_response(
         mock_session.get(MOCKED_URL),
@@ -83,14 +83,14 @@ def test_revalidate__extend_expiration(mock_session):
     )
     assert response.is_expired is True
 
-    # Set expiration in the future and revalidate
-    is_expired = response.revalidate(datetime.utcnow() + timedelta(seconds=0.01))
+    # Set expiration in the future
+    is_expired = response.reset_expiration(datetime.utcnow() + timedelta(seconds=0.01))
     assert is_expired is response.is_expired is False
     sleep(0.1)
     assert response.is_expired is True
 
 
-def test_revalidate__shorten_expiration(mock_session):
+def test_reset_expiration__shorten_expiration(mock_session):
     # Start with a non-expired response
     response = CachedResponse.from_response(
         mock_session.get(MOCKED_URL),
@@ -98,8 +98,8 @@ def test_revalidate__shorten_expiration(mock_session):
     )
     assert response.is_expired is False
 
-    # Set expiration in the past and revalidate
-    is_expired = response.revalidate(datetime.utcnow() - timedelta(seconds=1))
+    # Set expiration in the past
+    is_expired = response.reset_expiration(datetime.utcnow() - timedelta(seconds=1))
     assert is_expired is response.is_expired is True
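 
As exercised above, the renamed method's contract is that `reset_expiration()` assigns a new expiration and returns the resulting expired state. A hedged sketch:

    from datetime import datetime, timedelta

    from requests_cache.models import CachedResponse

    response = CachedResponse(expires=datetime.utcnow() - timedelta(seconds=1))
    assert response.is_expired is True

    # Returns the new is_expired value after resetting the expiration
    assert response.reset_expiration(datetime.utcnow() + timedelta(hours=1)) is False
    assert response.is_expired is False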
 
 
diff --git a/tests/unit/policy/test_actions.py b/tests/unit/policy/test_actions.py
index 5b14334..c50e212 100644
--- a/tests/unit/policy/test_actions.py
+++ b/tests/unit/policy/test_actions.py
@@ -1,26 +1,24 @@
-from datetime import datetime, timedelta, timezone
-from unittest.mock import MagicMock, patch
+from datetime import datetime, timedelta
+from unittest.mock import patch
 
 import pytest
-from requests import PreparedRequest
+from requests import PreparedRequest, Request
 
-from requests_cache.models.response import CachedResponse
-from requests_cache.policy import (
-    DO_NOT_CACHE,
-    CacheActions,
-    get_expiration_datetime,
-    get_url_expiration,
-)
-from tests.conftest import ETAG, HTTPDATE_DATETIME, HTTPDATE_STR, LAST_MODIFIED
+from requests_cache.cache_keys import create_key
+from requests_cache.models import CachedResponse
+from requests_cache.policy.actions import EXPIRE_IMMEDIATELY, CacheActions
+from requests_cache.policy.settings import CacheSettings
+from tests.conftest import ETAG, HTTPDATE_STR, LAST_MODIFIED, MOCKED_URL, get_mock_response
 
 IGNORED_DIRECTIVES = [
-    'must-revalidate',
     'no-transform',
     'private',
     'proxy-revalidate',
     'public',
     's-maxage=<seconds>',
 ]
+BASIC_REQUEST = Request(method='GET', url='https://site.com/img.jpg', headers={})
+EXPIRED_RESPONSE = CachedResponse(expires=datetime.utcnow() - timedelta(1))
 
 
 @pytest.mark.parametrize(
@@ -48,13 +46,8 @@ def test_init(
         request.headers = {'Cache-Control': f'max-age={request_expire_after}'}
     get_url_expiration.return_value = url_expire_after
 
-    actions = CacheActions.from_request(
-        cache_key='key',
-        request=request,
-        request_expire_after=request_expire_after,
-        session_expire_after=1,
-        cache_control=True,
-    )
+    settings = CacheSettings(cache_control=True, expire_after=1)
+    actions = CacheActions.from_request(cache_key='key', request=request, settings=settings)
     assert actions.expire_after == expected_expiration
 
 
@@ -65,101 +58,107 @@ def test_init(
         ({'Expires': HTTPDATE_STR}, None),  # Only valid for response headers
         ({'Cache-Control': 'max-age=60'}, 60),
         ({'Cache-Control': 'public, max-age=60'}, 60),
-        ({'Cache-Control': b'public, max-age=60'}, 60),
-        ({'Cache-Control': 'max-age=0'}, DO_NOT_CACHE),
-        ({'Cache-Control': 'no-store'}, DO_NOT_CACHE),
+        ({'Cache-Control': b'public, max-age=60'}, 60),  # requests-oauthlib casts headers to bytes
+        ({'Cache-Control': 'max-age=0'}, EXPIRE_IMMEDIATELY),
     ],
 )
 def test_init_from_headers(headers, expected_expiration):
     """Test with Cache-Control request headers"""
-    actions = CacheActions.from_request(
-        cache_key='key', cache_control=True, request=MagicMock(headers=headers)
-    )
+    settings = CacheSettings(cache_control=True)
+    request = Request(method='GET', url=MOCKED_URL, headers=headers).prepare()
+    actions = CacheActions.from_request('key', request, settings)
 
     assert actions.cache_key == 'key'
-    if expected_expiration == DO_NOT_CACHE:
-        assert actions.skip_read is True
-        assert actions.skip_write is True
-    else:
+    if expected_expiration != EXPIRE_IMMEDIATELY:
         assert actions.expire_after == expected_expiration
         assert actions.skip_read is False
         assert actions.skip_write is False
 
 
+def test_init_from_headers__no_store():
+    """Test with Cache-Control request headers"""
+    settings = CacheSettings(cache_control=True)
+    request = Request(method='GET', url=MOCKED_URL, headers={'Cache-Control': 'no-store'}).prepare()
+    actions = CacheActions.from_request('key', request, settings)
+
+    assert actions.skip_read is True
+    assert actions.skip_write is True
+
+
 @pytest.mark.parametrize(
     'url, request_expire_after, expected_expiration',
     [
-        ('img.site_1.com', None, timedelta(hours=12)),
-        ('img.site_1.com', 60, 60),
-        ('http://img.site.com/base/', None, 1),
+        ('https://img.site_1.com', None, timedelta(hours=12)),
+        ('https://img.site_1.com', 60, 60),
+        ('https://img.site.com/base/', None, 1),
         ('https://img.site.com/base/img.jpg', None, 1),
-        ('site_2.com/resource_1', None, timedelta(hours=20)),
-        ('http://site_2.com/resource_1/index.html', None, timedelta(hours=20)),
+        ('http://site_2.com/resource_1', None, timedelta(hours=20)),
+        ('ftp://site_2.com/resource_1/index.html', None, timedelta(hours=20)),
         ('http://site_2.com/resource_2/', None, timedelta(days=7)),
         ('http://site_2.com/static/', None, -1),
         ('http://site_2.com/static/img.jpg', None, -1),
-        ('site_2.com', None, 1),
-        ('site_2.com', 60, 60),
-        ('some_other_site.com', None, 1),
-        ('some_other_site.com', 60, 60),
+        ('http://site_2.com', None, 1),
+        ('http://site_2.com', 60, 60),
+        ('https://some_other_site.com', None, 1),
+        ('https://some_other_site.com', 60, 60),
     ],
 )
 def test_init_from_settings(url, request_expire_after, expected_expiration):
     """Test with per-session, per-request, and per-URL expiration"""
-    urls_expire_after = {
-        '*.site_1.com': timedelta(hours=12),
-        'site_2.com/resource_1': timedelta(hours=20),
-        'site_2.com/resource_2': timedelta(days=7),
-        'site_2.com/static': -1,
-    }
-    request = MagicMock(url=url)
+    settings = CacheSettings(
+        expire_after=1,
+        urls_expire_after={
+            '*.site_1.com': timedelta(hours=12),
+            'site_2.com/resource_1': timedelta(hours=20),
+            'site_2.com/resource_2': timedelta(days=7),
+            'site_2.com/static': -1,
+        },
+    )
+    request = Request(method='GET', url=url)
     if request_expire_after:
         request.headers = {'Cache-Control': f'max-age={request_expire_after}'}
 
-    actions = CacheActions.from_request(
-        cache_key='key',
-        request=request,
-        session_expire_after=1,
-        urls_expire_after=urls_expire_after,
-    )
+    actions = CacheActions.from_request('key', request.prepare(), settings)
     assert actions.expire_after == expected_expiration
 
 
 @pytest.mark.parametrize(
-    'cache_control, headers, expire_after, expected_expiration, expected_skip_read',
+    'headers, expire_after, expected_expiration, expected_skip_read',
     [
-        (False, {'Cache-Control': 'max-age=60'}, 1, 60, False),
-        (False, {}, 1, 1, False),
-        (False, {}, 0, 0, True),
-        (True, {'Cache-Control': 'max-age=60'}, 1, 60, False),
-        (True, {'Cache-Control': 'max-age=0'}, 1, 0, True),
-        (True, {'Cache-Control': 'no-store'}, 1, 1, True),
-        (True, {'Cache-Control': 'no-cache'}, 1, 1, True),
-        (True, {}, 1, 1, False),
-        (True, {}, 0, 0, False),
+        ({'Cache-Control': 'max-age=60'}, 1, 60, False),
+        ({}, 1, 1, False),
+        ({}, 0, 0, False),
+        ({'Cache-Control': 'max-age=60'}, 1, 60, False),
+        ({'Cache-Control': 'max-age=0'}, 1, 0, False),
+        ({'Cache-Control': 'no-store'}, 1, 1, True),
+        ({'Cache-Control': 'no-cache'}, 1, 1, True),
     ],
 )
 def test_init_from_settings_and_headers(
-    cache_control, headers, expire_after, expected_expiration, expected_skip_read
+    headers, expire_after, expected_expiration, expected_skip_read
 ):
-    """Test behavior with both cache settings and request headers. The only variation in behavior
-    with cache_control=True is that expire_after=0 should *not* cause the cache read to be skipped.
-    """
-    request = MagicMock(
-        url='https://img.site.com/base/img.jpg',
-        headers=headers,
-    )
+    """Test behavior with both cache settings and request headers."""
+    request = Request(method='GET', url=MOCKED_URL, headers=headers)
+    settings = CacheSettings(expire_after=expire_after)
+    actions = CacheActions.from_request('key', request, settings)
 
-    actions = CacheActions.from_request(
-        cache_key='key',
-        cache_control=cache_control,
-        request=request,
-        session_expire_after=expire_after,
-    )
     assert actions.expire_after == expected_expiration
     assert actions.skip_read == expected_skip_read
 
 
+def test_update_from_cached_response__new_request():
+    actions = CacheActions.from_request('key', BASIC_REQUEST)
+    actions.update_from_cached_response(None)
+    assert actions.send_request is True
+
+
+def test_update_from_cached_response__resend_request():
+    actions = CacheActions.from_request('key', BASIC_REQUEST)
+
+    actions.update_from_cached_response(EXPIRED_RESPONSE)
+    assert actions.resend_request is True
+
+
 @pytest.mark.parametrize(
     'response_headers, expected_validation_headers',
     [
@@ -172,43 +171,238 @@ def test_init_from_settings_and_headers(
         ),
     ],
 )
-def test_update_from_cached_response(response_headers, expected_validation_headers):
-    """Test that conditional request headers are added if the cached response is expired"""
-    actions = CacheActions.from_request(
-        cache_key='key',
-        request=MagicMock(url='https://img.site.com/base/img.jpg'),
-    )
+def test_update_from_cached_response__revalidate(response_headers, expected_validation_headers):
+    """Conditional request headers should be added if the cached response is expired"""
+    actions = CacheActions.from_request('key', BASIC_REQUEST)
     cached_response = CachedResponse(
-        headers=response_headers, expires=datetime.now() - timedelta(1)
+        headers=response_headers, expires=datetime.utcnow() - timedelta(1)
     )
 
     actions.update_from_cached_response(cached_response)
-    assert actions.validation_headers == expected_validation_headers
+    assert actions.send_request is bool(expected_validation_headers)
+    assert actions._validation_headers == expected_validation_headers
 
 
-def test_update_from_cached_response__ignored():
-    """Test that conditional request headers are NOT applied if the cached response is not expired"""
+@pytest.mark.parametrize(
+    'response_headers',
+    [
+        {'Cache-Control': 'no-cache'},
+        {'Cache-Control': 'max-age=0,must-revalidate'},
+    ],
+)
+@pytest.mark.parametrize('cache_control', [True, False])
+def test_update_from_cached_response__force_revalidate(cache_control, response_headers):
+    """Conditional request headers should be added if requested by response headers, even if the
+    response is not expired
+    """
     actions = CacheActions.from_request(
-        cache_key='key',
-        request=MagicMock(url='https://img.site.com/base/img.jpg'),
+        'key',
+        request=Request(url='https://img.site.com/base/img.jpg', headers={}),
+        settings=CacheSettings(cache_control=cache_control),
     )
+    cached_response = CachedResponse(headers={'ETag': ETAG, **response_headers}, expires=None)
+
+    actions.update_from_cached_response(cached_response)
+
+    # cache_control=False overrides revalidation in this case
+    if cache_control is False:
+        assert actions.send_request is False
+        assert not actions._validation_headers
+    else:
+        assert actions.send_request is True
+        assert actions._validation_headers == {'If-None-Match': ETAG}
+
+
+def test_update_from_cached_response__no_revalidation():
+    """Conditional request headers should NOT be added if the cached response is not expired and
+    revalidation is otherwise not requested"""
+    actions = CacheActions.from_request('key', BASIC_REQUEST)
     cached_response = CachedResponse(
         headers={'ETag': ETAG, 'Last-Modified': LAST_MODIFIED}, expires=None
     )
 
     actions.update_from_cached_response(cached_response)
-    assert actions.validation_headers == {}
+    assert actions._validation_headers == {}
+
+
+def test_update_from_cached_response__504():
+    settings = CacheSettings(only_if_cached=True)
+    actions = CacheActions.from_request('key', BASIC_REQUEST, settings=settings)
+    actions.update_from_cached_response(EXPIRED_RESPONSE)
+    assert actions.error_504 is True
+
+
+def test_update_from_cached_response__stale_if_error():
+    settings = CacheSettings(only_if_cached=True, stale_if_error=True)
+    actions = CacheActions.from_request('key', BASIC_REQUEST, settings=settings)
+    actions.update_from_cached_response(EXPIRED_RESPONSE)
+    assert actions.error_504 is False and actions.resend_request is False
+
+
+def test_update_from_cached_response__stale_while_revalidate():
+    settings = CacheSettings(only_if_cached=True, stale_while_revalidate=True)
+    actions = CacheActions.from_request('key', BASIC_REQUEST, settings=settings)
+    actions.update_from_cached_response(EXPIRED_RESPONSE)
+    assert actions.resend_async is True
+
+
+@pytest.mark.parametrize(
+    'vary, cached_headers, new_headers, expected_match',
+    [
+        ({}, {}, {}, True),
+        ({'Vary': 'Accept'}, {'Accept': 'application/json'}, {'Accept': 'application/json'}, True),
+        ({'Vary': 'Accept'}, {'Accept': 'application/json'}, {}, False),
+        (
+            {'Vary': 'Accept'},
+            {'Accept': 'application/json'},
+            {'Accept': 'application/json', 'Accept-Language': 'en'},
+            True,
+        ),
+        (
+            {'Vary': 'Accept-Encoding'},
+            {'Accept': 'application/json'},
+            {'Accept': 'text/html'},
+            True,
+        ),
+        ({'Vary': 'Accept'}, {'Accept': 'application/json'}, {'Accept': 'text/html'}, False),
+        (
+            {'Vary': 'Accept-Encoding'},
+            {'Accept-Encoding': 'gzip,deflate'},
+            {'Accept-Encoding': 'gzip,deflate'},
+            True,
+        ),
+        # Only basic header normalization is done in create_key() (whitespace, case, order)
+        (
+            {'Vary': 'Accept-Encoding'},
+            {'Accept-Encoding': 'gzip,deflate'},
+            {'Accept-Encoding': 'dEfLaTe,  GZIP, '},
+            True,
+        ),
+        (
+            {'Vary': 'Accept-Encoding'},
+            {'Accept-Encoding': 'gzip,deflate'},
+            {'Accept-Encoding': 'gzip,br'},
+            False,
+        ),
+        (
+            {'Vary': 'Accept, Accept-Encoding'},
+            {'Accept': 'application/json', 'Accept-Encoding': 'gzip,deflate'},
+            {'Accept': 'application/json', 'Accept-Encoding': 'gzip,deflate'},
+            True,
+        ),
+        (
+            {'Vary': 'Accept, Accept-Encoding'},
+            {'Accept': 'application/json', 'Accept-Encoding': 'gzip,deflate'},
+            {'Accept': 'application/json', 'Accept-Encoding': 'br'},
+            False,
+        ),
+        (
+            {'Vary': 'Accept, Accept-Encoding'},
+            {'Accept': 'application/json', 'Accept-Encoding': 'gzip,deflate'},
+            {'Accept': 'text/html', 'Accept-Encoding': 'gzip,deflate'},
+            False,
+        ),
+        (
+            {'Vary': 'Accept, Accept-Encoding'},
+            {'Accept': 'application/json', 'Accept-Encoding': 'gzip,deflate'},
+            {'Accept-Encoding': 'gzip,deflate'},
+            False,
+        ),
+        ({'Vary': '*'}, {}, {}, False),
+        ({'Vary': '*'}, {'Accept': 'application/json'}, {'Accept': 'application/json'}, False),
+    ],
+)
+def test_update_from_cached_response__vary(vary, cached_headers, new_headers, expected_match):
+    cached_response = CachedResponse(
+        headers=vary,
+        request=Request(method='GET', url='https://site.com/img.jpg', headers=cached_headers),
+    )
+    request = Request(method='GET', url='https://site.com/img.jpg', headers=new_headers)
+    actions = CacheActions.from_request('key', request)
+    actions.update_from_cached_response(cached_response, create_key=create_key)
+
+    # If the headers don't match with respect to Vary, expect a new request (cache miss)
+    assert actions.send_request is not expected_match
+
+
+@pytest.mark.parametrize('max_stale, usable', [(5, False), (15, True)])
+def test_is_usable__max_stale(max_stale, usable):
+    """For a response that expired 10 seconds ago, it may be either accepted or rejected based on
+    max-stale
+    """
+    request = Request(
+        url='https://img.site.com/base/img.jpg',
+        headers={'Cache-Control': f'max-stale={max_stale}'},
+    )
+    actions = CacheActions.from_request('key', request)
+    cached_response = CachedResponse(expires=datetime.utcnow() - timedelta(seconds=10))
+    assert actions.is_usable(cached_response) is usable
+
+
+@pytest.mark.parametrize('min_fresh, usable', [(5, True), (15, False)])
+def test_is_usable__min_fresh(min_fresh, usable):
+    """For a response that expires in 10 seconds, it may be either accepted or rejected based on
+    min-fresh
+    """
+    request = Request(
+        url='https://img.site.com/base/img.jpg',
+        headers={'Cache-Control': f'min-fresh={min_fresh}'},
+    )
+    actions = CacheActions.from_request('key', request)
+    cached_response = CachedResponse(expires=datetime.utcnow() + timedelta(seconds=10))
+    assert actions.is_usable(cached_response) is usable
+
+
+@pytest.mark.parametrize(
+    'stale_if_error, error, usable',
+    [
+        (5, True, False),
+        (15, True, True),
+        (15, False, False),
+    ],
+)
+def test_is_usable__stale_if_error(stale_if_error, error, usable):
+    """For a response that expired 10 seconds ago, if an error occured while refreshing, it may be
+    either accepted or rejected based on stale-if-error
+    """
+    request = Request(
+        url='https://img.site.com/base/img.jpg',
+        headers={'Cache-Control': f'stale-if-error={stale_if_error}'},
+    )
+    actions = CacheActions.from_request('key', request)
+    cached_response = CachedResponse(expires=datetime.utcnow() - timedelta(seconds=10))
+    assert actions.is_usable(cached_response, error=error) is usable
+
+
+@pytest.mark.parametrize(
+    'stale_while_revalidate, usable',
+    [
+        (5, False),
+        (15, True),
+    ],
+)
+def test_is_usable__stale_while_revalidate(stale_while_revalidate, usable):
+    """For a response that expired 10 seconds ago, if an error occured while refreshing, it may be
+    either accepted or rejected based on stale-while-revalidate
+    """
+    request = Request(
+        url='https://img.site.com/base/img.jpg',
+        headers={'Cache-Control': f'stale-while-revalidate={stale_while_revalidate}'},
+    )
+    actions = CacheActions.from_request('key', request)
+    cached_response = CachedResponse(expires=datetime.utcnow() - timedelta(seconds=10))
+    assert actions.is_usable(cached_response=cached_response) is usable
 
 
 @pytest.mark.parametrize(
     'headers, expected_expiration',
     [
         ({}, None),
-        ({'Cache-Control': 'no-cache'}, None),  # Only valid for request headers
+        ({'Cache-Control': 'no-cache'}, None),  # Forces revalidation, but no effect on expiration
+        ({'Cache-Control': 'max-age=0'}, 0),
         ({'Cache-Control': 'max-age=60'}, 60),
         ({'Cache-Control': 'public, max-age=60'}, 60),
-        ({'Cache-Control': 'max-age=0'}, DO_NOT_CACHE),
-        ({'Cache-Control': 'no-store'}, DO_NOT_CACHE),
+        ({'Cache-Control': 'max-age=0'}, 0),
         ({'Cache-Control': 'immutable'}, -1),
         ({'Cache-Control': 'immutable, max-age=60'}, -1),  # Immutable should take precedence
         ({'Expires': HTTPDATE_STR}, HTTPDATE_STR),
@@ -217,44 +411,36 @@ def test_update_from_cached_response__ignored():
 )
 def test_update_from_response(headers, expected_expiration):
     """Test with Cache-Control response headers"""
-    url = 'https://img.site.com/base/img.jpg'
-    actions = CacheActions.from_request(
-        cache_key='key',
-        request=MagicMock(url=url),
-        cache_control=True,
-    )
-    actions.update_from_response(MagicMock(url=url, headers=headers))
+    actions = CacheActions.from_request('key', BASIC_REQUEST, CacheSettings(cache_control=True))
+    actions.update_from_response(get_mock_response(headers=headers))
 
-    if expected_expiration == DO_NOT_CACHE:
-        assert not actions.expire_after  # May be either 0 or None
-        assert actions.skip_write is True
-    else:
-        assert actions.expire_after == expected_expiration
-        assert actions.skip_write is False
+    assert actions.expire_after == expected_expiration
+    assert actions.skip_write is (expected_expiration == EXPIRE_IMMEDIATELY)
+
+
+def test_update_from_response__no_store():
+    actions = CacheActions.from_request('key', BASIC_REQUEST, CacheSettings(cache_control=True))
+    actions.update_from_response(get_mock_response(headers={'Cache-Control': 'no-store'}))
+    assert actions.skip_write is True
 
 
 def test_update_from_response__ignored():
-    url = 'https://img.site.com/base/img.jpg'
-    actions = CacheActions.from_request(
-        cache_key='key', request=MagicMock(url=url), cache_control=False
-    )
-    actions.update_from_response(MagicMock(url=url, headers={'Cache-Control': 'max-age=5'}))
+    actions = CacheActions.from_request('key', BASIC_REQUEST, CacheSettings(cache_control=False))
+    actions.update_from_response(get_mock_response(headers={'Cache-Control': 'max-age=5'}))
     assert actions.expire_after is None
 
 
 @pytest.mark.parametrize('validator_headers', [{'ETag': ETAG}, {'Last-Modified': LAST_MODIFIED}])
 @pytest.mark.parametrize('cache_headers', [{'Cache-Control': 'max-age=0'}, {'Expires': '0'}])
-@patch('requests_cache.policy.actions.datetime')
+@patch('requests_cache.expiration.datetime')
 def test_update_from_response__revalidate(mock_datetime, cache_headers, validator_headers):
     """If expiration is 0 and there's a validator, the response should be cached, but with immediate
     expiration
     """
-    url = 'https://img.site.com/base/img.jpg'
-    headers = {**cache_headers, **validator_headers}
-    actions = CacheActions.from_request(
-        cache_key='key', request=MagicMock(url=url), cache_control=True
-    )
-    actions.update_from_response(MagicMock(url=url, headers=headers))
+    actions = CacheActions.from_request('key', BASIC_REQUEST, CacheSettings(cache_control=True))
+    response = get_mock_response(headers={**cache_headers, **validator_headers})
+    actions.update_from_response(response)
+
     assert actions.expires == mock_datetime.utcnow()
     assert actions.skip_write is False
 
@@ -262,91 +448,12 @@ def test_update_from_response__revalidate(mock_datetime, cache_headers, validato
 @pytest.mark.parametrize('directive', IGNORED_DIRECTIVES)
 def test_ignored_headers(directive):
     """Ensure that currently unimplemented Cache-Control headers do not affect behavior"""
-    request = PreparedRequest()
-    request.url = 'https://img.site.com/base/img.jpg'
-    request.headers = {'Cache-Control': directive}
-    actions = CacheActions.from_request(
-        cache_key='key',
-        request=request,
-        session_expire_after=1,
-        cache_control=True,
-    )
-    assert actions.expire_after == 1
-
-
-@patch('requests_cache.policy.actions.datetime')
-def test_get_expiration_datetime__no_expiration(mock_datetime):
-    assert get_expiration_datetime(None) is None
-    assert get_expiration_datetime(-1) is None
-    assert get_expiration_datetime(DO_NOT_CACHE) == mock_datetime.utcnow()
-
-
-@pytest.mark.parametrize(
-    'expire_after, expected_expiration_delta',
-    [
-        (timedelta(seconds=60), timedelta(seconds=60)),
-        (60, timedelta(seconds=60)),
-        (33.3, timedelta(seconds=33.3)),
-    ],
-)
-def test_get_expiration_datetime__relative(expire_after, expected_expiration_delta):
-    expires = get_expiration_datetime(expire_after)
-    expected_expiration = datetime.utcnow() + expected_expiration_delta
-    # Instead of mocking datetime (which adds some complications), check for approximate value
-    assert abs((expires - expected_expiration).total_seconds()) <= 1
+    request = Request(
+        method='GET', url='https://img.site.com/base/img.jpg', headers={'Cache-Control': directive}
+    ).prepare()
+    settings = CacheSettings(expire_after=1, cache_control=True)
+    actions = CacheActions.from_request('key', request, settings)
 
-
-def test_get_expiration_datetime__tzinfo():
-    tz = timezone(-timedelta(hours=5))
-    dt = datetime(2021, 2, 1, 7, 0, tzinfo=tz)
-    assert get_expiration_datetime(dt) == datetime(2021, 2, 1, 12, 0)
-
-
-def test_get_expiration_datetime__httpdate():
-    assert get_expiration_datetime(HTTPDATE_STR) == HTTPDATE_DATETIME
-    assert get_expiration_datetime('P12Y34M56DT78H90M12.345S', ignore_invalid_httpdate=True) is None
-
-
-@pytest.mark.parametrize(
-    'url, expected_expire_after',
-    [
-        ('img.site_1.com', 60 * 60),
-        ('http://img.site_1.com/base/img.jpg', 60 * 60),
-        ('https://img.site_2.com/base/img.jpg', None),
-        ('site_2.com/resource_1', 60 * 60 * 2),
-        ('http://site_2.com/resource_1/index.html', 60 * 60 * 2),
-        ('http://site_2.com/resource_2/', 60 * 60 * 24),
-        ('http://site_2.com/static/', -1),
-        ('http://site_2.com/static/img.jpg', -1),
-        ('site_2.com', None),
-        ('some_other_site.com', None),
-        (None, None),
-    ],
-)
-def test_get_url_expiration(url, expected_expire_after, mock_session):
-    urls_expire_after = {
-        '*.site_1.com': 60 * 60,
-        'site_2.com/resource_1': 60 * 60 * 2,
-        'site_2.com/resource_2': 60 * 60 * 24,
-        'site_2.com/static': -1,
-    }
-    assert get_url_expiration(url, urls_expire_after) == expected_expire_after
-
-
-@pytest.mark.parametrize(
-    'url, expected_expire_after',
-    [
-        ('https://img.site_1.com/image.jpeg', 60 * 60),
-        ('https://img.site_1.com/resource/1', 60 * 60 * 2),
-        ('https://site_2.com', 1),
-        ('https://any_other_site.com', 1),
-    ],
-)
-def test_get_url_expiration__evaluation_order(url, expected_expire_after):
-    """If there are multiple matches, the first match should be used in the order defined"""
-    urls_expire_after = {
-        '*.site_1.com/resource': 60 * 60 * 2,
-        '*.site_1.com': 60 * 60,
-        '*': 1,
-    }
-    assert get_url_expiration(url, urls_expire_after) == expected_expire_after
+    assert actions.expire_after == 1
+    assert actions.skip_read is False
+    assert actions.skip_write is False
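The tests above cover the request Cache-Control directives evaluated by CacheActions.is_usable(): max-stale, min-fresh, stale-if-error, and stale-while-revalidate. For reference, a minimal sketch of how these directives look from the public API; the cache name and URLs are illustrative, and cache_control=True is assumed to enable header-driven behavior:

    from requests_cache import CachedSession

    session = CachedSession('demo_cache', cache_control=True)

    # Accept a cached response even if it expired up to 30 seconds ago
    session.get('https://example.com/api', headers={'Cache-Control': 'max-stale=30'})

    # Only accept a cached response that stays fresh for at least 10 more seconds
    session.get('https://example.com/api', headers={'Cache-Control': 'min-fresh=10'})

    # Fall back to a response expired by up to 60 seconds if refreshing it fails
    session.get('https://example.com/api', headers={'Cache-Control': 'stale-if-error=60'})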
diff --git a/tests/unit/policy/test_expiration.py b/tests/unit/policy/test_expiration.py
new file mode 100644
index 0000000..0a453db
--- /dev/null
+++ b/tests/unit/policy/test_expiration.py
@@ -0,0 +1,95 @@
+import re
+from datetime import datetime, timedelta, timezone
+from unittest.mock import patch
+
+import pytest
+
+from requests_cache.policy.expiration import (
+    EXPIRE_IMMEDIATELY,
+    get_expiration_datetime,
+    get_url_expiration,
+)
+from tests.conftest import HTTPDATE_DATETIME, HTTPDATE_STR
+
+
+@patch('requests_cache.expiration.datetime')
+def test_get_expiration_datetime__no_expiration(mock_datetime):
+    assert get_expiration_datetime(None) is None
+    assert get_expiration_datetime(-1) is None
+    assert get_expiration_datetime(EXPIRE_IMMEDIATELY) == mock_datetime.utcnow()
+
+
+@pytest.mark.parametrize(
+    'expire_after, expected_expiration_delta',
+    [
+        (timedelta(seconds=60), timedelta(seconds=60)),
+        (60, timedelta(seconds=60)),
+        (33.3, timedelta(seconds=33.3)),
+    ],
+)
+def test_get_expiration_datetime__relative(expire_after, expected_expiration_delta):
+    expires = get_expiration_datetime(expire_after)
+    expected_expiration = datetime.utcnow() + expected_expiration_delta
+    # Instead of mocking datetime (which adds some complications), check for approximate value
+    assert abs((expires - expected_expiration).total_seconds()) <= 1
+
+
+def test_get_expiration_datetime__tzinfo():
+    tz = timezone(-timedelta(hours=5))
+    dt = datetime(2021, 2, 1, 7, 0, tzinfo=tz)
+    assert get_expiration_datetime(dt) == datetime(2021, 2, 1, 12, 0)
+
+
+def test_get_expiration_datetime__httpdate():
+    assert get_expiration_datetime(HTTPDATE_STR) == HTTPDATE_DATETIME
+    assert get_expiration_datetime('P12Y34M56DT78H90M12.345S', ignore_invalid_httpdate=True) is None
+    with pytest.raises(ValueError):
+        get_expiration_datetime('P12Y34M56DT78H90M12.345S')
+
+
+@pytest.mark.parametrize(
+    'url, expected_expire_after',
+    [
+        ('img.site_1.com', 60 * 60),
+        ('http://img.site_1.com/base/img.jpg', 60 * 60),
+        ('https://img.site_2.com/base/img.jpg', None),
+        ('site_2.com/resource_1', 60 * 60 * 2),
+        ('http://site_2.com/resource_1/index.html', 60 * 60 * 2),
+        ('http://site_2.com/resource_2/', 60 * 60 * 24),
+        ('http://site_2.com/static/', -1),
+        ('http://site_2.com/api/resource/123', 60 * 60 * 24 * 7),
+        ('http://site_2.com/api/resource/xyz', None),
+        ('http://site_2.com/static/img.jpg', -1),
+        ('site_2.com', None),
+        ('some_other_site.com', None),
+        (None, None),
+    ],
+)
+def test_get_url_expiration(url, expected_expire_after, mock_session):
+    urls_expire_after = {
+        '*.site_1.com': 60 * 60,
+        'site_2.com/resource_1': 60 * 60 * 2,
+        'site_2.com/resource_2': 60 * 60 * 24,
+        re.compile(r'site_2\.com/api/resource/\d+'): 60 * 60 * 24 * 7,
+        'site_2.com/static': -1,
+    }
+    assert get_url_expiration(url, urls_expire_after) == expected_expire_after
+
+
+@pytest.mark.parametrize(
+    'url, expected_expire_after',
+    [
+        ('https://img.site_1.com/image.jpeg', 60 * 60),
+        ('https://img.site_1.com/resource/1', 60 * 60 * 2),
+        ('https://site_2.com', 1),
+        ('https://any_other_site.com', 1),
+    ],
+)
+def test_get_url_expiration__evaluation_order(url, expected_expire_after):
+    """If there are multiple matches, the first match should be used in the order defined"""
+    urls_expire_after = {
+        '*.site_1.com/resource': 60 * 60 * 2,
+        '*.site_1.com': 60 * 60,
+        '*': 1,
+    }
+    assert get_url_expiration(url, urls_expire_after) == expected_expire_after
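The new test module also covers compiled regex patterns in urls_expire_after, alongside the existing glob patterns, with the first matching pattern winning. A sketch of the corresponding session option, mirroring the patterns tested above (values are in seconds; -1 means never expire):

    import re

    from requests_cache import CachedSession

    session = CachedSession(
        'demo_cache',
        urls_expire_after={
            '*.site_1.com': 60 * 60,                                     # glob pattern
            re.compile(r'site_2\.com/api/resource/\d+'): 60 * 60 * 24,  # regex pattern (new)
            'site_2.com/static': -1,                                     # never expire
        },
    )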
diff --git a/tests/unit/test_base_cache.py b/tests/unit/test_base_cache.py
new file mode 100644
index 0000000..b57ed09
--- /dev/null
+++ b/tests/unit/test_base_cache.py
@@ -0,0 +1,411 @@
+"""BaseCache tests that use mocked responses only"""
+import pickle
+from datetime import datetime, timedelta
+from logging import getLogger
+from pickle import PickleError
+from time import sleep
+from unittest.mock import patch
+
+import pytest
+from requests import Request
+
+from requests_cache.backends import BaseCache, SQLiteCache, SQLiteDict
+from requests_cache.cache_keys import create_key
+from requests_cache.models import CachedRequest, CachedResponse
+from requests_cache.session import CachedSession
+from tests.conftest import (
+    MOCKED_URL,
+    MOCKED_URL_ETAG,
+    MOCKED_URL_HTTPS,
+    MOCKED_URL_JSON,
+    MOCKED_URL_REDIRECT,
+    ignore_deprecation,
+    mount_mock_adapter,
+    patch_normalize_url,
+)
+
+YESTERDAY = datetime.utcnow() - timedelta(days=1)
+logger = getLogger(__name__)
+
+
+class InvalidResponse:
+    """Class that will raise an error when unpickled"""
+
+    def __init__(self):
+        self.foo = 'bar'
+
+    def __setstate__(self, value):
+        raise ValueError('Invalid response!')
+
+
+def test_contains__key(mock_session):
+    mock_session.get(MOCKED_URL, params={'foo': 'bar'})
+    key = list(mock_session.cache.responses.keys())[0]
+    assert mock_session.cache.contains(key)
+    assert not mock_session.cache.contains(f'{key}_b')
+
+
+def test_contains__request(mock_session):
+    mock_session.get(MOCKED_URL, params={'foo': 'bar'})
+    request = Request('GET', MOCKED_URL, params={'foo': 'bar'})
+    assert mock_session.cache.contains(request=request)
+    request.params = None
+    assert not mock_session.cache.contains(request=request)
+
+
+def test_contains__url(mock_session):
+    mock_session.get(MOCKED_URL)
+    assert mock_session.cache.contains(url=MOCKED_URL)
+    assert not mock_session.cache.contains(url=f'{MOCKED_URL}?foo=bar')
+
+
+@patch_normalize_url
+def test_delete__expired(mock_normalize_url, mock_session):
+    unexpired_url = f'{MOCKED_URL}?x=1'
+    mock_session.mock_adapter.register_uri(
+        'GET', unexpired_url, status_code=200, text='mock response'
+    )
+    mock_session.settings.expire_after = 1
+    mock_session.get(MOCKED_URL)
+    mock_session.get(MOCKED_URL_JSON)
+    sleep(1.1)
+    mock_session.settings.expire_after = 2
+    mock_session.get(unexpired_url)
+
+    # At this point we should have 1 unexpired response and 2 expired responses
+    assert len(mock_session.cache.responses) == 3
+
+    # Use the generic BaseCache implementation, not the SQLite-specific one
+    BaseCache.delete(mock_session.cache, expired=True)
+    assert len(mock_session.cache.responses) == 1
+    cached_response = list(mock_session.cache.responses.values())[0]
+    assert cached_response.url == unexpired_url
+
+    # Now the last response should be expired as well
+    sleep(2)
+    BaseCache.delete(mock_session.cache, expired=True)
+    assert len(mock_session.cache.responses) == 0
+
+
+def test_delete__expired__per_request(mock_session):
+    # Cache 3 responses with different expiration times
+    second_url = f'{MOCKED_URL}/endpoint_2'
+    third_url = f'{MOCKED_URL}/endpoint_3'
+    mock_session.mock_adapter.register_uri('GET', second_url, status_code=200)
+    mock_session.mock_adapter.register_uri('GET', third_url, status_code=200)
+    mock_session.get(MOCKED_URL)
+    mock_session.get(second_url, expire_after=2)
+    mock_session.get(third_url, expire_after=4)
+
+    # All 3 responses should still be cached
+    mock_session.cache.delete(expired=True)
+    for response in mock_session.cache.responses.values():
+        logger.info(f'Expires in {response.expires_delta} seconds')
+    assert len(mock_session.cache.responses) == 3
+
+    # One should be expired after 2s, and another should be expired after 4s
+    sleep(2)
+    mock_session.cache.delete(expired=True)
+    assert len(mock_session.cache.responses) == 2
+    sleep(2)
+    mock_session.cache.delete(expired=True)
+    assert len(mock_session.cache.responses) == 1
+
+
+def test_delete__invalid(tempfile_path):
+    class BadSerializer:
+        def dumps(self, value):
+            return pickle.dumps(value)
+
+        def loads(self, value):
+            response = pickle.loads(value)
+            if response.url.endswith('/json'):
+                raise PickleError
+            return response
+
+    mock_session = CachedSession(
+        cache_name=tempfile_path, backend='sqlite', serializer=BadSerializer()
+    )
+    mock_session = mount_mock_adapter(mock_session)
+
+    # Start with two cached responses, one of which will raise an error
+    response_1 = mock_session.get(MOCKED_URL)
+    response_2 = mock_session.get(MOCKED_URL_JSON)
+
+    # Use the generic BaseCache implementation, not the SQLite-specific one
+    BaseCache.delete(mock_session.cache, expired=True, invalid=True)
+
+    assert len(mock_session.cache.responses) == 1
+    assert mock_session.get(MOCKED_URL).from_cache is True
+    assert mock_session.get(MOCKED_URL_JSON).from_cache is False
+
+
+def test_delete__older_than(mock_session):
+    # Cache 4 responses with different creation times
+    response_0 = CachedResponse(request=CachedRequest(method='GET', url='https://test.com/test_0'))
+    mock_session.cache.save_response(response_0)
+    response_1 = CachedResponse(request=CachedRequest(method='GET', url='https://test.com/test_1'))
+    response_1.created_at -= timedelta(seconds=1)
+    mock_session.cache.save_response(response_1)
+    response_2 = CachedResponse(request=CachedRequest(method='GET', url='https://test.com/test_2'))
+    response_2.created_at -= timedelta(seconds=2)
+    mock_session.cache.save_response(response_2)
+    response_3 = CachedResponse(request=CachedRequest(method='GET', url='https://test.com/test_3'))
+    response_3.created_at -= timedelta(seconds=3)
+    mock_session.cache.save_response(response_3)
+
+    # Incrementally remove responses older than 3, 2, and 1 seconds
+    assert len(mock_session.cache.responses) == 4
+    mock_session.cache.delete(older_than=timedelta(seconds=3))
+    assert len(mock_session.cache.responses) == 3
+    mock_session.cache.delete(older_than=timedelta(seconds=2))
+    assert len(mock_session.cache.responses) == 2
+    mock_session.cache.delete(older_than=timedelta(seconds=1))
+    assert len(mock_session.cache.responses) == 1
+
+    # Remove the last response after it's 1 second old
+    sleep(1)
+    mock_session.cache.delete(older_than=timedelta(seconds=1))
+    assert len(mock_session.cache.responses) == 0
+
+
+def test_delete__urls(mock_session):
+    urls = [MOCKED_URL, MOCKED_URL_JSON, MOCKED_URL_REDIRECT]
+    for url in urls:
+        mock_session.get(url)
+
+    mock_session.cache.delete(urls=urls)
+
+    for url in urls:
+        assert not mock_session.cache.contains(url=url)
+
+
+def test_delete__requests(mock_session):
+    urls = [MOCKED_URL, MOCKED_URL_JSON, MOCKED_URL_REDIRECT]
+    for url in urls:
+        mock_session.get(url)
+
+    requests = [Request('GET', url).prepare() for url in urls]
+    mock_session.cache.delete(requests=requests)
+
+    for request in requests:
+        assert not mock_session.cache.contains(request=request)
+
+
+def test_recreate_keys(mock_session):
+    # Cache some initial responses with default key function
+    urls = [MOCKED_URL, MOCKED_URL_JSON, MOCKED_URL_ETAG]
+    for url in urls:
+        mock_session.get(url)
+    old_cache_keys = set(mock_session.cache.responses.keys())
+
+    # Switch to a new key function and recreate keys
+    def new_key_fn(*args, **kwargs):
+        return create_key(*args, **kwargs) + '_suffix'
+
+    # Check that responses are saved with new keys
+    mock_session.settings.key_fn = new_key_fn
+    mock_session.cache.recreate_keys()
+    new_cache_keys = set(mock_session.cache.responses.keys())
+    assert len(old_cache_keys) == len(new_cache_keys) == len(urls)
+    assert old_cache_keys != new_cache_keys
+
+    # Check that responses are returned from the cache correctly using the new key function
+    for url in urls:
+        assert mock_session.get(url).from_cache is True
+
+
+def test_recreate_keys__same_key_fn(mock_session):
+    urls = [MOCKED_URL, MOCKED_URL_JSON, MOCKED_URL_ETAG]
+    for url in urls:
+        mock_session.get(url)
+    old_cache_keys = set(mock_session.cache.responses.keys())
+
+    mock_session.cache.recreate_keys()
+    new_cache_keys = set(mock_session.cache.responses.keys())
+    assert old_cache_keys == new_cache_keys
+
+    # Check that responses are returned from the cache correctly using the new key function
+    for url in urls:
+        assert mock_session.get(url).from_cache is True
+
+
+def test_reset_expiration__extend_expiration(mock_session):
+    # Start with an expired response
+    mock_session.settings.expire_after = datetime.utcnow() - timedelta(seconds=1)
+    mock_session.get(MOCKED_URL)
+
+    # Set expiration in the future
+    mock_session.cache.reset_expiration(datetime.utcnow() + timedelta(seconds=1))
+    assert len(mock_session.cache.responses) == 1
+    response = mock_session.get(MOCKED_URL)
+    assert response.is_expired is False and response.from_cache is True
+
+
+def test_reset_expiration__shorten_expiration(mock_session):
+    # Start with a non-expired response
+    mock_session.settings.expire_after = datetime.utcnow() + timedelta(seconds=1)
+    mock_session.get(MOCKED_URL)
+
+    # Set expiration in the past
+    mock_session.cache.reset_expiration(datetime.utcnow() - timedelta(seconds=1))
+    response = mock_session.get(MOCKED_URL)
+    assert response.is_expired is False and response.from_cache is False
+
+
+def test_clear(mock_session):
+    mock_session.get(MOCKED_URL)
+    mock_session.get(MOCKED_URL_REDIRECT)
+    mock_session.cache.clear()
+    assert not mock_session.cache.contains(url=MOCKED_URL)
+    assert not mock_session.cache.contains(url=MOCKED_URL_REDIRECT)
+
+
+def test_save_response__manual(mock_session):
+    response = mock_session.get(MOCKED_URL)
+    mock_session.cache.clear()
+    mock_session.cache.save_response(response)
+
+
+def test_update(mock_session):
+    src_cache = BaseCache()
+    for i in range(20):
+        src_cache.responses[f'key_{i}'] = f'value_{i}'
+        src_cache.redirects[f'key_{i}'] = f'value_{i}'
+
+    mock_session.cache.update(src_cache)
+    assert len(mock_session.cache.responses) == 20
+    assert len(mock_session.cache.redirects) == 20
+
+
+@patch_normalize_url
+def test_urls(mock_normalize_url, mock_session):
+    for url in [MOCKED_URL, MOCKED_URL, MOCKED_URL_JSON, MOCKED_URL_HTTPS]:
+        mock_session.get(url)
+
+    expected_urls = [MOCKED_URL_JSON, MOCKED_URL, MOCKED_URL_HTTPS]
+    assert mock_session.cache.urls() == expected_urls
+
+
+def test_urls__error(mock_session):
+    responses = [mock_session.get(url) for url in [MOCKED_URL, MOCKED_URL_JSON, MOCKED_URL_HTTPS]]
+    responses[2] = None
+    with patch.object(SQLiteDict, 'deserialize', side_effect=responses):
+        expected_urls = [MOCKED_URL_JSON, MOCKED_URL]
+        assert mock_session.cache.urls() == expected_urls
+
+    # The invalid response should be skipped, but remain in the cache
+    assert len(mock_session.cache.responses.keys()) == 3
+
+
+# Deprecated methods
+# --------------------
+
+
+def test_has_key(mock_session):
+    response = CachedResponse()
+    mock_session.cache.responses['12345'] = response
+    # flake8: noqa: W601
+    assert mock_session.cache.has_key('12345')
+    assert not mock_session.cache.has_key('1234')
+
+
+def test_has_url(mock_session):
+    mock_session.get(MOCKED_URL, params={'foo': 'bar'})
+    with ignore_deprecation():
+        assert mock_session.cache.has_url(MOCKED_URL, params={'foo': 'bar'})
+        assert not mock_session.cache.has_url(MOCKED_URL)
+
+
+def test_delete_url(mock_session):
+    mock_session.get(MOCKED_URL)
+    with ignore_deprecation():
+        mock_session.cache.delete_url(MOCKED_URL)
+        assert not mock_session.cache.has_url(MOCKED_URL)
+
+
+def test_delete_url__request_args(mock_session):
+    mock_session.get(MOCKED_URL, params={'foo': 'bar'})
+    with ignore_deprecation():
+        mock_session.cache.delete_url(MOCKED_URL, params={'foo': 'bar'})
+        assert not mock_session.cache.has_url(MOCKED_URL, params={'foo': 'bar'})
+
+
+def test_delete_url__nonexistent_response(mock_session):
+    """Deleting a response that was either already deleted (or never added) should fail silently"""
+    with ignore_deprecation():
+        mock_session.cache.delete_url(MOCKED_URL)
+
+        mock_session.get(MOCKED_URL)
+        mock_session.cache.delete_url(MOCKED_URL)
+
+        assert not mock_session.cache.has_url(MOCKED_URL)
+        mock_session.cache.delete_url(MOCKED_URL)  # Should fail silently
+
+
+def test_delete_urls(mock_session):
+    mock_session.get(MOCKED_URL)
+    with ignore_deprecation():
+        mock_session.cache.delete_urls([MOCKED_URL])
+        assert not mock_session.cache.has_url(MOCKED_URL)
+
+
+def test_keys(mock_session):
+    for url in [MOCKED_URL, MOCKED_URL_JSON, MOCKED_URL_REDIRECT]:
+        mock_session.get(url)
+
+    with ignore_deprecation():
+        response_keys = set(mock_session.cache.responses.keys())
+        redirect_keys = set(mock_session.cache.redirects.keys())
+        assert set(mock_session.cache.keys()) == response_keys | redirect_keys
+        assert len(list(mock_session.cache.keys(check_expiry=True))) == 5
+
+
+def test_remove_expired_responses(mock_session):
+    """Test for backwards-compatibility"""
+    with ignore_deprecation(), patch.object(
+        mock_session.cache, 'delete'
+    ) as mock_delete, patch.object(mock_session.cache, 'reset_expiration') as mock_reset:
+        mock_session.cache.remove_expired_responses(expire_after=1)
+        mock_delete.assert_called_once_with(expired=True, invalid=True)
+        mock_reset.assert_called_once_with(1)
+
+        mock_session.cache.remove_expired_responses()
+        assert mock_delete.call_count == 2 and mock_reset.call_count == 1
+
+
+@pytest.mark.parametrize('check_expiry, expected_count', [(True, 2), (False, 3)])
+def test_response_count(check_expiry, expected_count, mock_session):
+    """response_count() should always exclude invalid responses, and optionally exclude expired
+    responses"""
+    mock_session.get(MOCKED_URL)
+    mock_session.get(MOCKED_URL_JSON)
+
+    mock_session.cache.responses['expired_response'] = CachedResponse(expires=YESTERDAY)
+    mock_session.cache.responses['invalid_response'] = InvalidResponse()
+    with ignore_deprecation():
+        response_count = mock_session.cache.response_count(check_expiry=check_expiry)
+    assert response_count == expected_count
+
+
+def test_values(mock_session):
+    for url in [MOCKED_URL, MOCKED_URL_JSON, MOCKED_URL_HTTPS]:
+        mock_session.get(url)
+
+    with ignore_deprecation():
+        responses = list(mock_session.cache.values())
+    assert len(responses) == 3
+    assert all([isinstance(response, CachedResponse) for response in responses])
+
+
+def test_values__with_invalid_responses(mock_session):
+    responses = [mock_session.get(url) for url in [MOCKED_URL, MOCKED_URL_JSON, MOCKED_URL_HTTPS]]
+    responses[1] = None
+    responses[2] = CachedResponse(expires=YESTERDAY, url='test')
+
+    with ignore_deprecation(), patch.object(SQLiteCache, 'filter', side_effect=responses):
+        values = mock_session.cache.values(check_expiry=True)
+        assert len(list(values)) == 1
+
+    # The invalid response should be skipped, but remain in the cache for now
+    assert len(mock_session.cache.responses.keys()) == 3
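This new test module exercises the BaseCache convenience API added in 1.0: contains(), delete() with expired/older_than/urls/requests filters, recreate_keys(), and reset_expiration(), which together replace the deprecated has_url(), delete_url(), and remove_expired_responses() methods. A brief sketch, with an illustrative cache name and URL:

    from datetime import timedelta

    from requests_cache import CachedSession

    session = CachedSession('demo_cache')
    session.get('https://example.com')

    assert session.cache.contains(url='https://example.com')
    session.cache.delete(expired=True)                  # drop expired responses
    session.cache.delete(older_than=timedelta(days=7))  # drop responses older than a week
    session.cache.delete(urls=['https://example.com'])  # drop specific URLs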
diff --git a/tests/unit/test_cache_keys.py b/tests/unit/test_cache_keys.py
index dd79bf6..40cca7f 100644
--- a/tests/unit/test_cache_keys.py
+++ b/tests/unit/test_cache_keys.py
@@ -4,11 +4,11 @@ This just contains tests for some extra edge cases not covered elsewhere.
 import json
 
 import pytest
-from requests import PreparedRequest, Request
+from requests import Request
 
 from requests_cache.cache_keys import MAX_NORM_BODY_SIZE, create_key, normalize_request
 
-CACHE_KEY = 'e8cb526891875e37'
+CACHE_KEY = 'e25f7e6326966e82'
 
 
 @pytest.mark.parametrize(
@@ -41,6 +41,25 @@ def test_create_key__normalize_key_only_params():
     request_2 = Request(method='GET', url='https://img.site.com/base/img.jpg?param_2')
     assert create_key(request_1) != create_key(request_2)
 
+    request_1 = Request(method='GET', url='https://img.site.com/base/img.jpg?k=v&param_1')
+    request_2 = Request(method='GET', url='https://img.site.com/base/img.jpg?param_1&k=v')
+    assert create_key(request_1) == create_key(request_2)
+
+
+def test_create_key__normalize_duplicate_params():
+    request_1 = Request(method='GET', url='https://img.site.com/base/img.jpg?param_1=a&param_1=b')
+    request_2 = Request(method='GET', url='https://img.site.com/base/img.jpg?param_1=a')
+    request_3 = Request(method='GET', url='https://img.site.com/base/img.jpg?param_1=b')
+    assert create_key(request_1) != create_key(request_2) != create_key(request_3)
+
+    request_1 = Request(
+        method='GET', url='https://img.site.com/base/img.jpg?param_1=a&param_1=b&k=v'
+    )
+    request_2 = Request(
+        method='GET', url='https://img.site.com/base/img.jpg?param_1=b&param_1=a', params={'k': 'v'}
+    )
+    assert create_key(request_1) == create_key(request_2)
+
 
 def test_normalize_request__json_body():
     request = Request(
@@ -50,7 +69,7 @@ def test_normalize_request__json_body():
         headers={'Content-Type': 'application/json'},
     )
     norm_request = normalize_request(request, ignored_parameters=['param_2'])
-    assert norm_request.body == b'{"param_1": "value_1"}'
+    assert norm_request.body == b'{"param_1": "value_1", "param_2": "REDACTED"}'
 
 
 def test_normalize_request__json_body_list():
@@ -86,6 +105,16 @@ def test_normalize_request__json_body_invalid():
     assert normalize_request(request, ignored_parameters=['param_2']).body == b'invalid JSON!'
 
 
+def test_normalize_request__json_body_empty():
+    request = Request(
+        method='GET',
+        url='https://img.site.com/base/img.jpg',
+        data=b'{}',
+        headers={'Content-Type': 'application/json'},
+    )
+    assert normalize_request(request, ignored_parameters=['param_2']).body == b'{}'
+
+
 def test_normalize_request__binary_body():
     request = Request(
         method='GET',
@@ -109,11 +138,20 @@ def test_normalize_request__ovsersized_body():
     assert normalize_request(request, ignored_parameters=['param']).body == encoded_body
 
 
+def test_normalize_request__headers():
+    request = Request(
+        method='GET',
+        url='https://img.site.com/base/img.jpg',
+        headers={'Accept': 'gzip,  deflate,Venmo,  PayPal, '},
+    )
+    norm_request = normalize_request(request.prepare())
+    assert norm_request.headers == {'Accept': 'deflate, gzip, paypal, venmo'}
+
+
 def test_remove_ignored_headers__empty():
-    request = PreparedRequest()
-    request.prepare(
+    request = Request(
         method='GET',
         url='https://img.site.com/base/img.jpg',
         headers={'foo': 'bar'},
     )
-    assert normalize_request(request, ignored_parameters=None).headers == request.headers
+    assert normalize_request(request.prepare(), ignored_parameters=None).headers == request.headers
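The updated cache key tests reflect two behavior changes: query parameter order no longer affects the key (while duplicate parameters still do), and ignored parameters are redacted in place rather than removed from the normalized request. A sketch of the order normalization, with illustrative URLs:

    from requests import Request

    from requests_cache.cache_keys import create_key

    # Parameter order is normalized, so these two produce the same cache key
    key_1 = create_key(Request('GET', 'https://example.com/api?a=1&b=2'))
    key_2 = create_key(Request('GET', 'https://example.com/api?b=2&a=1'))
    assert key_1 == key_2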
diff --git a/tests/unit/test_patcher.py b/tests/unit/test_patcher.py
index 1004dd8..8977271 100644
--- a/tests/unit/test_patcher.py
+++ b/tests/unit/test_patcher.py
@@ -6,7 +6,7 @@ from requests.sessions import Session as OriginalSession
 import requests_cache
 from requests_cache import CachedSession
 from requests_cache.backends import BaseCache, SQLiteCache
-from tests.conftest import CACHE_NAME
+from tests.conftest import CACHE_NAME, ignore_deprecation
 
 
 def test_install_uninstall():
@@ -73,21 +73,32 @@ def test_enabled(cached_request, original_request, tempfile_path):
     assert original_request.call_count == 0
 
 
-@patch.object(BaseCache, 'remove_expired_responses')
-def test_remove_expired_responses(remove_expired_responses, tempfile_path):
-    requests_cache.install_cache(tempfile_path, expire_after=360)
-    requests_cache.remove_expired_responses()
-    assert remove_expired_responses.called is True
+def test_is_installed():
+    assert requests_cache.is_installed() is False
+    requests_cache.install_cache(name=CACHE_NAME, use_temp=True)
+    assert requests_cache.is_installed() is True
     requests_cache.uninstall_cache()
+    assert requests_cache.is_installed() is False
 
 
-@patch.object(BaseCache, 'remove_expired_responses')
-def test_remove_expired_responses__cache_not_installed(remove_expired_responses):
-    requests_cache.remove_expired_responses()
-    assert remove_expired_responses.called is False
+@patch.object(BaseCache, 'delete')
+def test_delete__expired_responses(mock_delete):
+    requests_cache.install_cache(backend='memory', expire_after=360)
+    requests_cache.delete(expired=True)
+    assert mock_delete.called is True
+    requests_cache.uninstall_cache()
+
 
+@patch.object(BaseCache, 'delete')
+def test_delete__cache_not_installed(mock_delete):
+    requests_cache.delete(expired=True)
+    assert mock_delete.called is False
 
-@patch.object(BaseCache, 'remove_expired_responses')
-def test_remove_expired_responses__no_expiration(remove_expired_responses, installed_session):
-    requests_cache.remove_expired_responses()
-    assert remove_expired_responses.called is True
+
+@patch.object(BaseCache, 'delete')
+def test_remove_expired_responses(mock_delete):
+    requests_cache.install_cache(backend='memory', expire_after=360)
+    with ignore_deprecation():
+        requests_cache.remove_expired_responses()
+    assert mock_delete.called is True
+    requests_cache.uninstall_cache()
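The patcher tests now target the module-level functions that replace the deprecated remove_expired_responses(): is_installed() and delete(). A short sketch:

    import requests_cache

    requests_cache.install_cache('demo_cache', backend='memory', expire_after=360)
    assert requests_cache.is_installed()

    requests_cache.delete(expired=True)  # replaces remove_expired_responses()
    requests_cache.uninstall_cache()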
diff --git a/tests/unit/test_serializers.py b/tests/unit/test_serializers.py
index b69c7e8..caed84a 100644
--- a/tests/unit/test_serializers.py
+++ b/tests/unit/test_serializers.py
@@ -10,8 +10,6 @@ from uuid import uuid4
 
 import pytest
 from cattr import BaseConverter, GenConverter
-from itsdangerous import Signer
-from itsdangerous.exc import BadSignature
 
 from requests_cache import (
     CachedResponse,
@@ -23,6 +21,7 @@ from requests_cache import (
     safe_pickle_serializer,
     utf8_encoder,
 )
+from tests.conftest import skip_missing_deps
 
 
 def test_stdlib_json():
@@ -37,6 +36,7 @@ def test_stdlib_json():
     reload(requests_cache.serializers.preconf)
 
 
+@skip_missing_deps('ujson')
 def test_ujson():
     import ujson
 
@@ -45,6 +45,22 @@ def test_ujson():
     assert module_json is ujson
 
 
+@skip_missing_deps('bson')
+def test_standalone_bson():
+    """Handle different method names for standalone bson codec vs pymongo"""
+    import requests_cache.serializers.preconf
+
+    # Can't easily install both pymongo and bson (standalone) for tests;
+    # Using the json module here, since it has the same functions as bson (standalone)
+    with patch.dict(sys.modules, {'bson': json, 'pymongo': None}):
+        reload(requests_cache.serializers.preconf)
+        bson_functions = requests_cache.serializers.preconf._get_bson_functions()
+
+        assert bson_functions == {'dumps': 'dumps', 'loads': 'loads'}
+
+    reload(requests_cache.serializers.preconf)
+
+
 def test_optional_dependencies():
     import requests_cache.serializers.preconf
 
@@ -60,6 +76,8 @@ def test_optional_dependencies():
         for obj in [bson_serializer, yaml_serializer]:
             with pytest.raises(ImportError):
                 obj.dumps('')
+            with pytest.raises(ImportError):
+                obj.loads('')
 
         with pytest.raises(ImportError):
             safe_pickle_serializer('')
@@ -67,7 +85,11 @@ def test_optional_dependencies():
     reload(requests_cache.serializers.preconf)
 
 
+@skip_missing_deps('itsdangerous')
 def test_cache_signing(tempfile_path):
+    from itsdangerous import Signer
+    from itsdangerous.exc import BadSignature
+
     serializer = safe_pickle_serializer(secret_key=str(uuid4()))
     session = CachedSession(tempfile_path, serializer=serializer)
     assert isinstance(session.cache.responses.serializer.stages[-1].obj, Signer)
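itsdangerous is now an optional dependency, so the signing test above is skipped when it is missing. For reference, a sketch of cache signing with safe_pickle_serializer (the cache name is illustrative):

    from uuid import uuid4

    from requests_cache import CachedSession, safe_pickle_serializer

    # Raises ImportError if the optional itsdangerous package is not installed
    serializer = safe_pickle_serializer(secret_key=str(uuid4()))
    session = CachedSession('demo_cache', serializer=serializer)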
diff --git a/tests/unit/test_session.py b/tests/unit/test_session.py
index 5049825..7ed2c1b 100644
--- a/tests/unit/test_session.py
+++ b/tests/unit/test_session.py
@@ -1,35 +1,54 @@
-"""CachedSession + BaseCache tests that use mocked responses only"""
-# TODO: This could be split up into some smaller test modules
+"""CachedSession tests that use mocked responses only"""
 import json
 import pickle
-import time
 from collections import UserDict, defaultdict
 from datetime import datetime, timedelta
+from logging import getLogger
 from pathlib import Path
 from pickle import PickleError
+from time import sleep, time
 from unittest.mock import patch
 from urllib.parse import urlencode
 
 import pytest
 import requests
-from requests import Request, RequestException
+from requests import HTTPError, Request, RequestException
 from requests.structures import CaseInsensitiveDict
 
-from requests_cache import ALL_METHODS, CachedResponse, CachedSession
+from requests_cache import ALL_METHODS, CachedSession
 from requests_cache._utils import get_placeholder_class
-from requests_cache.backends import BACKEND_CLASSES, BaseCache, SQLiteDict, SQLitePickleDict
+from requests_cache.backends import BACKEND_CLASSES, BaseCache
 from requests_cache.backends.base import DESERIALIZE_ERRORS
-from requests_cache.cache_keys import create_key
+from requests_cache.policy.expiration import DO_NOT_CACHE, EXPIRE_IMMEDIATELY, NEVER_EXPIRE
 from tests.conftest import (
     MOCKED_URL,
+    MOCKED_URL_200_404,
     MOCKED_URL_404,
+    MOCKED_URL_500,
+    MOCKED_URL_ETAG,
     MOCKED_URL_HTTPS,
     MOCKED_URL_JSON,
     MOCKED_URL_REDIRECT,
     MOCKED_URL_REDIRECT_TARGET,
+    MOCKED_URL_VARY,
+    ignore_deprecation,
+    patch_normalize_url,
 )
 
-YESTERDAY = datetime.utcnow() - timedelta(days=1)
+logger = getLogger(__name__)
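Among the import changes, the named expiration constants now come from requests_cache.policy.expiration. A sketch of their typical use; the URLs are illustrative:

    from requests_cache import CachedSession
    from requests_cache.policy.expiration import DO_NOT_CACHE, EXPIRE_IMMEDIATELY, NEVER_EXPIRE

    session = CachedSession('demo_cache', expire_after=NEVER_EXPIRE)

    # Per-request expiration overrides the session default
    session.get('https://example.com/volatile', expire_after=DO_NOT_CACHE)    # skip the cache entirely
    session.get('https://example.com/feed', expire_after=EXPIRE_IMMEDIATELY)  # cache, but expire immediately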
+
+# Basic initialization
+# -----------------------------------------------------
+
+
+class MyCache(BaseCache):
+    pass
+
+
+def test_init_backend_instance():
+    backend = MyCache()
+    session = CachedSession(backend=backend)
+    assert session.cache is backend
 
 
 def test_init_unregistered_backend():
@@ -37,51 +56,58 @@ def test_init_unregistered_backend():
         CachedSession(backend='nonexistent')
 
 
-def test_cache_path_expansion():
+def test_init_cache_path_expansion():
     session = CachedSession('~', backend='filesystem')
     assert session.cache.cache_dir == Path("~").expanduser()
 
 
-@patch.dict(BACKEND_CLASSES, {'mongo': get_placeholder_class()})
+@patch.dict(BACKEND_CLASSES, {'mongodb': get_placeholder_class()})
 def test_init_missing_backend_dependency():
     """Test that the correct error is thrown when a user does not have a dependency installed"""
     with pytest.raises(ImportError):
-        CachedSession(backend='mongo')
+        CachedSession(backend='mongodb')
 
 
-class MyCache(BaseCache):
-    pass
+def test_repr(mock_session):
+    """Test session and cache string representations"""
+    mock_session.settings.expire_after = 11
+    mock_session.settings.cache_control = True
 
+    assert mock_session.cache.cache_name in repr(mock_session)
+    assert 'expire_after=11' in repr(mock_session)
+    assert 'cache_control=True' in repr(mock_session)
 
-def test_init_backend_instance():
-    backend = MyCache()
-    session = CachedSession(backend=backend)
-    assert session.cache is backend
 
+def test_pickle__disabled():
+    with pytest.raises(NotImplementedError):
+        pickle.dumps(CachedSession(backend='memory'))
 
-def test_init_backend_instance__kwargs():
-    backend = MyCache()
-    session = CachedSession(
-        'test_cache',
-        backend=backend,
-        ignored_parameters=['foo'],
-        include_get_headers=True,
-    )
 
-    assert session.cache.cache_name == 'test_cache'
-    assert session.cache.ignored_parameters == ['foo']
-    assert session.cache.match_headers is True
+def test_response_defaults(mock_session):
+    """Both cached and new responses should always have the following attributes"""
+    mock_session.settings.expire_after = datetime.utcnow() + timedelta(days=1)
+    response_1 = mock_session.get(MOCKED_URL)
+    response_2 = mock_session.get(MOCKED_URL)
+    response_3 = mock_session.get(MOCKED_URL)
+    cache_key = '29de1c4491126e0b'
 
+    assert response_1.cache_key == cache_key
+    assert isinstance(response_1.created_at, datetime)
+    assert isinstance(response_1.expires, datetime)
+    assert response_1.from_cache is False
+    assert response_1.is_expired is False
 
-def test_init_backend_class():
-    session = CachedSession('test_cache', backend=MyCache)
-    assert isinstance(session.cache, MyCache)
-    assert session.cache.cache_name == 'test_cache'
+    assert isinstance(response_2.created_at, datetime)
+    assert isinstance(response_2.expires, datetime)
+    assert response_2.cache_key == cache_key
+    assert response_2.created_at == response_3.created_at
+    assert response_2.expires == response_3.expires
+    assert response_2.from_cache is response_3.from_cache is True
+    assert response_2.is_expired is response_3.is_expired is False
 
 
-def test_pickle__disabled():
-    with pytest.raises(NotImplementedError):
-        pickle.dumps(CachedSession(backend='memory'))
+# Main combinations of request methods and data fields
+# -----------------------------------------------------
 
 
 @pytest.mark.parametrize('method', ALL_METHODS)
@@ -101,11 +127,11 @@ def test_all_methods__ignored_parameters__not_matched(field, method, mock_sessio
     """Test all relevant combinations of methods and data fields. Requests with different request
     params, data, or json should not be cached under different keys based on an ignored param.
     """
-    mock_session.cache.ignored_parameters = ['ignored']
-    mock_session.cache.match_headers = True
-    params_1 = {'ignored': 'value_1', 'not_ignored': 'value_1'}
-    params_2 = {'ignored': 'value_2', 'not_ignored': 'value_1'}
-    params_3 = {'ignored': 'value_2', 'not_ignored': 'value_2'}
+    mock_session.settings.ignored_parameters = ['ignored']
+    mock_session.settings.match_headers = True
+    params_1 = {'ignored': 'value_1', 'param': 'value_1'}
+    params_2 = {'ignored': 'value_2', 'param': 'value_1'}
+    params_3 = {'ignored': 'value_2', 'param': 'value_2'}
 
     assert mock_session.request(method, MOCKED_URL, **{field: params_1}).from_cache is False
     assert mock_session.request(method, MOCKED_URL, **{field: params_1}).from_cache is True
@@ -120,15 +146,27 @@ def test_all_methods__ignored_parameters__redacted(field, method, mock_session):
     """Test all relevant combinations of methods and data fields. Requests with ignored params
     should have those values redacted from the cached response.
     """
-    mock_session.cache.ignored_parameters = ['access_token']
-    params_1 = {'access_token': 'asdf', 'not_ignored': 'value_1'}
+    mock_session.settings.ignored_parameters = ['ignored']
+    params_1 = {'ignored': 'asdf', 'param': 'value_1'}
 
     mock_session.request(method, MOCKED_URL, **{field: params_1})
     cached_response = mock_session.request(method, MOCKED_URL, **{field: params_1})
-    assert 'access_token' not in cached_response.url
-    assert 'access_token' not in cached_response.request.url
-    assert 'access_token' not in cached_response.request.headers
-    assert 'access_token' not in cached_response.request.body.decode('utf-8')
+    request_url = cached_response.request.url
+    headers = cached_response.request.headers
+    body = cached_response.request.body.decode('utf-8')
+
+    assert 'ignored' not in cached_response.url or 'ignored=REDACTED' in cached_response.url
+    assert 'ignored' not in request_url or 'ignored=REDACTED' in request_url
+    assert 'ignored' not in headers or headers['ignored'] == 'REDACTED'
+    if field == 'data':
+        assert 'ignored=REDACTED' in body
+    elif field == 'json':
+        body = json.loads(body)
+        assert body['ignored'] == 'REDACTED'
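As this updated test shows, ignored parameters are now redacted (stored as 'REDACTED') in the cached request rather than stripped, and they still do not affect the cache key. A sketch, with an illustrative parameter name and URL:

    from requests_cache import CachedSession

    session = CachedSession('demo_cache', ignored_parameters=['access_token'])

    # The token does not affect the cache key, and is stored as access_token=REDACTED
    session.get('https://example.com/api', params={'access_token': 'secret', 'q': '1'})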
+
+
+# Variations of relevant request arguments
+# -----------------------------------------------------
 
 
 def test_params_positional_arg(mock_session):
@@ -164,138 +202,8 @@ def test_response_history(mock_session):
     assert len(mock_session.cache.redirects) == 1
 
 
-def test_repr(mock_session):
-    """Test session and cache string representations"""
-    assert repr(mock_session.cache) == '<SQLiteCache(name=http_cache)>'
-    assert str(mock_session.cache) == '<SQLiteCache(name=http_cache)>'
-
-
-def test_urls(mock_session):
-    for url in [MOCKED_URL, MOCKED_URL_JSON, MOCKED_URL_HTTPS]:
-        mock_session.get(url)
-
-    expected_urls = [MOCKED_URL, MOCKED_URL_JSON, MOCKED_URL_HTTPS]
-    assert set(mock_session.cache.urls) == set(expected_urls)
-
-
-def test_urls__with_invalid_response(mock_session):
-    responses = [mock_session.get(url) for url in [MOCKED_URL, MOCKED_URL_JSON, MOCKED_URL_HTTPS]]
-    responses[2] = AttributeError
-    with patch.object(SQLitePickleDict, '__getitem__', side_effect=responses):
-        expected_urls = [MOCKED_URL, MOCKED_URL_JSON]
-        assert set(mock_session.cache.urls) == set(expected_urls)
-
-    # The invalid response should be skipped, but remain in the cache for now
-    assert len(mock_session.cache.responses.keys()) == 3
-
-
-def test_keys(mock_session):
-    for url in [MOCKED_URL, MOCKED_URL_JSON, MOCKED_URL_REDIRECT]:
-        mock_session.get(url)
-
-    all_keys = set(mock_session.cache.responses.keys()) | set(mock_session.cache.redirects.keys())
-    assert set(mock_session.cache.keys()) == all_keys
-
-
-def test_update(mock_session):
-    src_cache = BaseCache()
-    for i in range(20):
-        src_cache.responses[f'key_{i}'] = f'value_{i}'
-        src_cache.redirects[f'key_{i}'] = f'value_{i}'
-
-    mock_session.cache.update(src_cache)
-    assert len(mock_session.cache.responses) == 20
-    assert len(mock_session.cache.redirects) == 20
-
-
-def test_values(mock_session):
-    for url in [MOCKED_URL, MOCKED_URL_JSON, MOCKED_URL_HTTPS]:
-        mock_session.get(url)
-
-    responses = list(mock_session.cache.values())
-    assert len(responses) == 3
-    assert all([isinstance(response, CachedResponse) for response in responses])
-
-
-@pytest.mark.parametrize('check_expiry, expected_count', [(True, 1), (False, 2)])
-def test_values__with_invalid_responses(check_expiry, expected_count, mock_session):
-    """values() should always exclude invalid responses, and optionally exclude expired responses"""
-    responses = [mock_session.get(url) for url in [MOCKED_URL, MOCKED_URL_JSON, MOCKED_URL_HTTPS]]
-    responses[1] = AttributeError
-    responses[2] = CachedResponse(expires=YESTERDAY, url='test')
-
-    with patch.object(SQLitePickleDict, '__getitem__', side_effect=responses):
-        values = mock_session.cache.values(check_expiry=check_expiry)
-        assert len(list(values)) == expected_count
-
-    # The invalid response should be skipped, but remain in the cache for now
-    assert len(mock_session.cache.responses.keys()) == 3
-
-
-class TimeBomb:
-    """Class that will raise an error when unpickled"""
-
-    def __init__(self):
-        self.foo = 'bar'
-
-    def __setstate__(self, value):
-        raise ValueError('Invalid response!')
-
-
-@pytest.mark.parametrize('check_expiry, expected_count', [(True, 2), (False, 3)])
-def test_response_count(check_expiry, expected_count, mock_session):
-    """response_count() should always exclude invalid responses, and optionally exclude expired responses"""
-    mock_session.get(MOCKED_URL)
-    mock_session.get(MOCKED_URL_JSON)
-
-    mock_session.cache.responses['expired_response'] = CachedResponse(expires=YESTERDAY)
-    mock_session.cache.responses['invalid_response'] = TimeBomb()
-    assert mock_session.cache.response_count(check_expiry=check_expiry) == expected_count
-
-
-def test_filter_fn(mock_session):
-    mock_session.filter_fn = lambda r: r.request.url != MOCKED_URL_JSON
-    mock_session.get(MOCKED_URL)
-    mock_session.get(MOCKED_URL_JSON)
-
-    assert mock_session.cache.has_url(MOCKED_URL)
-    assert not mock_session.cache.has_url(MOCKED_URL_JSON)
-
-
-def test_filter_fn__retroactive(mock_session):
-    """filter_fn should also apply to previously cached responses"""
-    mock_session.get(MOCKED_URL_JSON)
-    mock_session.filter_fn = lambda r: r.request.url != MOCKED_URL_JSON
-    mock_session.get(MOCKED_URL_JSON)
-
-    assert not mock_session.cache.has_url(MOCKED_URL_JSON)
-
-
-# def test_key_fn(mock_session):
-#     def create_key(request, **kwargs):
-#         """Create a key based on only the request URL (without params)"""
-#         return request.url.split('?')[0]
-
-#     mock_session.cache.key_fn = create_key
-#     mock_session.get(MOCKED_URL)
-#     response = mock_session.get(MOCKED_URL, params={'k': 'v'})
-#     assert response.from_cache is True
-
-
-def test_hooks(mock_session):
-    state = defaultdict(int)
-    mock_session.get(MOCKED_URL)
-
-    for hook in ('response',):
-
-        def hook_func(r, *args, **kwargs):
-            state[hook] += 1
-            assert r.from_cache is True
-            return r
-
-        for i in range(5):
-            mock_session.get(MOCKED_URL, hooks={hook: hook_func})
-        assert state[hook] == 5
+# Request matching
+# -----------------------------------------------------
 
 
 @pytest.mark.parametrize('method', ['POST', 'PUT'])
@@ -379,98 +287,9 @@ def test_normalize_params__url(mock_session):
     assert len(set(keys)) == 1
 
 
-def test_clear(mock_session):
-    mock_session.get(MOCKED_URL)
-    mock_session.get(MOCKED_URL_REDIRECT)
-    mock_session.cache.clear()
-    assert not mock_session.cache.has_url(MOCKED_URL)
-    assert not mock_session.cache.has_url(MOCKED_URL_REDIRECT)
-
-
-def test_has_url(mock_session):
-    mock_session.get(MOCKED_URL)
-    assert mock_session.cache.has_url(MOCKED_URL)
-    assert not mock_session.cache.has_url(MOCKED_URL_REDIRECT)
-
-
-def test_has_url__request_args(mock_session):
-    mock_session.get(MOCKED_URL, params={'foo': 'bar'})
-    assert mock_session.cache.has_url(MOCKED_URL, params={'foo': 'bar'})
-    assert not mock_session.cache.has_url(MOCKED_URL)
-
-
-def test_delete_url(mock_session):
-    mock_session.get(MOCKED_URL)
-    mock_session.cache.delete_url(MOCKED_URL)
-    assert not mock_session.cache.has_url(MOCKED_URL)
-
-
-def test_delete_url__request_args(mock_session):
-    mock_session.get(MOCKED_URL, params={'foo': 'bar'})
-    mock_session.cache.delete_url(MOCKED_URL, params={'foo': 'bar'})
-    assert not mock_session.cache.has_url(MOCKED_URL, params={'foo': 'bar'})
-
-
-def test_delete_url__nonexistent_response(mock_session):
-    """Deleting a response that was either already deleted (or never added) should fail silently"""
-    mock_session.cache.delete_url(MOCKED_URL)
-
-    mock_session.get(MOCKED_URL)
-    mock_session.cache.delete_url(MOCKED_URL)
-    assert not mock_session.cache.has_url(MOCKED_URL)
-    mock_session.cache.delete_url(MOCKED_URL)  # Should fail silently
-
-
-def test_delete_url__redirect(mock_session):
-    mock_session.get(MOCKED_URL_REDIRECT)
-    assert mock_session.cache.has_url(MOCKED_URL_REDIRECT)
-
-    mock_session.cache.delete_url(MOCKED_URL_REDIRECT)
-    assert not mock_session.cache.has_url(MOCKED_URL_REDIRECT)
-
-
-def test_delete_urls(mock_session):
-    urls = [MOCKED_URL, MOCKED_URL_JSON, MOCKED_URL_REDIRECT]
-    for url in urls:
-        mock_session.get(url)
-
-    mock_session.cache.delete_urls(urls)
-    for url in urls:
-        assert not mock_session.cache.has_url(MOCKED_URL_REDIRECT)
-
-
-def test_save_response_manual(mock_session):
-    response = mock_session.get(MOCKED_URL)
-    mock_session.cache.clear()
-    mock_session.cache.save_response(response)
-
-
-def test_response_defaults(mock_session):
-    """Both cached and new responses should always have the following attributes"""
-    mock_session.expire_after = datetime.utcnow() + timedelta(days=1)
-    response_1 = mock_session.get(MOCKED_URL)
-    response_2 = mock_session.get(MOCKED_URL)
-    response_3 = mock_session.get(MOCKED_URL)
-    cache_key = 'd7fa9fb7317b7412'
-
-    assert response_1.cache_key == cache_key
-    assert response_1.created_at is None
-    assert response_1.expires is None
-    assert response_1.from_cache is False
-    assert response_1.is_expired is False
-
-    assert isinstance(response_2.created_at, datetime)
-    assert isinstance(response_2.expires, datetime)
-    assert response_2.cache_key == cache_key
-    assert response_2.created_at == response_3.created_at
-    assert response_2.expires == response_3.expires
-    assert response_2.from_cache is response_3.from_cache is True
-    assert response_2.is_expired is response_3.is_expired is False
-
-
 def test_match_headers(mock_session):
     """With match_headers, requests with different headers should have different cache keys"""
-    mock_session.cache.match_headers = True
+    mock_session.settings.match_headers = True
     headers_list = [
         {'Accept': 'application/json'},
         {'Accept': 'text/xml'},
@@ -484,7 +303,7 @@ def test_match_headers(mock_session):
 
 def test_match_headers__normalize(mock_session):
     """With match_headers, the same headers (in any order) should have the same cache key"""
-    mock_session.cache.match_headers = True
+    mock_session.settings.match_headers = True
     headers = {'Accept': 'application/json', 'Custom': 'abc'}
     reversed_headers = {'Custom': 'abc', 'Accept': 'application/json'}
     assert mock_session.get(MOCKED_URL, headers=headers).from_cache is False
@@ -493,7 +312,7 @@ def test_match_headers__normalize(mock_session):
 
 def test_match_headers__list(mock_session):
     """match_headers can optionally be a list of specific headers to include"""
-    mock_session.cache.match_headers = ['Accept']
+    mock_session.settings.match_headers = ['Accept']
     headers_1 = {'Accept': 'application/json', 'User-Agent': 'qutebrowser'}
     headers_2 = {'Accept': 'application/json', 'User-Agent': 'Firefox'}
     headers_3 = {'Accept': 'text/plain', 'User-Agent': 'qutebrowser'}
@@ -504,64 +323,125 @@ def test_match_headers__list(mock_session):
     assert mock_session.get(MOCKED_URL, headers=headers_3).from_cache is False
 
 
+def test_match_headers__vary(mock_session):
+    """Vary should be used to validate headers, if available.
+    It should also override `match_headers` for the secondary cache key, if both are provided.
+    """
+    # mock_session.settings.match_headers = ['Accept-Encoding']
+    headers_1 = {'Accept': 'application/json', 'User-Agent': 'qutebrowser'}
+    headers_2 = {'Accept': 'application/json', 'User-Agent': 'Firefox'}
+    headers_3 = {'Accept': 'text/plain', 'User-Agent': 'qutebrowser'}
+
+    assert mock_session.get(MOCKED_URL_VARY, headers=headers_1).from_cache is False
+    assert mock_session.get(MOCKED_URL_VARY, headers=headers_1).from_cache is True
+    assert mock_session.get(MOCKED_URL_VARY, headers=headers_2).from_cache is True
+    assert mock_session.get(MOCKED_URL_VARY, headers=headers_3).from_cache is False
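Header matching is now configured via session.settings, and a Vary response header drives header validation automatically, as the test above shows. A brief sketch:

    from requests_cache import CachedSession

    session = CachedSession('demo_cache')
    session.settings.match_headers = ['Accept']  # only these headers affect the cache key

    # Responses served with 'Vary: Accept' are validated against that header
    # automatically, even if match_headers is not set
    session.get('https://example.com/api', headers={'Accept': 'application/json'})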
+
+
 def test_include_get_headers():
     """include_get_headers is aliased to match_headers for backwards-compatibility"""
     session = CachedSession(include_get_headers=True, backend='memory')
-    assert session.cache.match_headers is True
+    assert session.settings.match_headers is True
+
+
+# Error handling
+# -----------------------------------------------------
 
 
 @pytest.mark.parametrize('exception_cls', DESERIALIZE_ERRORS)
 def test_cache_error(exception_cls, mock_session):
     """If there is an error while fetching a cached response, a new one should be fetched"""
     mock_session.get(MOCKED_URL)
-    with patch.object(SQLiteDict, '__getitem__', side_effect=exception_cls):
+
+    with patch.object(mock_session.cache.responses.serializer, 'loads', side_effect=exception_cls):
         assert mock_session.get(MOCKED_URL).from_cache is False
 
 
 def test_expired_request_error(mock_session):
     """Without stale_if_error (default), if there is an error while re-fetching an expired
-    response, the request should be re-raised and the expired item deleted"""
-    mock_session.stale_if_error = False
-    mock_session.expire_after = 1
+    response, the request should be re-raised
+    """
+    mock_session.settings.stale_if_error = False
+    mock_session.settings.expire_after = 1
     mock_session.get(MOCKED_URL)
-    time.sleep(1)
+    sleep(1)
 
     with patch.object(mock_session.cache, 'save_response', side_effect=ValueError):
         with pytest.raises(ValueError):
             mock_session.get(MOCKED_URL)
-    assert len(mock_session.cache.responses) == 0
 
 
 def test_stale_if_error__exception(mock_session):
     """With stale_if_error, expect to get old cache data if there is an exception during a request"""
-    mock_session.stale_if_error = True
-    mock_session.expire_after = 1
+    mock_session.settings.stale_if_error = True
+    mock_session.settings.expire_after = 1
 
     assert mock_session.get(MOCKED_URL).from_cache is False
     assert mock_session.get(MOCKED_URL).from_cache is True
-    time.sleep(1)
+    sleep(1)
     with patch.object(mock_session.cache, 'save_response', side_effect=RequestException):
         response = mock_session.get(MOCKED_URL)
         assert response.from_cache is True and response.is_expired is True
 
 
 def test_stale_if_error__error_code(mock_session):
-    """With stale_if_error, expect to get old cache data if a response has an error status code"""
-    mock_session.stale_if_error = True
-    mock_session.expire_after = 1
-    mock_session.allowable_codes = (200, 404)
+    """With stale_if_error, expect to get old cache data if a response has an error status code,
+    that is not in allowable_codes.
+    """
+    mock_session.settings.stale_if_error = True
+    mock_session.settings.expire_after = 1
+    mock_session.settings.allowable_codes = (200,)
 
-    assert mock_session.get(MOCKED_URL_404).from_cache is False
+    assert mock_session.get(MOCKED_URL_200_404).status_code == 200
 
-    time.sleep(1)
-    response = mock_session.get(MOCKED_URL_404)
-    assert response.from_cache is True and response.is_expired is True
+    sleep(1)
+
+    response = mock_session.get(MOCKED_URL_200_404)
+    assert response.status_code == 200
+    assert response.from_cache is True
+    assert response.is_expired is True
+
+
+def test_stale_if_error__error_code_in_allowable_codes(mock_session):
+    """With stale_if_error, expect to get the failed response if a response has an error status code,
+    that is in allowable_codes.
+    """
+    mock_session.settings.stale_if_error = True
+    mock_session.settings.expire_after = 1
+    mock_session.settings.allowable_codes = (200, 404)
+
+    assert mock_session.get(MOCKED_URL_200_404).status_code == 200
+
+    sleep(1)
+
+    response = mock_session.get(MOCKED_URL_200_404)
+    assert response.status_code == 404
+    assert response.from_cache is False
+    assert response.is_expired is False
+
+
+def test_stale_if_error__max_stale(mock_session):
+    """With stale_if_error as a time value, expect to get old cache data if a response has an error
+    status code AND it is expired by less than the specified time
+    """
+    mock_session.settings.stale_if_error = timedelta(seconds=15)
+    mock_session.settings.expire_after = datetime.utcnow() - timedelta(seconds=10)
+    mock_session.settings.allowable_codes = (200,)
+    mock_session.get(MOCKED_URL_200_404)  # Seed the cache with the initial 200 response
+
+    response = mock_session.get(MOCKED_URL_200_404)
+    assert response.from_cache is True
+    assert response.is_expired is True
+
+    mock_session.settings.stale_if_error = 5
+    with pytest.raises(HTTPError):
+        mock_session.get(MOCKED_URL_200_404)
 
 
 def test_old_data_on_error():
     """stale_if_error is aliased to old_data_on_error for backwards-compatibility"""
     session = CachedSession(old_data_on_error=True, backend='memory')
-    assert session.stale_if_error is True
+    assert session.settings.stale_if_error is True
 
 
 def test_cache_disabled(mock_session):
@@ -582,17 +462,186 @@ def test_cache_disabled__nested(mock_session):
     assert mock_session.get(MOCKED_URL).from_cache is True
 
 
+def test_unpickle_errors(mock_session):
+    """If there is an error during deserialization, the request should be made again"""
+    assert mock_session.get(MOCKED_URL_JSON).from_cache is False
+
+    with patch.object(mock_session.cache.responses.serializer, 'loads', side_effect=PickleError):
+        resp = mock_session.get(MOCKED_URL_JSON)
+        assert resp.from_cache is False
+        assert resp.json()['message'] == 'mock json response'
+
+    resp = mock_session.get(MOCKED_URL_JSON)
+    assert resp.from_cache is True
+    assert resp.json()['message'] == 'mock json response'
+
+
+# Additional CachedSession settings and methods
+# -----------------------------------------------------
+
+
+def test_allowable_codes(mock_session):
+    mock_session.settings.allowable_codes = (200, 404)
+
+    # This request should be cached
+    mock_session.get(MOCKED_URL_404)
+    assert mock_session.cache.contains(url=MOCKED_URL_404)
+    assert mock_session.get(MOCKED_URL_404).from_cache is True
+
+    # This request should be filtered out on both read and write
+    mock_session.get(MOCKED_URL_500)
+    assert not mock_session.cache.contains(url=MOCKED_URL_500)
+    assert mock_session.get(MOCKED_URL_500).from_cache is False
+
+
+def test_allowable_methods(mock_session):
+    mock_session.settings.allowable_methods = ['GET', 'OPTIONS']
+
+    # This request should be cached
+    mock_session.options(MOCKED_URL)
+    assert mock_session.cache.contains(request=Request('OPTIONS', MOCKED_URL))
+    assert mock_session.options(MOCKED_URL).from_cache is True
+
+    # These requests should be filtered out on both read and write
+    mock_session.put(MOCKED_URL)
+    assert not mock_session.cache.contains(request=Request('PUT', MOCKED_URL))
+    assert mock_session.put(MOCKED_URL).from_cache is False
+
+    mock_session.patch(MOCKED_URL)
+    assert not mock_session.cache.contains(request=Request('PATCH', MOCKED_URL))
+    assert mock_session.patch(MOCKED_URL).from_cache is False
+
+    mock_session.delete(MOCKED_URL)
+    assert not mock_session.cache.contains(request=Request('DELETE', MOCKED_URL))
+    assert mock_session.delete(MOCKED_URL).from_cache is False
+
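The two tests above check that filtering applies on both cache read and write. The corresponding session options, as a sketch (cache name illustrative):

    from requests_cache import CachedSession

    # Cache GET and OPTIONS requests, and 404 responses in addition to 200s
    session = CachedSession(
        'demo_cache',
        allowable_methods=('GET', 'OPTIONS'),
        allowable_codes=(200, 404),
    )
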
+
+def test_always_revalidate(mock_session):
+    """The session always_revalidate option should send a conditional request, if possible"""
+    mock_session.settings.expire_after = 60
+    response_1 = mock_session.get(MOCKED_URL_ETAG)
+    response_2 = mock_session.get(MOCKED_URL_ETAG)
+    mock_session.mock_adapter.register_uri('GET', MOCKED_URL_ETAG, status_code=304)
+
+    mock_session.settings.always_revalidate = True
+    response_3 = mock_session.get(MOCKED_URL_ETAG)
+    response_4 = mock_session.get(MOCKED_URL_ETAG)
+
+    assert response_1.from_cache is False
+    assert response_2.from_cache is True
+    assert response_3.from_cache is True and response_3.revalidated is True
+    assert response_4.from_cache is True and response_4.revalidated is True
+
+    # Expect expiration to get reset after revalidation
+    assert response_2.expires < response_4.expires
+
+
+def test_default_ignored_parameters(mock_session):
+    """Common auth params and headers (for OAuth2, etc.) should be ignored by default"""
+    mock_session.get(
+        MOCKED_URL,
+        params={'access_token': 'token'},
+        headers={'Authorization': 'Bearer token'},
+    )
+    response = mock_session.get(
+        MOCKED_URL,
+        params={'access_token': 'token'},
+        headers={'Authorization': 'Bearer token'},
+    )
+    assert response.from_cache is True
+
+    unauthenticated_response = mock_session.get(MOCKED_URL)
+    assert unauthenticated_response.from_cache is False
+
+    assert 'access_token=REDACTED' in response.url
+    assert 'access_token=REDACTED' in response.request.url
+    assert response.request.headers['Authorization'] == 'REDACTED'
+
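test_default_ignored_parameters verifies that common credentials are both excluded from cache matching and redacted in stored requests. Extending the defaults is a one-liner; 'api_key' below is a hypothetical parameter name:

    from requests_cache import CachedSession

    # 'api_key' is ignored when matching requests and redacted in the cache
    session = CachedSession('demo_cache', ignored_parameters=['api_key'])
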
+
+@patch_normalize_url
+def test_filter_fn(mock_normalize_url, mock_session):
+    mock_session.settings.filter_fn = lambda r: r.request.url != MOCKED_URL_JSON
+
+    # This request should be cached
+    mock_session.get(MOCKED_URL)
+    assert mock_session.cache.contains(url=MOCKED_URL)
+    assert mock_session.get(MOCKED_URL).from_cache is True
+
+    # This request should be filtered out on both read and write
+    mock_session.get(MOCKED_URL_JSON)
+    assert not mock_session.cache.contains(url=MOCKED_URL_JSON)
+    assert mock_session.get(MOCKED_URL_JSON).from_cache is False
+
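filter_fn is applied on both read and write, and (per the next test) retroactively to previously cached responses. A sketch of a typical predicate, which receives the response object:

    from requests_cache import CachedSession

    # Only cache successful responses
    session = CachedSession('demo_cache', filter_fn=lambda response: response.status_code == 200)
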
+
+@patch_normalize_url
+def test_filter_fn__retroactive(mock_normalize_url, mock_session):
+    """filter_fn should also apply to previously cached responses"""
+    mock_session.get(MOCKED_URL_JSON)
+    mock_session.settings.filter_fn = lambda r: r.request.url != MOCKED_URL_JSON
+    mock_session.get(MOCKED_URL_JSON)
+    assert not mock_session.cache.contains(url=MOCKED_URL_JSON)
+
+
+def test_key_fn(mock_session):
+    def create_custom_key(request, **kwargs):
+        """Create a key based on only the request URL (without params)"""
+        return request.url.split('?')[0]
+
+    mock_session.settings.key_fn = create_custom_key
+    mock_session.get(MOCKED_URL)
+    response = mock_session.get(MOCKED_URL, params={'k': 'v'})
+    assert response.from_cache is True
+
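key_fn swaps out the default cache key function. Mirroring the test above, a sketch that keys only on the URL with the query string stripped:

    from requests_cache import CachedSession

    def create_custom_key(request, **kwargs):
        """Illustrative key function: ignore query params entirely"""
        return request.url.split('?')[0]

    session = CachedSession('demo_cache', key_fn=create_custom_key)
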
+
+def test_hooks(mock_session):
+    state = defaultdict(int)
+    mock_session.get(MOCKED_URL)
+
+    for hook in ('response',):
+
+        def hook_func(r, *args, **kwargs):
+            state[hook] += 1
+            assert r.from_cache is True
+            return r
+
+        for i in range(5):
+            mock_session.get(MOCKED_URL, hooks={hook: hook_func})
+        assert state[hook] == 5
+
+
+def test_expire_after_alias(mock_session):
+    """CachedSession has an `expire_after` property for backwards-compatibility"""
+    mock_session.expire_after = 60
+    assert mock_session.expire_after == mock_session.settings.expire_after == 60
+
+
 def test_do_not_cache(mock_session):
-    """expire_after=0 should bypass the cache on both read and write"""
-    # Bypass read
+    """DO_NOT_CACHE should bypass the cache on both read and write"""
     mock_session.get(MOCKED_URL)
-    assert mock_session.cache.has_url(MOCKED_URL)
-    assert mock_session.get(MOCKED_URL, expire_after=0).from_cache is False
+    assert mock_session.cache.contains(url=MOCKED_URL)
 
-    # Bypass write
-    mock_session.expire_after = 0
+    # Skip read
+    response = mock_session.get(MOCKED_URL, expire_after=DO_NOT_CACHE)
+    assert response.from_cache is False
+
+    # Skip write
+    mock_session.settings.expire_after = DO_NOT_CACHE
     mock_session.get(MOCKED_URL_JSON)
-    assert not mock_session.cache.has_url(MOCKED_URL_JSON)
+    assert not mock_session.cache.contains(url=MOCKED_URL_JSON)
+
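DO_NOT_CACHE replaces the old expire_after=0 convention from 0.9.x, alongside two related constants used in the tests here. A sketch (URL illustrative):

    from requests_cache import DO_NOT_CACHE, EXPIRE_IMMEDIATELY, NEVER_EXPIRE, CachedSession

    session = CachedSession('demo_cache', expire_after=NEVER_EXPIRE)  # Never expire by default
    session.get('https://example.com/api', expire_after=DO_NOT_CACHE)  # Skip both read and write
    session.get('https://example.com/api', expire_after=EXPIRE_IMMEDIATELY)  # Store only if revalidatable
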
+
+def test_expire_immediately(mock_session):
+    """EXPIRE_IMMEDIATELY should save a response only if it has a validator"""
+    # Without validator
+    mock_session.settings.expire_after = EXPIRE_IMMEDIATELY
+    mock_session.get(MOCKED_URL)
+    response = mock_session.get(MOCKED_URL)
+    assert not mock_session.cache.contains(url=MOCKED_URL)
+    assert response.from_cache is False
+
+    # With validator
+    mock_session.get(MOCKED_URL_ETAG)
+    response = mock_session.get(MOCKED_URL_ETAG)
+    assert mock_session.cache.contains(url=MOCKED_URL_ETAG)
 
 
 @pytest.mark.parametrize(
@@ -613,7 +662,7 @@ def test_304_not_modified(
 ):
     url = f'{MOCKED_URL}/endpoint_2'
     if cache_expired:
-        mock_session.expire_after = datetime.now() - timedelta(1)
+        mock_session.settings.expire_after = datetime.utcnow() - timedelta(1)
     if cache_hit:
         mock_session.mock_adapter.register_uri('GET', url, status_code=200)
         mock_session.get(url)
@@ -625,38 +674,15 @@ def test_304_not_modified(
 
 def test_url_allowlist(mock_session):
     """If the default is 0, only URLs matching patterns in urls_expire_after should be cached"""
-    mock_session.urls_expire_after = {
+    mock_session.settings.urls_expire_after = {
         MOCKED_URL_JSON: 60,
-        '*': 0,
+        '*': DO_NOT_CACHE,
     }
     mock_session.get(MOCKED_URL_JSON)
     assert mock_session.get(MOCKED_URL_JSON).from_cache is True
     mock_session.get(MOCKED_URL)
     assert mock_session.get(MOCKED_URL).from_cache is False
-
-
-def test_remove_expired_responses(mock_session):
-    unexpired_url = f'{MOCKED_URL}?x=1'
-    mock_session.mock_adapter.register_uri(
-        'GET', unexpired_url, status_code=200, text='mock response'
-    )
-    mock_session.expire_after = timedelta(seconds=0.2)
-    mock_session.get(MOCKED_URL)
-    mock_session.get(MOCKED_URL_JSON)
-    time.sleep(0.2)
-    mock_session.get(unexpired_url)
-
-    # At this point we should have 1 unexpired response and 2 expired responses
-    assert len(mock_session.cache.responses) == 3
-    mock_session.remove_expired_responses()
-    assert len(mock_session.cache.responses) == 1
-    cached_response = list(mock_session.cache.responses.values())[0]
-    assert cached_response.url == unexpired_url
-
-    # Now the last response should be expired as well
-    time.sleep(0.2)
-    mock_session.remove_expired_responses()
-    assert len(mock_session.cache.responses) == 0
+    assert not mock_session.cache.contains(url=MOCKED_URL)
 
 
 def test_invalid_expiration(mock_session):
@@ -674,123 +700,251 @@ def test_invalid_expiration(mock_session):
         mock_session.get(MOCKED_URL)
 
 
-def test_remove_expired_responses__error(mock_session):
-    # Start with two cached responses, one of which will raise an error
-    mock_session.get(MOCKED_URL)
-    mock_session.get(MOCKED_URL_JSON)
+def test_stale_while_revalidate(mock_session):
+    # Start with expired responses
+    mocked_url_2 = f'{MOCKED_URL_ETAG}?k=v'
+    mock_session.settings.stale_while_revalidate = True
+    mock_session.get(MOCKED_URL_ETAG, expire_after=timedelta(seconds=-2))
+    mock_session.get(mocked_url_2, expire_after=timedelta(seconds=-2))
+    assert mock_session.cache.contains(url=MOCKED_URL_ETAG)
+
+    # First, check that the correct method is called
+    mock_session.mock_adapter.register_uri('GET', MOCKED_URL_ETAG, status_code=304)
+    with patch.object(CachedSession, '_resend_async') as mock_send:
+        response = mock_session.get(MOCKED_URL_ETAG)
+        mock_send.assert_called_once()
+
+    def slow_request(*args, **kwargs):
+        sleep(0.1)
+        return mock_session._send_and_cache(*args, **kwargs)
+
+    # Next, test that the revalidation request is non-blocking
+    start = time()
+    with patch.object(CachedSession, '_send_and_cache', side_effect=slow_request) as mock_send:
+        response = mock_session.get(mocked_url_2, expire_after=60)
+        assert response.from_cache is True and response.is_expired is True
+        assert time() - start < 0.1  # Response should be returned immediately; request takes 0.1s
+        sleep(1)  # Background thread may be slow on CI runner
+        mock_send.assert_called()
 
-    def error_on_key(key):
-        if key == create_key(method='GET', url=MOCKED_URL_JSON):
-            raise PickleError
-        return mock_session.get(MOCKED_URL_JSON)
+    # An extra sleep AFTER patching magically fixes this test on pypy, and I have no idea why
+    sleep(1)
 
-    with patch.object(SQLitePickleDict, '__getitem__', side_effect=error_on_key):
-        mock_session.remove_expired_responses()
-    assert len(mock_session.cache.responses) == 1
-    assert mock_session.get(MOCKED_URL).from_cache is True
-    assert mock_session.get(MOCKED_URL_JSON).from_cache is False
+    # Finally, check that the cached response has been refreshed
+    response = mock_session.get(mocked_url_2)
+    assert response.from_cache is True and response.is_expired is False
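
stale_while_revalidate returns the stale cached response immediately and refreshes it with a non-blocking background request, which is what the timing assertions above verify. A usage sketch (values illustrative):

    from requests_cache import CachedSession

    # Accept responses expired by up to 60 seconds while revalidating in the background
    session = CachedSession('demo_cache', expire_after=300, stale_while_revalidate=60)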
 
 
-def test_remove_expired_responses__extend_expiration(mock_session):
-    # Start with an expired response
-    mock_session.expire_after = datetime.utcnow() - timedelta(seconds=0.01)
-    mock_session.get(MOCKED_URL)
+def test_stale_while_revalidate__time(mock_session):
+    """stale_while_revalidate should also accept a time value (max acceptable staleness)"""
+    mocked_url_2 = f'{MOCKED_URL_ETAG}?k=v'
+    mock_session.settings.stale_while_revalidate = timedelta(seconds=3)
+    mock_session.get(MOCKED_URL_ETAG, expire_after=timedelta(seconds=-2))
+    response = mock_session.get(mocked_url_2, expire_after=timedelta(seconds=-4))
 
-    # Set expiration in the future and revalidate
-    mock_session.remove_expired_responses(expire_after=datetime.utcnow() + timedelta(seconds=1))
-    assert len(mock_session.cache.responses) == 1
-    response = mock_session.get(MOCKED_URL)
-    assert response.is_expired is False and response.from_cache is True
+    # stale_while_revalidate should apply to this response (expired 2 seconds ago)
+    response = mock_session.get(MOCKED_URL_ETAG)
+    assert response.from_cache is True and response.is_expired is True
 
+    # but not this response (expired 4 seconds ago)
+    response = mock_session.get(mocked_url_2)
+    assert response.from_cache is False and response.is_expired is False
 
-def test_remove_expired_responses__shorten_expiration(mock_session):
-    # Start with a non-expired response
-    mock_session.expire_after = datetime.utcnow() + timedelta(seconds=1)
-    mock_session.get(MOCKED_URL)
 
-    # Set expiration in the past and revalidate
-    mock_session.remove_expired_responses(expire_after=datetime.utcnow() - timedelta(seconds=0.01))
-    assert len(mock_session.cache.responses) == 0
-    response = mock_session.get(MOCKED_URL)
-    assert response.is_expired is False and response.from_cache is False
+def test_stale_while_revalidate__refresh(mock_session):
+    """stale_while_revalidate should also apply to normal refresh requests"""
+    mock_session.settings.stale_while_revalidate = True
+    mock_session.get(MOCKED_URL, expire_after=1)
+    sleep(1)  # An expired response without a validator won't be cached, so need to sleep
 
+    response = mock_session.get(MOCKED_URL)
+    assert response.from_cache is True and response.is_expired is True
 
-def test_remove_expired_responses__per_request(mock_session):
-    # Cache 3 responses with different expiration times
-    second_url = f'{MOCKED_URL}/endpoint_2'
-    third_url = f'{MOCKED_URL}/endpoint_3'
-    mock_session.mock_adapter.register_uri('GET', second_url, status_code=200)
-    mock_session.mock_adapter.register_uri('GET', third_url, status_code=200)
-    mock_session.get(MOCKED_URL)
-    mock_session.get(second_url, expire_after=1)
-    mock_session.get(third_url, expire_after=2)
+    sleep(0.2)
+    response = mock_session.get(MOCKED_URL)
+    assert response.from_cache is True and response.is_expired is False
 
-    # All 3 responses should still be cached
-    mock_session.remove_expired_responses()
-    for response in mock_session.cache.responses.values():
-        print('Expires:', response.expires - datetime.utcnow() if response.expires else None)
-    assert len(mock_session.cache.responses) == 3
 
-    # One should be expired after 1s, and another should be expired after 2s
-    time.sleep(1)
-    mock_session.remove_expired_responses()
-    assert len(mock_session.cache.responses) == 2
-    time.sleep(2)
-    mock_session.remove_expired_responses()
-    assert len(mock_session.cache.responses) == 1
+# Additional request() and send() options
+# -----------------------------------------------------
 
 
-def test_per_request__enable_expiration(mock_session):
+def test_request_expire_after__enable_expiration(mock_session):
     """No per-session expiration is set, but then overridden for a single request"""
-    mock_session.expire_after = None
+    mock_session.settings.expire_after = None
     response = mock_session.get(MOCKED_URL, expire_after=1)
     assert response.from_cache is False
     assert mock_session.get(MOCKED_URL).from_cache is True
 
-    time.sleep(1)
+    sleep(1)
     response = mock_session.get(MOCKED_URL)
     assert response.from_cache is False
 
 
-def test_per_request__disable_expiration(mock_session):
+def test_request_expire_after__disable_expiration(mock_session):
     """A per-session expiration is set, but then disabled for a single request"""
-    mock_session.expire_after = 60
-    response = mock_session.get(MOCKED_URL, expire_after=-1)
-    response = mock_session.get(MOCKED_URL, expire_after=-1)
+    mock_session.settings.expire_after = 60
+    response = mock_session.get(MOCKED_URL, expire_after=NEVER_EXPIRE)
+    response = mock_session.get(MOCKED_URL, expire_after=NEVER_EXPIRE)
     assert response.from_cache is True
     assert response.expires is None
 
 
-def test_per_request__prepared_request(mock_session):
-    """The same should work for PreparedRequests with CachedSession.send()"""
-    mock_session.expire_after = None
-    request = Request(method='GET', url=MOCKED_URL, headers={}, data=None).prepare()
+def test_request_expire_after__prepared_request(mock_session):
+    """Pre-request expiration should also work for PreparedRequests with CachedSession.send()"""
+    mock_session.settings.expire_after = None
+    request = Request('GET', MOCKED_URL, headers={}, data=None).prepare()
     response = mock_session.send(request, expire_after=1)
     assert response.from_cache is False
     assert mock_session.send(request).from_cache is True
 
-    time.sleep(1)
+    sleep(1)
     response = mock_session.get(MOCKED_URL)
     assert response.from_cache is False
 
 
-def test_per_request__no_expiration(mock_session):
-    """A per-session expiration is set, but then overridden with no per-request expiration"""
-    mock_session.expire_after = 1
-    response = mock_session.get(MOCKED_URL, expire_after=-1)
-    assert response.from_cache is False
-    assert response.expires is None
+def test_request_only_if_cached__cached(mock_session):
+    """only_if_cached has no effect if the response is already cached"""
+    mock_session.get(MOCKED_URL)
+    response = mock_session.get(MOCKED_URL, only_if_cached=True)
+    assert response.from_cache is True
+    assert response.is_expired is False
 
 
-def test_unpickle_errors(mock_session):
-    """If there is an error during deserialization, the request should be made again"""
-    assert mock_session.get(MOCKED_URL_JSON).from_cache is False
+def test_request_only_if_cached__uncached(mock_session):
+    """only_if_cached should return a 504 response if it is not already cached"""
+    response = mock_session.get(MOCKED_URL, only_if_cached=True)
+    assert response.status_code == 504
+    with pytest.raises(HTTPError):
+        response.raise_for_status()
 
-    with patch.object(SQLitePickleDict, '__getitem__', side_effect=PickleError):
-        resp = mock_session.get(MOCKED_URL_JSON)
-        assert resp.from_cache is False
-        assert resp.json()['message'] == 'mock json response'
 
-    resp = mock_session.get(MOCKED_URL_JSON)
-    assert resp.from_cache is True
-    assert resp.json()['message'] == 'mock json response'
+def test_request_only_if_cached__expired(mock_session):
+    """By default, only_if_cached will not return an expired response"""
+    mock_session.get(MOCKED_URL, expire_after=1)
+    sleep(1)
+
+    response = mock_session.get(MOCKED_URL, only_if_cached=True)
+    assert response.status_code == 504
+
+
+def test_request_only_if_cached__stale_if_error__expired(mock_session):
+    """only_if_cached *will* return an expired response if stale_if_error is also set"""
+    mock_session.get(MOCKED_URL, expire_after=1)
+    sleep(1)
+
+    mock_session.settings.stale_if_error = True
+    response = mock_session.get(MOCKED_URL, only_if_cached=True)
+    assert response.status_code == 200
+    assert response.from_cache is True
+    assert response.is_expired is True
+
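only_if_cached never touches the network: an uncached (or, by default, expired) request yields a synthetic 504 response, as these tests verify. A sketch of handling that case (URL illustrative):

    from requests_cache import CachedSession

    session = CachedSession('demo_cache')
    response = session.get('https://example.com/api', only_if_cached=True)
    if response.status_code == 504:
        pass  # Not cached; decide whether to fetch for real
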
+
+def test_request_only_if_cached__skips_revalidate(mock_session):
+    """only_if_cached should skip other revalidation conditions if the response isn't expired.
+    This includes taking precedence over refresh=True.
+    """
+    mock_session.get(MOCKED_URL)
+    response = mock_session.get(MOCKED_URL, only_if_cached=True, refresh=True)
+    assert response.from_cache is True
+    assert response.is_expired is False
+
+
+def test_request_only_if_cached__prepared_request(mock_session):
+    """The only_if_cached option should also work for PreparedRequests with CachedSession.send()"""
+    request = Request('GET', MOCKED_URL, headers={}).prepare()
+    response = mock_session.send(request, only_if_cached=True)
+    assert response.status_code == 504
+    with pytest.raises(HTTPError):
+        response.raise_for_status()
+
+
+def test_request_refresh(mock_session):
+    """The refresh option should send a conditional request, if possible"""
+    response_1 = mock_session.get(MOCKED_URL_ETAG, expire_after=60)
+    response_2 = mock_session.get(MOCKED_URL_ETAG)
+    mock_session.mock_adapter.register_uri('GET', MOCKED_URL_ETAG, status_code=304)
+
+    response_3 = mock_session.get(MOCKED_URL_ETAG, refresh=True, expire_after=60)
+    response_4 = mock_session.get(MOCKED_URL_ETAG)
+
+    assert response_1.from_cache is False
+    assert response_2.from_cache is True
+    assert response_3.from_cache is True and response_3.revalidated is True
+    assert response_4.from_cache is True and response_4.revalidated is False
+
+    # Expect expiration to get reset after revalidation
+    assert response_2.expires < response_4.expires
+
+
+def test_request_refresh__no_validator(mock_session):
+    """The refresh option should result in a new (unconditional) request if the cached response has
+    no validator
+    """
+    response_1 = mock_session.get(MOCKED_URL, expire_after=60)
+    response_2 = mock_session.get(MOCKED_URL)
+    mock_session.mock_adapter.register_uri('GET', MOCKED_URL, status_code=304)
+
+    response_3 = mock_session.get(MOCKED_URL, refresh=True, expire_after=60)
+    response_4 = mock_session.get(MOCKED_URL)
+
+    assert response_1.from_cache is False
+    assert response_2.from_cache is True
+    assert response_3.from_cache is True and response_3.revalidated is False
+    assert response_2.expires == response_4.expires
+
+
+def test_request_refresh__prepared_request(mock_session):
+    """The refresh option should also work for PreparedRequests with CachedSession.send()"""
+    mock_session.settings.expire_after = 60
+    request = Request('GET', MOCKED_URL_ETAG, headers={}, data=None).prepare()
+    response_1 = mock_session.send(request)
+    response_2 = mock_session.send(request)
+    mock_session.mock_adapter.register_uri('GET', MOCKED_URL_ETAG, status_code=304)
+
+    response_3 = mock_session.send(request, refresh=True)
+    response_4 = mock_session.send(request)
+
+    assert response_1.from_cache is False
+    assert response_2.from_cache is True
+    assert response_3.from_cache is True
+
+    # Expect expiration to get reset after revalidation
+    assert response_2.expires < response_4.expires
+
+
+def test_request_force_refresh(mock_session):
+    """The force_refresh option should send and cache a new request. Any expire_after value provided
+    should overwrite the previous value."""
+    response_1 = mock_session.get(MOCKED_URL, expire_after=NEVER_EXPIRE)
+    response_2 = mock_session.get(MOCKED_URL, expire_after=360, force_refresh=True)
+    response_3 = mock_session.get(MOCKED_URL)
+
+    assert response_1.from_cache is False
+    assert response_2.from_cache is False
+    assert response_3.from_cache is True
+    assert response_3.expires is not None
+
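The difference between the two options: refresh sends a conditional request (ETag/Last-Modified) when the cached response has a validator, while force_refresh always re-fetches and overwrites. A sketch (URL illustrative):

    from requests_cache import CachedSession

    session = CachedSession('demo_cache')
    session.get('https://example.com/api')
    session.get('https://example.com/api', refresh=True)        # Revalidate if possible
    session.get('https://example.com/api', force_refresh=True)  # Unconditional re-fetch
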
+
+def test_request_force_refresh__prepared_request(mock_session):
+    """The force_refresh option should also work for PreparedRequests with CachedSession.send()"""
+    mock_session.settings.expire_after = 60
+    request = Request('GET', MOCKED_URL, headers={}, data=None)
+    response_1 = mock_session.send(request.prepare())
+    response_2 = mock_session.send(request.prepare(), force_refresh=True)
+    response_3 = mock_session.send(request.prepare())
+
+    assert response_1.from_cache is False
+    assert response_2.from_cache is False
+    assert response_3.from_cache is True
+    assert response_3.expires is not None
+
+
+# Deprecated methods
+# -----------------------------------------------------
+
+
+def test_remove_expired_responses(mock_session):
+    with ignore_deprecation(), patch.object(mock_session.cache, 'delete') as mock_delete:
+        mock_session.remove_expired_responses()
+        mock_delete.assert_called_once_with(expired=True, invalid=True)
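
As the test shows, the deprecated remove_expired_responses() is now a thin wrapper around the new BaseCache.delete() method. A migration sketch:

    from requests_cache import CachedSession

    session = CachedSession('demo_cache')
    # 0.9.x: session.remove_expired_responses()
    session.cache.delete(expired=True, invalid=True)  # 1.0 equivalent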
