New Upstream Release - python-werkzeug

Ready changes

Summary

Merged new upstream version: 2.2.3+ds (was: 2.2.2).

Diff

diff --git a/.flake8 b/.flake8
new file mode 100644
index 0000000..6ac59c8
--- /dev/null
+++ b/.flake8
@@ -0,0 +1,29 @@
+[flake8]
+extend-select =
+    # bugbear
+    B
+    # bugbear opinions
+    B9
+    # implicit str concat
+    ISC
+extend-ignore =
+    # slice notation whitespace, invalid
+    E203
+    # import at top, too many circular import fixes
+    E402
+    # line length, handled by bugbear B950
+    E501
+    # bare except, handled by bugbear B001
+    E722
+    # zip with strict=, requires python >= 3.10
+    B905
+    # string formatting opinion, B028 renamed to B907
+    B028
+    B907
+# up to 88 allowed by bugbear B950
+max-line-length = 80
+per-file-ignores =
+    # __init__ exports names
+    **/__init__.py: F401
+    # LocalProxy assigns lambdas
+    src/werkzeug/local.py: E731
diff --git a/.github/workflows/lock.yaml b/.github/workflows/lock.yaml
index b4f7633..c790fae 100644
--- a/.github/workflows/lock.yaml
+++ b/.github/workflows/lock.yaml
@@ -1,15 +1,25 @@
 name: 'Lock threads'
+# Lock closed issues that have not received any further activity for
+# two weeks. This does not close open issues, only humans may do that.
+# We find that it is easier to respond to new issues with fresh examples
+# rather than continuing discussions on old issues.
 
 on:
   schedule:
     - cron: '0 0 * * *'
 
+permissions:
+  issues: write
+  pull-requests: write
+
+concurrency:
+  group: lock
+
 jobs:
   lock:
     runs-on: ubuntu-latest
     steps:
-      - uses: dessant/lock-threads@v3
+      - uses: dessant/lock-threads@c1b35aecc5cdb1a34539d14196df55838bb2f836
         with:
-          github-token: ${{ github.token }}
           issue-inactive-days: 14
           pr-inactive-days: 14
diff --git a/.github/workflows/publish.yaml b/.github/workflows/publish.yaml
new file mode 100644
index 0000000..0ed4955
--- /dev/null
+++ b/.github/workflows/publish.yaml
@@ -0,0 +1,72 @@
+name: Publish
+on:
+  push:
+    tags:
+      - '*'
+jobs:
+  build:
+    runs-on: ubuntu-latest
+    outputs:
+      hash: ${{ steps.hash.outputs.hash }}
+    steps:
+      - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
+      - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912
+        with:
+          python-version: '3.x'
+          cache: 'pip'
+          cache-dependency-path: 'requirements/*.txt'
+      - run: pip install -r requirements/build.txt
+      # Use the commit date instead of the current date during the build.
+      - run: echo "SOURCE_DATE_EPOCH=$(git log -1 --pretty=%ct)" >> $GITHUB_ENV
+      - run: python -m build
+      # Generate hashes used for provenance.
+      - name: generate hash
+        id: hash
+        run: cd dist && echo "hash=$(sha256sum * | base64 -w0)" >> $GITHUB_OUTPUT
+      - uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce
+        with:
+          path: ./dist
+  provenance:
+    needs: ['build']
+    permissions:
+      actions: read
+      id-token: write
+      contents: write
+    # Can't pin with hash due to how this workflow works.
+    uses: slsa-framework/slsa-github-generator/.github/workflows/generator_generic_slsa3.yml@v1.4.0
+    with:
+      base64-subjects: ${{ needs.build.outputs.hash }}
+  create-release:
+    # Upload the sdist, wheels, and provenance to a GitHub release. They remain
+    # available as build artifacts for a while as well.
+    needs: ['provenance']
+    runs-on: ubuntu-latest
+    permissions:
+      contents: write
+    steps:
+      - uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a
+      - name: create release
+        run: >
+          gh release create --draft --repo ${{ github.repository }}
+          ${{ github.ref_name }}
+          *.intoto.jsonl/* artifact/*
+        env:
+          GH_TOKEN: ${{ github.token }}
+  publish-pypi:
+    needs: ['provenance']
+    # Wait for approval before attempting to upload to PyPI. This allows reviewing the
+    # files in the draft release.
+    environment: 'publish'
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a
+      # Try uploading to Test PyPI first, in case something fails.
+      - uses: pypa/gh-action-pypi-publish@c7f29f7adef1a245bd91520e94867e5c6eedddcc
+        with:
+          password: ${{ secrets.TEST_PYPI_TOKEN }}
+          repository_url: https://test.pypi.org/legacy/
+          packages_dir: artifact/
+      - uses: pypa/gh-action-pypi-publish@c7f29f7adef1a245bd91520e94867e5c6eedddcc
+        with:
+          password: ${{ secrets.PYPI_TOKEN }}
+          packages_dir: artifact/
diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml
index d4441ff..a85f6c7 100644
--- a/.github/workflows/tests.yaml
+++ b/.github/workflows/tests.yaml
@@ -24,18 +24,19 @@ jobs:
       fail-fast: false
       matrix:
         include:
-          - {name: Linux, python: '3.10', os: ubuntu-latest, tox: py310}
-          - {name: Windows, python: '3.10', os: windows-latest, tox: py310}
-          - {name: Mac, python: '3.10', os: macos-latest, tox: py310}
-          - {name: '3.11-dev', python: '3.11-dev', os: ubuntu-latest, tox: py311}
+          - {name: Linux, python: '3.11', os: ubuntu-latest, tox: py311}
+          - {name: Windows, python: '3.11', os: windows-latest, tox: py311}
+          - {name: Mac, python: '3.11', os: macos-latest, tox: py311}
+          - {name: '3.12-dev', python: '3.12-dev', os: ubuntu-latest, tox: py312}
+          - {name: '3.10', python: '3.10', os: ubuntu-latest, tox: py310}
           - {name: '3.9', python: '3.9', os: ubuntu-latest, tox: py39}
           - {name: '3.8', python: '3.8', os: ubuntu-latest, tox: py38}
           - {name: '3.7', python: '3.7', os: ubuntu-latest, tox: py37}
-          - {name: 'PyPy', python: 'pypy-3.7', os: ubuntu-latest, tox: pypy37}
-          - {name: Typing, python: '3.10', os: ubuntu-latest, tox: typing}
+          - {name: 'PyPy', python: 'pypy-3.9', os: ubuntu-latest, tox: pypy39}
+          - {name: Typing, python: '3.11', os: ubuntu-latest, tox: typing}
     steps:
-      - uses: actions/checkout@v3
-      - uses: actions/setup-python@v4
+      - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
+      - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912
         with:
           python-version: ${{ matrix.python }}
           cache: 'pip'
@@ -46,10 +47,10 @@ jobs:
           pip install -U setuptools
           python -m pip install -U pip
       - name: cache mypy
-        uses: actions/cache@v3.0.4
+        uses: actions/cache@58c146cc91c5b9e778e71775dfe9bf1442ad9a12
         with:
           path: ./.mypy_cache
           key: mypy|${{ matrix.python }}|${{ hashFiles('setup.cfg') }}
         if: matrix.tox == 'typing'
       - run: pip install tox
-      - run: tox -e ${{ matrix.tox }}
+      - run: tox run -e ${{ matrix.tox }}
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 55f8c13..44b6847 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -3,12 +3,12 @@ ci:
   autoupdate_schedule: monthly
 repos:
   - repo: https://github.com/asottile/pyupgrade
-    rev: v2.37.3
+    rev: v3.3.1
     hooks:
       - id: pyupgrade
         args: ["--py37-plus"]
   - repo: https://github.com/asottile/reorder_python_imports
-    rev: v3.8.2
+    rev: v3.9.0
     hooks:
       - id: reorder-python-imports
         name: Reorder Python imports (src, tests)
@@ -21,22 +21,22 @@ repos:
         args: ["--application-directories", "examples"]
         additional_dependencies: ["setuptools>60.9"]
   - repo: https://github.com/psf/black
-    rev: 22.6.0
+    rev: 23.1.0
     hooks:
       - id: black
   - repo: https://github.com/PyCQA/flake8
-    rev: 5.0.4
+    rev: 6.0.0
     hooks:
       - id: flake8
         additional_dependencies:
           - flake8-bugbear
           - flake8-implicit-str-concat
   - repo: https://github.com/peterdemin/pip-compile-multi
-    rev: v2.4.6
+    rev: v2.6.1
     hooks:
       - id: pip-compile-multi-verify
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.3.0
+    rev: v4.4.0
     hooks:
       - id: fix-byte-order-marker
       - id: trailing-whitespace
diff --git a/CHANGES.rst b/CHANGES.rst
index 18e68af..71527e8 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -1,5 +1,32 @@
 .. currentmodule:: werkzeug
 
+Version 2.2.3
+-------------
+
+Released 2023-02-14
+
+-   Ensure that URL rules using path converters will redirect with strict slashes when
+    the trailing slash is missing. :issue:`2533`
+-   Type signature for ``get_json`` specifies that return type is not optional when
+    ``silent=False``. :issue:`2508`
+-   ``parse_content_range_header`` returns ``None`` for a value like ``bytes */-1``
+    where the length is invalid, instead of raising an ``AssertionError``. :issue:`2531`
+-   Address remaining ``ResourceWarning`` related to the socket used by ``run_simple``.
+    Remove ``prepare_socket``, which now happens when creating the server. :issue:`2421`
+-   Update pre-existing headers for ``multipart/form-data`` requests with the test
+    client. :issue:`2549`
+-   Fix handling of header extended parameters such that they are no longer quoted.
+    :issue:`2529`
+-   ``LimitedStream.read`` works correctly when wrapping a stream that may not return
+    the requested size in one ``read`` call. :issue:`2558`
+-   A cookie header that starts with ``=`` is treated as an empty key and discarded,
+    rather than stripping the leading ``==``.
+-   Specify a maximum number of multipart parts, default 1000, after which a
+    ``RequestEntityTooLarge`` exception is raised on parsing. This mitigates a DoS
+    attack where a larger number of form/file parts would result in disproportionate
+    resource use.
+
+
 Version 2.2.2
 -------------
 
@@ -54,8 +81,9 @@ Released 2022-07-23
     debug console. :pr:`2439`
 -   Fix compatibility with Python 3.11 by ensuring that ``end_lineno``
     and ``end_col_offset`` are present on AST nodes. :issue:`2425`
--   Add a new faster matching router based on a state
-    machine. :pr:`2433`
+-   Add a new faster URL matching router based on a state machine. If a custom converter
+    needs to match a ``/`` it must set the class variable ``part_isolating = False``.
+    :pr:`2433`
 -   Fix branch leaf path masking branch paths when strict-slashes is
     disabled. :issue:`1074`
 -   Names within options headers are always converted to lowercase. This
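
To illustrate the new multipart part limit described in the 2.2.3 changelog above, here is a minimal sketch using the test utilities; the field names, the count of five, and the limit of 3 are arbitrary choices for the example:

```python
from werkzeug.exceptions import RequestEntityTooLarge
from werkzeug.test import EnvironBuilder
from werkzeug.wrappers import Request

# Build a multipart request with five form parts.
builder = EnvironBuilder(
    method="POST",
    data={f"field{i}": str(i) for i in range(5)},
    content_type="multipart/form-data",
)
req = Request(builder.get_environ())
req.max_form_parts = 3  # tighten the 2.2.3 default of 1000

try:
    req.form["field0"]  # accessing .form triggers parsing
except RequestEntityTooLarge:
    print("too many multipart parts")
```
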
diff --git a/debian/changelog b/debian/changelog
index 40eb24c..64333bf 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -1,3 +1,10 @@
+python-werkzeug (2.2.3+ds-1) UNRELEASED; urgency=low
+
+  * New upstream release.
+  * Drop patch remove-test_exclude_patterns-test.patch, present upstream.
+
+ -- Debian Janitor <janitor@jelmer.uk>  Wed, 01 Mar 2023 02:52:33 -0000
+
 python-werkzeug (2.2.2-2) unstable; urgency=medium
 
   * Uploading to unstable.
diff --git a/debian/patches/preserve-any-existing-PYTHONPATH-in-tests.patch b/debian/patches/preserve-any-existing-PYTHONPATH-in-tests.patch
index f70e6fc..fc72bd0 100644
--- a/debian/patches/preserve-any-existing-PYTHONPATH-in-tests.patch
+++ b/debian/patches/preserve-any-existing-PYTHONPATH-in-tests.patch
@@ -7,11 +7,11 @@ Subject: [PATCH] Preserve any existing PYTHONPATH in tests
  tests/conftest.py | 10 ++++++++--
  1 file changed, 8 insertions(+), 2 deletions(-)
 
-diff --git a/tests/conftest.py b/tests/conftest.py
-index 4ad1ff23..7200d286 100644
---- a/tests/conftest.py
-+++ b/tests/conftest.py
-@@ -118,9 +118,15 @@ def dev_server(xprocess, request, tmp_path):
+Index: python-werkzeug.git/tests/conftest.py
+===================================================================
+--- python-werkzeug.git.orig/tests/conftest.py
++++ python-werkzeug.git/tests/conftest.py
+@@ -103,9 +103,15 @@ def dev_server(xprocess, request, tmp_pa
          class Starter(ProcessStarter):
              args = [sys.executable, run_path, name, json.dumps(kwargs)]
              # Extend the existing env, otherwise Windows and CI fails.
@@ -29,6 +29,3 @@ index 4ad1ff23..7200d286 100644
  
              @cached_property
              def pattern(self):
--- 
-2.31.1
-
diff --git a/debian/patches/remove-test_exclude_patterns-test.patch b/debian/patches/remove-test_exclude_patterns-test.patch
deleted file mode 100644
index fbc4f2c..0000000
--- a/debian/patches/remove-test_exclude_patterns-test.patch
+++ /dev/null
@@ -1,26 +0,0 @@
-Description: Remove test_exclude_patterns test
- Under the sbuild environment, the assert doesn't work and sys.prefix gets
- wrong. So I'm just removing this test.
-Author: Thomas Goirand <zigo@debian.org>
-Forwarded: not-needed
-Last-Update: 2022-09-14
-
---- python-werkzeug-2.2.2.orig/tests/test_serving.py
-+++ python-werkzeug-2.2.2/tests/test_serving.py
-@@ -125,16 +125,6 @@ def test_windows_get_args_for_reloading(
-     assert rv == argv
- 
- 
--@pytest.mark.parametrize("find", [_find_stat_paths, _find_watchdog_paths])
--def test_exclude_patterns(find):
--    # Imported paths under sys.prefix will be included by default.
--    paths = find(set(), set())
--    assert any(p.startswith(sys.prefix) for p in paths)
--    # Those paths should be excluded due to the pattern.
--    paths = find(set(), {f"{sys.prefix}*"})
--    assert not any(p.startswith(sys.prefix) for p in paths)
--
--
- @pytest.mark.filterwarnings("ignore::pytest.PytestUnraisableExceptionWarning")
- @pytest.mark.dev_server
- def test_wrong_protocol(standard_app):
diff --git a/debian/patches/series b/debian/patches/series
index ead6d01..b34e0ba 100644
--- a/debian/patches/series
+++ b/debian/patches/series
@@ -1,2 +1 @@
 preserve-any-existing-PYTHONPATH-in-tests.patch
-remove-test_exclude_patterns-test.patch
diff --git a/docs/installation.rst b/docs/installation.rst
index 9c5aa7f..8cf3ab2 100644
--- a/docs/installation.rst
+++ b/docs/installation.rst
@@ -9,12 +9,6 @@ We recommend using the latest version of Python. Werkzeug supports
 Python 3.7 and newer.
 
 
-Dependencies
-------------
-
-Werkzeug does not have any direct dependencies.
-
-
 Optional dependencies
 ~~~~~~~~~~~~~~~~~~~~~
 
diff --git a/docs/request_data.rst b/docs/request_data.rst
index 83c6278..e55841e 100644
--- a/docs/request_data.rst
+++ b/docs/request_data.rst
@@ -73,23 +73,26 @@ read the stream *or* call :meth:`~Request.get_data`.
 Limiting Request Data
 ---------------------
 
-To avoid being the victim of a DDOS attack you can set the maximum
-accepted content length and request field sizes.  The :class:`Request`
-class has two attributes for that: :attr:`~Request.max_content_length`
-and :attr:`~Request.max_form_memory_size`.
-
-The first one can be used to limit the total content length.  For example
-by setting it to ``1024 * 1024 * 16`` the request won't accept more than
-16MB of transmitted data.
-
-Because certain data can't be moved to the hard disk (regular post data)
-whereas temporary files can, there is a second limit you can set.  The
-:attr:`~Request.max_form_memory_size` limits the size of `POST`
-transmitted form data.  By setting it to ``1024 * 1024 * 2`` you can make
-sure that all in memory-stored fields are not more than 2MB in size.
-
-This however does *not* affect in-memory stored files if the
-`stream_factory` used returns a in-memory file.
+The :class:`Request` class provides a few attributes to control how much data is
+processed from the request body. This can help mitigate DoS attacks that craft the
+request in such a way that the server uses too many resources to handle it. Each of
+these limits will raise a :exc:`~werkzeug.exceptions.RequestEntityTooLarge` if they are
+exceeded.
+
+-   :attr:`~Request.max_content_length` Stop reading request data after this number
+    of bytes. It's better to configure this in the WSGI server or HTTP server, rather
+    than the WSGI application.
+-   :attr:`~Request.max_form_memory_size` Stop reading request data if any form part is
+    larger than this number of bytes. While file parts can be moved to disk, regular
+    form field data is stored in memory only.
+-   :attr:`~Request.max_form_parts` Stop reading request data if more than this number
+    of parts are sent in multipart form data. This is useful to stop a very large number
+    of very small parts, especially file parts. The default is 1000.
+
+Using Werkzeug to set these limits is only one layer of protection. WSGI servers
+and HTTP servers should set their own limits on size and timeouts. The operating system
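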
+or container manager should set limits on memory and processing time for server
+processes.
 
 
 How to extend Parsing?
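
As a sketch of how the three limits documented above might be configured application-wide via a ``Request`` subclass (the values shown are illustrative, not recommendations):

```python
from werkzeug.wrappers import Request

class LimitedRequest(Request):
    # Reject bodies larger than 16 MB outright.
    max_content_length = 16 * 1024 * 1024
    # Keep any single in-memory form part under 2 MB.
    max_form_memory_size = 2 * 1024 * 1024
    # Allow far fewer multipart parts than the default 1000.
    max_form_parts = 200
```
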
diff --git a/docs/utils.rst b/docs/utils.rst
index 0d4e339..6afa4ab 100644
--- a/docs/utils.rst
+++ b/docs/utils.rst
@@ -23,6 +23,8 @@ General Helpers
 
 .. autofunction:: send_file
 
+.. autofunction:: send_from_directory
+
 .. autofunction:: import_string
 
 .. autofunction:: find_modules
diff --git a/requirements/build.in b/requirements/build.in
new file mode 100644
index 0000000..378eac2
--- /dev/null
+++ b/requirements/build.in
@@ -0,0 +1 @@
+build
diff --git a/requirements/build.txt b/requirements/build.txt
new file mode 100644
index 0000000..a735b3d
--- /dev/null
+++ b/requirements/build.txt
@@ -0,0 +1,17 @@
+# SHA1:80754af91bfb6d1073585b046fe0a474ce868509
+#
+# This file is autogenerated by pip-compile-multi
+# To update, run:
+#
+#    pip-compile-multi
+#
+build==0.9.0
+    # via -r requirements/build.in
+packaging==23.0
+    # via build
+pep517==0.13.0
+    # via build
+tomli==2.0.1
+    # via
+    #   build
+    #   pep517
diff --git a/requirements/dev.txt b/requirements/dev.txt
index 50e233e..d9c1c3a 100644
--- a/requirements/dev.txt
+++ b/requirements/dev.txt
@@ -8,55 +8,55 @@
 -r docs.txt
 -r tests.txt
 -r typing.txt
-build==0.8.0
+build==0.9.0
     # via pip-tools
+cachetools==5.2.0
+    # via tox
 cfgv==3.3.1
     # via pre-commit
+chardet==5.1.0
+    # via tox
 click==8.1.3
     # via
     #   pip-compile-multi
     #   pip-tools
-distlib==0.3.4
+colorama==0.4.6
+    # via tox
+distlib==0.3.6
     # via virtualenv
-filelock==3.7.1
+filelock==3.9.0
     # via
     #   tox
     #   virtualenv
-greenlet==1.1.2 ; python_version < "3.11"
-    # via -r requirements/tests.in
-identify==2.5.1
+identify==2.5.12
     # via pre-commit
 nodeenv==1.7.0
     # via pre-commit
-pep517==0.12.0
+pep517==0.13.0
     # via build
-pip-compile-multi==2.4.5
+pip-compile-multi==2.6.1
     # via -r requirements/dev.in
-pip-tools==6.8.0
+pip-tools==6.12.1
     # via pip-compile-multi
-platformdirs==2.5.2
-    # via virtualenv
-pre-commit==2.20.0
-    # via -r requirements/dev.in
-pyyaml==6.0
-    # via pre-commit
-six==1.16.0
+platformdirs==2.6.2
     # via
     #   tox
     #   virtualenv
-toml==0.10.2
-    # via
-    #   pre-commit
-    #   tox
+pre-commit==2.21.0
+    # via -r requirements/dev.in
+pyproject-api==1.4.0
+    # via tox
+pyyaml==6.0
+    # via pre-commit
 toposort==1.7
     # via pip-compile-multi
-tox==3.25.1
+tox==4.2.3
     # via -r requirements/dev.in
-virtualenv==20.15.1
+virtualenv==20.17.1
     # via
     #   pre-commit
     #   tox
-wheel==0.37.1
+wheel==0.38.4
     # via pip-tools
 
 # The following packages are considered to be unsafe in a requirements file:
diff --git a/requirements/docs.txt b/requirements/docs.txt
index 8238e78..f347006 100644
--- a/requirements/docs.txt
+++ b/requirements/docs.txt
@@ -7,15 +7,15 @@
 #
 alabaster==0.7.12
     # via sphinx
-babel==2.10.3
+babel==2.11.0
     # via sphinx
-certifi==2022.6.15
+certifi==2022.12.7
     # via requests
-charset-normalizer==2.1.0
+charset-normalizer==2.1.1
     # via requests
-docutils==0.18.1
+docutils==0.19
     # via sphinx
-idna==3.3
+idna==3.4
     # via requests
 imagesize==1.4.1
     # via sphinx
@@ -23,23 +23,21 @@ jinja2==3.1.2
     # via sphinx
 markupsafe==2.1.1
     # via jinja2
-packaging==21.3
+packaging==22.0
     # via
     #   pallets-sphinx-themes
     #   sphinx
-pallets-sphinx-themes==2.0.2
+pallets-sphinx-themes==2.0.3
     # via -r requirements/docs.in
-pygments==2.12.0
+pygments==2.14.0
     # via sphinx
-pyparsing==3.0.9
-    # via packaging
-pytz==2022.1
+pytz==2022.7
     # via babel
 requests==2.28.1
     # via sphinx
 snowballstemmer==2.2.0
     # via sphinx
-sphinx==5.0.2
+sphinx==6.1.1
     # via
     #   -r requirements/docs.in
     #   pallets-sphinx-themes
@@ -61,5 +59,5 @@ sphinxcontrib-qthelp==1.0.3
     # via sphinx
 sphinxcontrib-serializinghtml==1.1.5
     # via sphinx
-urllib3==1.26.10
+urllib3==1.26.13
     # via requests
diff --git a/requirements/tests.txt b/requirements/tests.txt
index 689d8ba..98e7df8 100644
--- a/requirements/tests.txt
+++ b/requirements/tests.txt
@@ -5,40 +5,40 @@
 #
 #    pip-compile-multi
 #
-attrs==21.4.0
+attrs==22.2.0
     # via pytest
 cffi==1.15.1
     # via cryptography
-cryptography==37.0.4
+cryptography==39.0.0
     # via -r requirements/tests.in
 ephemeral-port-reserve==1.1.4
     # via -r requirements/tests.in
-greenlet==1.1.2 ; python_version < "3.11"
+exceptiongroup==1.1.0
+    # via pytest
+greenlet==2.0.1 ; python_version < "3.11"
     # via -r requirements/tests.in
 iniconfig==1.1.1
     # via pytest
-packaging==21.3
+packaging==22.0
     # via pytest
 pluggy==1.0.0
     # via pytest
-psutil==5.9.1
+psutil==5.9.4
     # via pytest-xprocess
 py==1.11.0
-    # via pytest
+    # via pytest-xprocess
 pycparser==2.21
     # via cffi
-pyparsing==3.0.9
-    # via packaging
-pytest==7.1.2
+pytest==7.2.0
     # via
     #   -r requirements/tests.in
     #   pytest-timeout
     #   pytest-xprocess
 pytest-timeout==2.1.0
     # via -r requirements/tests.in
-pytest-xprocess==0.19.0
+pytest-xprocess==0.22.2
     # via -r requirements/tests.in
 tomli==2.0.1
     # via pytest
-watchdog==2.1.9
+watchdog==2.2.1
     # via -r requirements/tests.in
diff --git a/requirements/typing.in b/requirements/typing.in
index e17c43d..23ab158 100644
--- a/requirements/typing.in
+++ b/requirements/typing.in
@@ -2,3 +2,4 @@ mypy
 types-contextvars
 types-dataclasses
 types-setuptools
+watchdog
diff --git a/requirements/typing.txt b/requirements/typing.txt
index 1f6de2c..35ccf49 100644
--- a/requirements/typing.txt
+++ b/requirements/typing.txt
@@ -1,11 +1,11 @@
-# SHA1:95499f7e92b572adde012b13e1ec99dbbb2f7089
+# SHA1:162796b1b3ac7a29da65fe0e32278f14b68ed8c8
 #
 # This file is autogenerated by pip-compile-multi
 # To update, run:
 #
 #    pip-compile-multi
 #
-mypy==0.961
+mypy==0.991
     # via -r requirements/typing.in
 mypy-extensions==0.4.3
     # via mypy
@@ -15,7 +15,11 @@ types-contextvars==2.4.7
     # via -r requirements/typing.in
 types-dataclasses==0.6.6
     # via -r requirements/typing.in
-types-setuptools==62.6.1
+types-docutils==0.19.1.1
+    # via types-setuptools
+types-setuptools==65.6.0.3
     # via -r requirements/typing.in
-typing-extensions==4.3.0
+typing-extensions==4.4.0
     # via mypy
+watchdog==2.2.1
+    # via -r requirements/typing.in
diff --git a/setup.cfg b/setup.cfg
index 2a1c2e4..03fb5b6 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -59,33 +59,6 @@ source =
     src
     */site-packages
 
-[flake8]
-# B = bugbear
-# E = pycodestyle errors
-# F = flake8 pyflakes
-# W = pycodestyle warnings
-# B9 = bugbear opinions
-# ISC = implicit str concat
-select = B, E, F, W, B9, ISC
-ignore =
-    # slice notation whitespace, invalid
-    E203
-    # import at top, too many circular import fixes
-    E402
-    # line length, handled by bugbear B950
-    E501
-    # bare except, handled by bugbear B001
-    E722
-    # bin op line break, invalid
-    W503
-# up to 88 allowed by bugbear B950
-max-line-length = 80
-per-file-ignores =
-    # __init__ exports names
-    **/__init__.py: F401
-    # LocalProxy assigns lambdas
-    src/werkzeug/local.py: E731
-
 [mypy]
 files = src/werkzeug
 python_version = 3.7
diff --git a/src/werkzeug/__init__.py b/src/werkzeug/__init__.py
index fd7f8d2..c20ac29 100644
--- a/src/werkzeug/__init__.py
+++ b/src/werkzeug/__init__.py
@@ -3,4 +3,4 @@ from .test import Client as Client
 from .wrappers import Request as Request
 from .wrappers import Response as Response
 
-__version__ = "2.2.2"
+__version__ = "2.2.3"
diff --git a/src/werkzeug/_internal.py b/src/werkzeug/_internal.py
index 4636647..f95207a 100644
--- a/src/werkzeug/_internal.py
+++ b/src/werkzeug/_internal.py
@@ -34,7 +34,7 @@ _quote_re = re.compile(rb"[\\].")
 _legal_cookie_chars_re = rb"[\w\d!#%&\'~_`><@,:/\$\*\+\-\.\^\|\)\(\?\}\{\=]"
 _cookie_re = re.compile(
     rb"""
-    (?P<key>[^=;]+)
+    (?P<key>[^=;]*)
     (?:\s*=\s*
         (?P<val>
             "(?:[^\\"]|\\.)*" |
@@ -382,16 +382,21 @@ def _cookie_parse_impl(b: bytes) -> t.Iterator[t.Tuple[bytes, bytes]]:
     """Lowlevel cookie parsing facility that operates on bytes."""
     i = 0
     n = len(b)
+    b += b";"
 
     while i < n:
-        match = _cookie_re.search(b + b";", i)
+        match = _cookie_re.match(b, i)
+
         if not match:
             break
 
-        key = match.group("key").strip()
-        value = match.group("val") or b""
         i = match.end(0)
+        key = match.group("key").strip()
+
+        if not key:
+            continue
 
+        value = match.group("val") or b""
         yield key, _cookie_unquote(value)
 
 
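
A quick sketch of the cookie-parsing change above: with the key pattern relaxed to ``[^=;]*`` and empty keys skipped, a pair with a leading ``=`` is now discarded instead of corrupting the following pair:

```python
from werkzeug.http import parse_cookie

# The "==bad" pair has an empty key and is dropped; "good" survives.
cookies = parse_cookie("==bad; good=1")
print(cookies.get("good"))  # 1
print("" in cookies)  # False
```
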
diff --git a/src/werkzeug/_reloader.py b/src/werkzeug/_reloader.py
index 57f3117..8af296d 100644
--- a/src/werkzeug/_reloader.py
+++ b/src/werkzeug/_reloader.py
@@ -20,7 +20,7 @@ prefix = {*_ignore_always, sys.prefix, sys.exec_prefix}
 
 if hasattr(sys, "real_prefix"):
     # virtualenv < 20
-    prefix.add(sys.real_prefix)  # type: ignore[attr-defined]
+    prefix.add(sys.real_prefix)
 
 _stat_ignore_scan = tuple(prefix)
 del prefix
@@ -309,7 +309,7 @@ class WatchdogReloaderLoop(ReloaderLoop):
         super().__init__(*args, **kwargs)
         trigger_reload = self.trigger_reload
 
-        class EventHandler(PatternMatchingEventHandler):  # type: ignore
+        class EventHandler(PatternMatchingEventHandler):
             def on_any_event(self, event):  # type: ignore
                 trigger_reload(event.src_path)
 
diff --git a/src/werkzeug/datastructures.py b/src/werkzeug/datastructures.py
index 43ee8c7..a293dfd 100644
--- a/src/werkzeug/datastructures.py
+++ b/src/werkzeug/datastructures.py
@@ -1226,7 +1226,7 @@ class Headers:
                 (_unicodify_header_value(k), _unicodify_header_value(v))
                 for (k, v) in value
             ]
-            for (_, v) in value:
+            for _, v in value:
                 self._validate_value(v)
             if isinstance(key, int):
                 self._list[key] = value[0]
diff --git a/src/werkzeug/debug/__init__.py b/src/werkzeug/debug/__init__.py
index e0dcc65..24d19bb 100644
--- a/src/werkzeug/debug/__init__.py
+++ b/src/werkzeug/debug/__init__.py
@@ -329,7 +329,7 @@ class DebuggedApplication:
             app_iter = self.app(environ, start_response)
             yield from app_iter
             if hasattr(app_iter, "close"):
-                app_iter.close()  # type: ignore
+                app_iter.close()
         except Exception as e:
             if hasattr(app_iter, "close"):
                 app_iter.close()  # type: ignore
diff --git a/src/werkzeug/debug/repr.py b/src/werkzeug/debug/repr.py
index c0872f1..d9c28da 100644
--- a/src/werkzeug/debug/repr.py
+++ b/src/werkzeug/debug/repr.py
@@ -132,7 +132,7 @@ class DebugReprGenerator:
 
     def regex_repr(self, obj: t.Pattern) -> str:
         pattern = repr(obj.pattern)
-        pattern = codecs.decode(pattern, "unicode-escape", "ignore")  # type: ignore
+        pattern = codecs.decode(pattern, "unicode-escape", "ignore")
         pattern = f"r{pattern}"
         return f're.compile(<span class="string regex">{pattern}</span>)'
 
diff --git a/src/werkzeug/debug/tbtools.py b/src/werkzeug/debug/tbtools.py
index ea90de9..d56f739 100644
--- a/src/werkzeug/debug/tbtools.py
+++ b/src/werkzeug/debug/tbtools.py
@@ -184,7 +184,7 @@ def _process_traceback(
         }
 
         if hasattr(fs, "colno"):
-            frame_args["colno"] = fs.colno  # type: ignore[attr-defined]
+            frame_args["colno"] = fs.colno
             frame_args["end_colno"] = fs.end_colno  # type: ignore[attr-defined]
 
         new_stack.append(DebugFrameSummary(**frame_args))
diff --git a/src/werkzeug/exceptions.py b/src/werkzeug/exceptions.py
index 013df72..739bd90 100644
--- a/src/werkzeug/exceptions.py
+++ b/src/werkzeug/exceptions.py
@@ -205,7 +205,7 @@ class BadRequestKeyError(BadRequest, KeyError):
             KeyError.__init__(self, arg)
 
     @property  # type: ignore
-    def description(self) -> str:  # type: ignore
+    def description(self) -> str:
         if self.show_exception:
             return (
                 f"{self._description}\n"
diff --git a/src/werkzeug/formparser.py b/src/werkzeug/formparser.py
index 10d58ca..bebb2fc 100644
--- a/src/werkzeug/formparser.py
+++ b/src/werkzeug/formparser.py
@@ -179,6 +179,8 @@ class FormDataParser:
     :param cls: an optional dict class to use.  If this is not specified
                        or `None` the default :class:`MultiDict` is used.
     :param silent: If set to False parsing errors will not be caught.
+    :param max_form_parts: The maximum number of parts to be parsed. If this is
+        exceeded, a :exc:`~exceptions.RequestEntityTooLarge` exception is raised.
     """
 
     def __init__(
@@ -190,6 +192,8 @@ class FormDataParser:
         max_content_length: t.Optional[int] = None,
         cls: t.Optional[t.Type[MultiDict]] = None,
         silent: bool = True,
+        *,
+        max_form_parts: t.Optional[int] = None,
     ) -> None:
         if stream_factory is None:
             stream_factory = default_stream_factory
@@ -199,6 +203,7 @@ class FormDataParser:
         self.errors = errors
         self.max_form_memory_size = max_form_memory_size
         self.max_content_length = max_content_length
+        self.max_form_parts = max_form_parts
 
         if cls is None:
             cls = MultiDict
@@ -281,6 +286,7 @@ class FormDataParser:
             self.errors,
             max_form_memory_size=self.max_form_memory_size,
             cls=self.cls,
+            max_form_parts=self.max_form_parts,
         )
         boundary = options.get("boundary", "").encode("ascii")
 
@@ -346,10 +352,12 @@ class MultiPartParser:
         max_form_memory_size: t.Optional[int] = None,
         cls: t.Optional[t.Type[MultiDict]] = None,
         buffer_size: int = 64 * 1024,
+        max_form_parts: t.Optional[int] = None,
     ) -> None:
         self.charset = charset
         self.errors = errors
         self.max_form_memory_size = max_form_memory_size
+        self.max_form_parts = max_form_parts
 
         if stream_factory is None:
             stream_factory = default_stream_factory
@@ -409,7 +417,9 @@ class MultiPartParser:
             [None],
         )
 
-        parser = MultipartDecoder(boundary, self.max_form_memory_size)
+        parser = MultipartDecoder(
+            boundary, self.max_form_memory_size, max_parts=self.max_form_parts
+        )
 
         fields = []
         files = []
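
The new keyword-only argument threaded through ``FormDataParser`` and ``MultiPartParser`` above can also be passed directly; a sketch with an arbitrary limit of 50:

```python
from werkzeug.formparser import FormDataParser

# Parsing a multipart body with more than 50 parts will raise
# werkzeug.exceptions.RequestEntityTooLarge.
parser = FormDataParser(max_form_parts=50)
```
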
diff --git a/src/werkzeug/http.py b/src/werkzeug/http.py
index 9777685..0a7bc73 100644
--- a/src/werkzeug/http.py
+++ b/src/werkzeug/http.py
@@ -190,6 +190,15 @@ class COOP(Enum):
     SAME_ORIGIN = "same-origin"
 
 
+def _is_extended_parameter(key: str) -> bool:
+    """Per RFC 5987/8187, "extended" values may *not* be quoted.
+    This is in keeping with browser implementations. So we test
+    using this function to see if the key indicates this parameter
+    follows the `ext-parameter` syntax (using a trailing '*').
+    """
+    return key.strip().endswith("*")
+
+
 def quote_header_value(
     value: t.Union[str, int], extra_chars: str = "", allow_token: bool = True
 ) -> str:
@@ -254,6 +263,8 @@ def dump_options_header(
     for key, value in options.items():
         if value is None:
             segments.append(key)
+        elif _is_extended_parameter(key):
+            segments.append(f"{key}={value}")
         else:
             segments.append(f"{key}={quote_header_value(value)}")
     return "; ".join(segments)
@@ -282,6 +293,8 @@ def dump_header(
         for key, value in iterable.items():
             if value is None:
                 items.append(key)
+            elif _is_extended_parameter(key):
+                items.append(f"{key}={value}")
             else:
                 items.append(
                     f"{key}={quote_header_value(value, allow_token=allow_token)}"
@@ -818,6 +831,9 @@ def parse_content_range_header(
             return None
 
     if rng == "*":
+        if not is_byte_range_valid(None, None, length):
+            return None
+
         return ds.ContentRange(units, None, None, length, on_update=on_update)
     elif "-" not in rng:
         return None
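
To show the effect of ``_is_extended_parameter`` above: keys using the RFC 5987 ``ext-parameter`` syntax (trailing ``*``) are now emitted unquoted, while plain values are still quoted as before. The filenames here are arbitrary:

```python
from werkzeug.http import dump_options_header

value = dump_options_header(
    "attachment",
    {
        "filename": "snow man.txt",  # plain parameter, gets quoted
        "filename*": "UTF-8''snow%20man.txt",  # extended, left unquoted
    },
)
print(value)
# attachment; filename="snow man.txt"; filename*=UTF-8''snow%20man.txt
```
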
diff --git a/src/werkzeug/local.py b/src/werkzeug/local.py
index 70e9bf7..9927a0a 100644
--- a/src/werkzeug/local.py
+++ b/src/werkzeug/local.py
@@ -291,7 +291,7 @@ class _ProxyLookup:
             # A C function, use partial to bind the first argument.
 
             def bind_f(instance: "LocalProxy", obj: t.Any) -> t.Callable:
-                return partial(f, obj)  # type: ignore
+                return partial(f, obj)
 
         else:
             # Use getattr, which will produce a bound method.
@@ -313,7 +313,7 @@ class _ProxyLookup:
             return self
 
         try:
-            obj = instance._get_current_object()  # type: ignore[misc]
+            obj = instance._get_current_object()
         except RuntimeError:
             if self.fallback is None:
                 raise
diff --git a/src/werkzeug/middleware/lint.py b/src/werkzeug/middleware/lint.py
index 6b54630..fcf3b41 100644
--- a/src/werkzeug/middleware/lint.py
+++ b/src/werkzeug/middleware/lint.py
@@ -164,7 +164,7 @@ class GuardedIterator:
         self.closed = True
 
         if hasattr(self._iterator, "close"):
-            self._iterator.close()  # type: ignore
+            self._iterator.close()
 
         if self.headers_set:
             status_code, headers = self.headers_set
diff --git a/src/werkzeug/middleware/profiler.py b/src/werkzeug/middleware/profiler.py
index 200dae0..f91e33b 100644
--- a/src/werkzeug/middleware/profiler.py
+++ b/src/werkzeug/middleware/profiler.py
@@ -106,7 +106,7 @@ class ProfilerMiddleware:
             response_body.extend(app_iter)
 
             if hasattr(app_iter, "close"):
-                app_iter.close()  # type: ignore
+                app_iter.close()
 
         profile = Profile()
         start = time.time()
diff --git a/src/werkzeug/routing/matcher.py b/src/werkzeug/routing/matcher.py
index d22b05a..05370c3 100644
--- a/src/werkzeug/routing/matcher.py
+++ b/src/werkzeug/routing/matcher.py
@@ -127,7 +127,14 @@ class StateMachineMatcher:
                     remaining = []
                 match = re.compile(test_part.content).match(target)
                 if match is not None:
-                    rv = _match(new_state, remaining, values + list(match.groups()))
+                    groups = list(match.groups())
+                    if test_part.suffixed:
+                        # If a part_isolating=False part has a slash suffix, remove the
+                        # suffix from the match and check for the slash redirect next.
+                        suffix = groups.pop()
+                        if suffix == "/":
+                            remaining = [""]
+                    rv = _match(new_state, remaining, values + groups)
                     if rv is not None:
                         return rv
 
diff --git a/src/werkzeug/routing/rules.py b/src/werkzeug/routing/rules.py
index a61717a..7b37890 100644
--- a/src/werkzeug/routing/rules.py
+++ b/src/werkzeug/routing/rules.py
@@ -36,6 +36,7 @@ class RulePart:
     content: str
     final: bool
     static: bool
+    suffixed: bool
     weight: Weighting
 
 
@@ -631,7 +632,11 @@ class Rule(RuleFactory):
                         argument_weights,
                     )
                     yield RulePart(
-                        content=content, final=final, static=static, weight=weight
+                        content=content,
+                        final=final,
+                        static=static,
+                        suffixed=False,
+                        weight=weight,
                     )
                     content = ""
                     static = True
@@ -641,6 +646,12 @@ class Rule(RuleFactory):
 
             pos = match.end()
 
+        suffixed = False
+        if final and content[-1] == "/":
+            # If a converter is part_isolating=False (matches slashes) and ends with a
+            # slash, augment the regex to support slash redirects.
+            suffixed = True
+            content = content[:-1] + "(?<!/)(/?)"
         if not static:
             content += r"\Z"
         weight = Weighting(
@@ -649,7 +660,17 @@ class Rule(RuleFactory):
             -len(argument_weights),
             argument_weights,
         )
-        yield RulePart(content=content, final=final, static=static, weight=weight)
+        yield RulePart(
+            content=content,
+            final=final,
+            static=static,
+            suffixed=suffixed,
+            weight=weight,
+        )
+        if suffixed:
+            yield RulePart(
+                content="", final=False, static=True, suffixed=False, weight=weight
+            )
 
     def compile(self) -> None:
         """Compiles the regular expression and stores it."""
@@ -665,7 +686,11 @@ class Rule(RuleFactory):
         if domain_rule == "":
             self._parts = [
                 RulePart(
-                    content="", final=False, static=True, weight=Weighting(0, [], 0, [])
+                    content="",
+                    final=False,
+                    static=True,
+                    suffixed=False,
+                    weight=Weighting(0, [], 0, []),
                 )
             ]
         else:
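
A sketch of the slash-redirect behaviour that the ``suffixed`` flag above restores for path converters (issue 2533); the rule and hostname are arbitrary:

```python
from werkzeug.routing import Map, RequestRedirect, Rule

url_map = Map([Rule("/docs/<path:page>/", endpoint="docs")])
adapter = url_map.bind("example.org")

try:
    adapter.match("/docs/a/b")  # trailing slash missing
except RequestRedirect as exc:
    print(exc.new_url)  # http://example.org/docs/a/b/
```
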
diff --git a/src/werkzeug/sansio/http.py b/src/werkzeug/sansio/http.py
index 8288882..6b22738 100644
--- a/src/werkzeug/sansio/http.py
+++ b/src/werkzeug/sansio/http.py
@@ -126,10 +126,6 @@ def parse_cookie(
     def _parse_pairs() -> t.Iterator[t.Tuple[str, str]]:
         for key, val in _cookie_parse_impl(cookie):  # type: ignore
             key_str = _to_str(key, charset, errors, allow_none_charset=True)
-
-            if not key_str:
-                continue
-
             val_str = _to_str(val, charset, errors, allow_none_charset=True)
             yield key_str, val_str
 
diff --git a/src/werkzeug/sansio/multipart.py b/src/werkzeug/sansio/multipart.py
index d8abeb3..2684e5d 100644
--- a/src/werkzeug/sansio/multipart.py
+++ b/src/werkzeug/sansio/multipart.py
@@ -87,10 +87,13 @@ class MultipartDecoder:
         self,
         boundary: bytes,
         max_form_memory_size: Optional[int] = None,
+        *,
+        max_parts: Optional[int] = None,
     ) -> None:
         self.buffer = bytearray()
         self.complete = False
         self.max_form_memory_size = max_form_memory_size
+        self.max_parts = max_parts
         self.state = State.PREAMBLE
         self.boundary = boundary
 
@@ -118,6 +121,7 @@ class MultipartDecoder:
             re.MULTILINE,
         )
         self._search_position = 0
+        self._parts_decoded = 0
 
     def last_newline(self) -> int:
         try:
@@ -191,6 +195,10 @@ class MultipartDecoder:
                     )
                 self.state = State.DATA
                 self._search_position = 0
+                self._parts_decoded += 1
+
+                if self.max_parts is not None and self._parts_decoded > self.max_parts:
+                    raise RequestEntityTooLarge()
             else:
                 # Update the search start position to be equal to the
                 # current buffer length (already searched) minus a
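
Since the sans-IO decoder above now accepts ``max_parts``, it can be used standalone as well; a minimal sketch with an arbitrary boundary and limit:

```python
from werkzeug.sansio.multipart import MultipartDecoder

decoder = MultipartDecoder(b"boundary", max_parts=100)
# Feed it bytes with decoder.receive_data(chunk) and pull events with
# decoder.next_event(); decoding a 101st part raises
# werkzeug.exceptions.RequestEntityTooLarge.
```
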
diff --git a/src/werkzeug/security.py b/src/werkzeug/security.py
index 18d0919..4599fb3 100644
--- a/src/werkzeug/security.py
+++ b/src/werkzeug/security.py
@@ -12,7 +12,7 @@ SALT_CHARS = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
 DEFAULT_PBKDF2_ITERATIONS = 260000
 
 _os_alt_seps: t.List[str] = list(
-    sep for sep in [os.path.sep, os.path.altsep] if sep is not None and sep != "/"
+    sep for sep in [os.sep, os.path.altsep] if sep is not None and sep != "/"
 )
 
 
diff --git a/src/werkzeug/serving.py b/src/werkzeug/serving.py
index c482469..2a2e74d 100644
--- a/src/werkzeug/serving.py
+++ b/src/werkzeug/serving.py
@@ -221,9 +221,7 @@ class WSGIRequestHandler(BaseHTTPRequestHandler):
         try:
             # binary_form=False gives nicer information, but wouldn't be compatible with
             # what Nginx or Apache could return.
-            peer_cert = self.connection.getpeercert(  # type: ignore[attr-defined]
-                binary_form=True
-            )
+            peer_cert = self.connection.getpeercert(binary_form=True)
             if peer_cert is not None:
                 # Nginx and Apache use PEM format.
                 environ["SSL_CLIENT_CERT"] = ssl.DER_cert_to_PEM_cert(peer_cert)
@@ -329,7 +327,7 @@ class WSGIRequestHandler(BaseHTTPRequestHandler):
                     self.wfile.write(b"0\r\n\r\n")
             finally:
                 if hasattr(application_iter, "close"):
-                    application_iter.close()  # type: ignore
+                    application_iter.close()
 
         try:
             execute(self.server.app)
@@ -659,6 +657,7 @@ class BaseWSGIServer(HTTPServer):
     multithread = False
     multiprocess = False
     request_queue_size = LISTEN_QUEUE
+    allow_reuse_address = True
 
     def __init__(
         self,
@@ -710,10 +709,36 @@ class BaseWSGIServer(HTTPServer):
             try:
                 self.server_bind()
                 self.server_activate()
+            except OSError as e:
+                # Catch connection issues and show them without the traceback. Show
+                # extra instructions for address not found, and for macOS.
+                self.server_close()
+                print(e.strerror, file=sys.stderr)
+
+                if e.errno == errno.EADDRINUSE:
+                    print(
+                        f"Port {port} is in use by another program. Either identify and"
+                        " stop that program, or start the server with a different"
+                        " port.",
+                        file=sys.stderr,
+                    )
+
+                    if sys.platform == "darwin" and port == 5000:
+                        print(
+                            "On macOS, try disabling the 'AirPlay Receiver' service"
+                            " from System Preferences -> Sharing.",
+                            file=sys.stderr,
+                        )
+
+                sys.exit(1)
             except BaseException:
                 self.server_close()
                 raise
         else:
+            # TCPServer automatically opens a socket even if bind_and_activate is False.
+            # Close it to silence a ResourceWarning.
+            self.server_close()
+
             # Use the passed in socket directly.
             self.socket = socket.fromfd(fd, address_family, socket.SOCK_STREAM)
             self.server_address = self.socket.getsockname()
@@ -879,60 +904,6 @@ def is_running_from_reloader() -> bool:
     return os.environ.get("WERKZEUG_RUN_MAIN") == "true"
 
 
-def prepare_socket(hostname: str, port: int) -> socket.socket:
-    """Prepare a socket for use by the WSGI server and reloader.
-
-    The socket is marked inheritable so that it can be kept across
-    reloads instead of breaking connections.
-
-    Catch errors during bind and show simpler error messages. For
-    "address already in use", show instructions for resolving the issue,
-    with special instructions for macOS.
-
-    This is called from :func:`run_simple`, but can be used separately
-    to control server creation with :func:`make_server`.
-    """
-    address_family = select_address_family(hostname, port)
-    server_address = get_sockaddr(hostname, port, address_family)
-    s = socket.socket(address_family, socket.SOCK_STREAM)
-    s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
-    s.set_inheritable(True)
-
-    # Remove the socket file if it already exists.
-    if address_family == af_unix:
-        server_address = t.cast(str, server_address)
-
-        if os.path.exists(server_address):
-            os.unlink(server_address)
-
-    # Catch connection issues and show them without the traceback. Show
-    # extra instructions for address not found, and for macOS.
-    try:
-        s.bind(server_address)
-    except OSError as e:
-        print(e.strerror, file=sys.stderr)
-
-        if e.errno == errno.EADDRINUSE:
-            print(
-                f"Port {port} is in use by another program. Either"
-                " identify and stop that program, or start the"
-                " server with a different port.",
-                file=sys.stderr,
-            )
-
-            if sys.platform == "darwin" and port == 5000:
-                print(
-                    "On macOS, try disabling the 'AirPlay Receiver'"
-                    " service from System Preferences -> Sharing.",
-                    file=sys.stderr,
-                )
-
-        sys.exit(1)
-
-    s.listen(LISTEN_QUEUE)
-    return s
-
-
 def run_simple(
     hostname: str,
     port: int,
@@ -1059,12 +1030,7 @@ def run_simple(
         application = DebuggedApplication(application, evalex=use_evalex)
 
     if not is_running_from_reloader():
-        s = prepare_socket(hostname, port)
-        fd = s.fileno()
-        # Silence a ResourceWarning about an unclosed socket. This object is no longer
-        # used, the server will create another with fromfd.
-        s.detach()
-        os.environ["WERKZEUG_SERVER_FD"] = str(fd)
+        fd = None
     else:
         fd = int(os.environ["WERKZEUG_SERVER_FD"])
 
@@ -1079,6 +1045,8 @@ def run_simple(
         ssl_context,
         fd=fd,
     )
+    srv.socket.set_inheritable(True)
+    os.environ["WERKZEUG_SERVER_FD"] = str(srv.fileno())
 
     if not is_running_from_reloader():
         srv.log_startup()
@@ -1087,12 +1055,15 @@ def run_simple(
     if use_reloader:
         from ._reloader import run_with_reloader
 
-        run_with_reloader(
-            srv.serve_forever,
-            extra_files=extra_files,
-            exclude_patterns=exclude_patterns,
-            interval=reloader_interval,
-            reloader_type=reloader_type,
-        )
+        try:
+            run_with_reloader(
+                srv.serve_forever,
+                extra_files=extra_files,
+                exclude_patterns=exclude_patterns,
+                interval=reloader_interval,
+                reloader_type=reloader_type,
+            )
+        finally:
+            srv.server_close()
     else:
         srv.serve_forever()
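
With ``prepare_socket`` removed above, server and socket creation happen together again, so typical usage is unchanged. A minimal sketch (host and port are arbitrary):

```python
from werkzeug.serving import run_simple

def app(environ, start_response):
    start_response("200 OK", [("Content-Type", "text/plain")])
    return [b"Hello from Werkzeug 2.2.3\n"]

if __name__ == "__main__":
    # The socket is created by the server itself now; bind errors such as
    # "address in use" produce the friendly messages shown above.
    run_simple("127.0.0.1", 8080, app, use_reloader=True)
```
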
diff --git a/src/werkzeug/test.py b/src/werkzeug/test.py
index edb4d4a..996f438 100644
--- a/src/werkzeug/test.py
+++ b/src/werkzeug/test.py
@@ -107,7 +107,8 @@ def stream_encode_multipart(
                     and mimetypes.guess_type(filename)[0]
                     or "application/octet-stream"
                 )
-            headers = Headers([("Content-Type", content_type)])
+            headers = value.headers
+            headers.update([("Content-Type", content_type)])
             if filename is None:
                 write_binary(encoder.send_event(Field(name=key, headers=headers)))
             else:
@@ -441,7 +442,7 @@ class EnvironBuilder:
             if input_stream is not None:
                 raise TypeError("can't provide input stream and data")
             if hasattr(data, "read"):
-                data = data.read()  # type: ignore
+                data = data.read()
             if isinstance(data, str):
                 data = data.encode(self.charset)
             if isinstance(data, bytes):
@@ -449,7 +450,7 @@ class EnvironBuilder:
                 if self.content_length is None:
                     self.content_length = len(data)
             else:
-                for key, value in _iter_data(data):  # type: ignore
+                for key, value in _iter_data(data):
                     if isinstance(value, (tuple, dict)) or hasattr(value, "read"):
                         self._add_file_from_data(key, value)
                     else:
diff --git a/src/werkzeug/utils.py b/src/werkzeug/utils.py
index 672e6e5..4ef5837 100644
--- a/src/werkzeug/utils.py
+++ b/src/werkzeug/utils.py
@@ -221,7 +221,7 @@ def secure_filename(filename: str) -> str:
     filename = unicodedata.normalize("NFKD", filename)
     filename = filename.encode("ascii", "ignore").decode("ascii")
 
-    for sep in os.path.sep, os.path.altsep:
+    for sep in os.sep, os.path.altsep:
         if sep:
             filename = filename.replace(sep, " ")
     filename = str(_filename_ascii_strip_re.sub("", "_".join(filename.split()))).strip(
@@ -352,7 +352,7 @@ def send_file(
 
     Never pass file paths provided by a user. The path is assumed to be
     trusted, so a user could craft a path to access a file you didn't
-    intend.
+    intend. Use :func:`send_from_directory` to safely serve user-provided paths.
 
     If the WSGI server sets a ``file_wrapper`` in ``environ``, it is
     used, otherwise Werkzeug's built-in wrapper is used. Alternatively,
@@ -562,9 +562,10 @@ def send_from_directory(
     If the final path does not point to an existing regular file,
     returns a 404 :exc:`~werkzeug.exceptions.NotFound` error.
 
-    :param directory: The directory that ``path`` must be located under.
-    :param path: The path to the file to send, relative to
-        ``directory``.
+    :param directory: The directory that ``path`` must be located under. This *must not*
+        be a value provided by the client, otherwise it becomes insecure.
+    :param path: The path to the file to send, relative to ``directory``. This is the
+        part of the path provided by the client, which is checked for security.
     :param environ: The WSGI environ for the current request.
     :param kwargs: Arguments to pass to :func:`send_file`.
 
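
A sketch of the usage pattern the clarified docstring above describes: the directory is trusted and fixed, while the client-supplied path is the part that gets validated (the ``static`` directory name is illustrative):

```python
from werkzeug.utils import send_from_directory
from werkzeug.wrappers import Request, Response

@Request.application
def serve_static(request: Request) -> Response:
    # request.path comes from the client; send_from_directory rejects
    # values that would escape "static" and returns 404 for missing files.
    return send_from_directory("static", request.path.lstrip("/"), request.environ)
```
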
diff --git a/src/werkzeug/wrappers/request.py b/src/werkzeug/wrappers/request.py
index 57b739c..2de77df 100644
--- a/src/werkzeug/wrappers/request.py
+++ b/src/werkzeug/wrappers/request.py
@@ -83,6 +83,13 @@ class Request(_SansIORequest):
     #: .. versionadded:: 0.5
     max_form_memory_size: t.Optional[int] = None
 
+    #: The maximum number of multipart parts to parse, passed to
+    #: :attr:`form_data_parser_class`. Parsing form data with more than this
+    #: many parts will raise :exc:`~.RequestEntityTooLarge`.
+    #:
+    #: .. versionadded:: 2.2.3
+    max_form_parts = 1000
+
     #: The form data parser that should be used.  Can be replaced to customize
     #: the form date parsing.
     form_data_parser_class: t.Type[FormDataParser] = FormDataParser
@@ -246,6 +253,7 @@ class Request(_SansIORequest):
             self.max_form_memory_size,
             self.max_content_length,
             self.parameter_storage_class,
+            max_form_parts=self.max_form_parts,
         )
 
     def _load_form_data(self) -> None:
@@ -543,6 +551,18 @@ class Request(_SansIORequest):
     # with sentinel values.
     _cached_json: t.Tuple[t.Any, t.Any] = (Ellipsis, Ellipsis)
 
+    @t.overload
+    def get_json(
+        self, force: bool = ..., silent: "te.Literal[False]" = ..., cache: bool = ...
+    ) -> t.Any:
+        ...
+
+    @t.overload
+    def get_json(
+        self, force: bool = ..., silent: bool = ..., cache: bool = ...
+    ) -> t.Optional[t.Any]:
+        ...
+
     def get_json(
         self, force: bool = False, silent: bool = False, cache: bool = True
     ) -> t.Optional[t.Any]:
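
The overloads added above change what type checkers infer, not the runtime behaviour; a sketch of the difference:

```python
from werkzeug.wrappers import Request

def handle(request: Request) -> None:
    data = request.get_json()  # silent=False (default): typed as Any
    maybe = request.get_json(silent=True)  # typed as Optional[Any]

    if maybe is None:
        return  # parsing failed silently
```
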
diff --git a/src/werkzeug/wrappers/response.py b/src/werkzeug/wrappers/response.py
index 7e888cb..454208c 100644
--- a/src/werkzeug/wrappers/response.py
+++ b/src/werkzeug/wrappers/response.py
@@ -439,7 +439,7 @@ class Response(_SansIOResponse):
            Can now be used in a with statement.
         """
         if hasattr(self.response, "close"):
-            self.response.close()  # type: ignore
+            self.response.close()
         for func in self._on_close:
             func()
 
@@ -645,6 +645,14 @@ class Response(_SansIOResponse):
         """
         return self.get_json()
 
+    @t.overload
+    def get_json(self, force: bool = ..., silent: "te.Literal[False]" = ...) -> t.Any:
+        ...
+
+    @t.overload
+    def get_json(self, force: bool = ..., silent: bool = ...) -> t.Optional[t.Any]:
+        ...
+
     def get_json(self, force: bool = False, silent: bool = False) -> t.Optional[t.Any]:
         """Parse :attr:`data` as JSON. Useful during testing.
 
diff --git a/src/werkzeug/wsgi.py b/src/werkzeug/wsgi.py
index 24ece0b..d74430d 100644
--- a/src/werkzeug/wsgi.py
+++ b/src/werkzeug/wsgi.py
@@ -611,9 +611,7 @@ class _RangeWrapper:
             self.end_byte = start_byte + byte_range
 
         self.read_length = 0
-        self.seekable = (
-            hasattr(iterable, "seekable") and iterable.seekable()  # type: ignore
-        )
+        self.seekable = hasattr(iterable, "seekable") and iterable.seekable()
         self.end_reached = False
 
     def __iter__(self) -> "_RangeWrapper":
@@ -665,7 +663,7 @@ class _RangeWrapper:
 
     def close(self) -> None:
         if hasattr(self.iterable, "close"):
-            self.iterable.close()  # type: ignore
+            self.iterable.close()
 
 
 def _make_chunk_iter(
@@ -930,37 +928,77 @@ class LimitedStream(io.IOBase):
 
         raise ClientDisconnected()
 
-    def exhaust(self, chunk_size: int = 1024 * 64) -> None:
-        """Exhaust the stream.  This consumes all the data left until the
-        limit is reached.
+    def _exhaust_chunks(self, chunk_size: int = 1024 * 64) -> t.Iterator[bytes]:
+        """Exhaust the stream by reading until the limit is reached or the client
+        disconnects, yielding each chunk.
+
+        :param chunk_size: How many bytes to read at a time.
 
-        :param chunk_size: the size for a chunk.  It will read the chunk
-                           until the stream is exhausted and throw away
-                           the results.
+        :meta private:
+
+        .. versionadded:: 2.2.3
         """
         to_read = self.limit - self._pos
-        chunk = chunk_size
+
         while to_read > 0:
-            chunk = min(to_read, chunk)
-            self.read(chunk)
-            to_read -= chunk
+            chunk = self.read(min(to_read, chunk_size))
+            yield chunk
+            to_read -= len(chunk)
+
+    def exhaust(self, chunk_size: int = 1024 * 64) -> None:
+        """Exhaust the stream by reading until the limit is reached or the client
+        disconnects, discarding the data.
+
+        :param chunk_size: How many bytes to read at a time.
+
+        .. versionchanged:: 2.2.3
+            Handle case where wrapped stream returns fewer bytes than requested.
+        """
+        for _ in self._exhaust_chunks(chunk_size):
+            pass
 
     def read(self, size: t.Optional[int] = None) -> bytes:
-        """Read `size` bytes or if size is not provided everything is read.
+        """Read up to ``size`` bytes from the underlying stream. If size is not
+        provided, read until the limit.
 
-        :param size: the number of bytes read.
+        If the limit is reached, :meth:`on_exhausted` is called, which returns empty
+        bytes.
+
+        If no bytes are read and the limit is not reached, or if an error occurs during
+        the read, :meth:`on_disconnect` is called, which raises
+        :exc:`.ClientDisconnected`.
+
+        :param size: The number of bytes to read. ``None``, default, reads until the
+            limit is reached.
+
+        .. versionchanged:: 2.2.3
+            Handle case where wrapped stream returns fewer bytes than requested.
         """
         if self._pos >= self.limit:
             return self.on_exhausted()
-        if size is None or size == -1:  # -1 is for consistence with file
-            size = self.limit
+
+        if size is None or size == -1:  # -1 is for consistency with file
+            # Keep reading from the wrapped stream until the limit is reached. Can't
+            # rely on stream.read(size) because it's not guaranteed to return size.
+            buf = bytearray()
+
+            for chunk in self._exhaust_chunks():
+                buf.extend(chunk)
+
+            return bytes(buf)
+
         to_read = min(self.limit - self._pos, size)
+
         try:
             read = self._read(to_read)
         except (OSError, ValueError):
             return self.on_disconnect()
-        if to_read and len(read) != to_read:
+
+        if to_read and not len(read):
+            # If no data was read, treat it as a disconnect. As long as some data was
+            # read, a subsequent call can still return more before reaching the limit.
             return self.on_disconnect()
+
         self._pos += len(read)
         return read
 
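Taken together, the rewritten read() distinguishes three outcomes: the limit was already reached (on_exhausted() returns empty bytes), the wrapped stream produced some data (returned as-is, even if shorter than requested), and the wrapped stream produced nothing before the limit (treated as a disconnect). A minimal sketch of that behavior, illustrative rather than part of the diff:

import io

from werkzeug.exceptions import ClientDisconnected
from werkzeug.wsgi import LimitedStream

# A full read stops at the limit, not at the end of the wrapped stream.
stream = LimitedStream(io.BytesIO(b"abcdef"), 4)
assert stream.read() == b"abcd"
# The limit is reached, so on_exhausted() returns empty bytes.
assert stream.read() == b""

# exhaust() reads and discards everything up to the limit.
stream = LimitedStream(io.BytesIO(b"abcdef"), 4)
stream.exhaust()
assert stream.read() == b""

# No data before the limit is reached is treated as a client disconnect.
stream = LimitedStream(io.BytesIO(b""), 4)
try:
    stream.read()
except ClientDisconnected:
    print("client disconnected")
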
diff --git a/tests/conftest.py b/tests/conftest.py
index 7ce0896..b73202c 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -41,7 +41,9 @@ class DevServerClient:
         self.log = None
 
     def tail_log(self, path):
-        self.log = open(path)
+        # surrogateescape allows for handling of file streams
+        # containing junk binary values as normal text streams
+        self.log = open(path, errors="surrogateescape")
         self.log.read()
 
     def connect(self, **kwargs):
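For context on the conftest change: surrogateescape decodes otherwise-invalid bytes into lone surrogate code points instead of raising UnicodeDecodeError, so tailing a server log that may contain binary junk keeps working in text mode. A standalone illustration (app.log is a hypothetical path):

# Write a log-like file containing bytes that are not valid UTF-8.
with open("app.log", "wb") as f:
    f.write(b"ok line\n\xff junk\n")

# Strict decoding would raise; surrogateescape maps 0xFF to U+DCFF instead.
with open("app.log", encoding="utf-8", errors="surrogateescape") as f:
    text = f.read()

assert "\udcff" in text
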
diff --git a/tests/live_apps/data_app.py b/tests/live_apps/data_app.py
index a7158c7..561390a 100644
--- a/tests/live_apps/data_app.py
+++ b/tests/live_apps/data_app.py
@@ -5,12 +5,12 @@ from werkzeug.wrappers import Response
 
 
 @Request.application
-def app(request):
+def app(request: Request) -> Response:
     return Response(
         json.dumps(
             {
                 "environ": request.environ,
-                "form": request.form,
+                "form": request.form.to_dict(),
                 "files": {k: v.read().decode("utf8") for k, v in request.files.items()},
             },
             default=lambda x: str(x),
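request.form is a MultiDict, and how json.dumps handles it directly depends on encoder internals, so the test app now converts explicitly before serializing. A sketch of what to_dict() produces:

import json

from werkzeug.datastructures import MultiDict

form = MultiDict([("a", "1"), ("a", "2"), ("b", "3")])

# flat=True (the default) keeps the first value per key.
print(json.dumps(form.to_dict()))            # {"a": "1", "b": "3"}

# flat=False keeps every value, as lists.
print(json.dumps(form.to_dict(flat=False)))  # {"a": ["1", "2"], "b": ["3"]}
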
diff --git a/tests/test_formparser.py b/tests/test_formparser.py
index 49010b4..4c518b1 100644
--- a/tests/test_formparser.py
+++ b/tests/test_formparser.py
@@ -127,6 +127,15 @@ class TestFormParser:
         req.max_form_memory_size = 400
         assert req.form["foo"] == "Hello World"
 
+        req = Request.from_values(
+            input_stream=io.BytesIO(data),
+            content_length=len(data),
+            content_type="multipart/form-data; boundary=foo",
+            method="POST",
+        )
+        req.max_form_parts = 1
+        pytest.raises(RequestEntityTooLarge, lambda: req.form["foo"])
+
     def test_missing_multipart_boundary(self):
         data = (
             b"--foo\r\nContent-Disposition: form-field; name=foo\r\n\r\n"
diff --git a/tests/test_http.py b/tests/test_http.py
index 3760dc1..61940a1 100644
--- a/tests/test_http.py
+++ b/tests/test_http.py
@@ -354,6 +354,7 @@ class TestHTTPUtility:
         assert http.dump_header([1, 2, 3], allow_token=False) == '"1", "2", "3"'
         assert http.dump_header({"foo": "bar"}, allow_token=False) == 'foo="bar"'
         assert http.dump_header({"foo": "bar"}) == "foo=bar"
+        assert http.dump_header({"foo*": "UTF-8''bar"}) == "foo*=UTF-8''bar"
 
     def test_is_resource_modified(self):
         env = create_environ()
@@ -411,7 +412,8 @@ class TestHTTPUtility:
     def test_parse_cookie(self):
         cookies = http.parse_cookie(
             "dismiss-top=6; CP=null*; PHPSESSID=0a539d42abc001cdc762809248d4beed;"
-            'a=42; b="\\";"; ; fo234{=bar;blub=Blah; "__Secure-c"=d'
+            'a=42; b="\\";"; ; fo234{=bar;blub=Blah; "__Secure-c"=d;'
+            "==__Host-eq=bad;__Host-eq=good;"
         )
         assert cookies.to_dict() == {
             "CP": "null*",
@@ -422,6 +424,7 @@ class TestHTTPUtility:
             "fo234{": "bar",
             "blub": "Blah",
             '"__Secure-c"': "d",
+            "__Host-eq": "good",
         }
 
     def test_dump_cookie(self):
@@ -619,6 +622,9 @@ class TestRange:
         rv = http.parse_content_range_header("bytes 0-98/*asdfsa")
         assert rv is None
 
+        rv = http.parse_content_range_header("bytes */-1")
+        assert rv is None
+
         rv = http.parse_content_range_header("bytes 0-99/100")
         assert rv.to_header() == "bytes 0-99/100"
         rv.start = None
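The extended cookie test pins down that a pair with an empty name (the leading ==) is discarded rather than interpreted, so a malformed pair cannot shadow a later, well-formed __Host- prefixed cookie. The same check standalone:

from werkzeug.http import parse_cookie

cookies = parse_cookie("==__Host-eq=bad;__Host-eq=good;")

# The empty-name pair is dropped; only the well-formed pair survives.
assert cookies["__Host-eq"] == "good"
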
diff --git a/tests/test_routing.py b/tests/test_routing.py
index 15d25a7..bfb191c 100644
--- a/tests/test_routing.py
+++ b/tests/test_routing.py
@@ -163,6 +163,7 @@ def test_strict_slashes_redirect():
             r.Rule("/bar/", endpoint="get", methods=["GET"]),
             r.Rule("/bar", endpoint="post", methods=["POST"]),
             r.Rule("/foo/", endpoint="foo", methods=["POST"]),
+            r.Rule("/<path:var>/", endpoint="path", methods=["GET"]),
         ]
     )
     adapter = map.bind("example.org", "/")
@@ -170,6 +171,7 @@ def test_strict_slashes_redirect():
     # Check that the actual routes work

     assert adapter.match("/bar/", method="GET") == ("get", {})
     assert adapter.match("/bar", method="POST") == ("post", {})
+    assert adapter.match("/abc/", method="GET") == ("path", {"var": "abc"})
 
     # Check if exceptions are correct
     pytest.raises(r.RequestRedirect, adapter.match, "/bar", method="GET")
@@ -177,6 +179,9 @@ def test_strict_slashes_redirect():
     with pytest.raises(r.RequestRedirect) as error_info:
         adapter.match("/foo", method="POST")
     assert error_info.value.code == 308
+    with pytest.raises(r.RequestRedirect) as error_info:
+        adapter.match("/abc", method="GET")
+    assert error_info.value.new_url == "http://example.org/abc/"
 
     # Check differently defined order
     map = r.Map(
@@ -1434,6 +1439,9 @@ def test_strict_slashes_false():
         [
             r.Rule("/path1", endpoint="leaf_path", strict_slashes=False),
             r.Rule("/path2/", endpoint="branch_path", strict_slashes=False),
+            r.Rule(
+                "/<path:path>", endpoint="leaf_path_converter", strict_slashes=False
+            ),
         ],
     )
 
@@ -1443,6 +1451,14 @@ def test_strict_slashes_false():
     assert adapter.match("/path1/", method="GET") == ("leaf_path", {})
     assert adapter.match("/path2", method="GET") == ("branch_path", {})
     assert adapter.match("/path2/", method="GET") == ("branch_path", {})
+    assert adapter.match("/any", method="GET") == (
+        "leaf_path_converter",
+        {"path": "any"},
+    )
+    assert adapter.match("/any/", method="GET") == (
+        "leaf_path_converter",
+        {"path": "any/"},
+    )
 
 
 def test_invalid_rule():
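The new routing assertions cover how strict slashes interact with the <path:...> converter: with strict slashes, /abc redirects (308) to /abc/; with strict_slashes=False, both forms match and a trailing slash is captured by the converter. A condensed sketch of the non-strict case:

from werkzeug.routing import Map, Rule

url_map = Map([Rule("/<path:path>", endpoint="page", strict_slashes=False)])
adapter = url_map.bind("example.org", "/")

# Both forms match; the converter captures the trailing slash when present.
assert adapter.match("/any", method="GET") == ("page", {"path": "any"})
assert adapter.match("/any/", method="GET") == ("page", {"path": "any/"})
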
diff --git a/tests/test_send_file.py b/tests/test_send_file.py
index fc4299a..6732c84 100644
--- a/tests/test_send_file.py
+++ b/tests/test_send_file.py
@@ -107,6 +107,9 @@ def test_object_attachment_requires_name():
         ("Vögel.txt", "Vogel.txt", "V%C3%B6gel.txt"),
         # ":/" are not safe in filename* value
         ("те:/ст", '":/"', "%D1%82%D0%B5%3A%2F%D1%81%D1%82"),
+        # general test of extended parameter (non-quoted)
+        ("(тест.txt", '"(.txt"', "(%D1%82%D0%B5%D1%81%D1%82.txt"),
+        ("(test.txt", '"(test.txt"', None),
     ),
 )
 def test_non_ascii_name(name, ascii, utf8):
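These parameters check the Content-Disposition header for non-ASCII download names, which carries an ASCII-normalized filename fallback plus an RFC 2231 filename* value; characters such as ( must pass through the extended value unescaped. A sketch using send_file directly (expected output per the test's parametrized values):

import io

from werkzeug.test import EnvironBuilder
from werkzeug.utils import send_file

environ = EnvironBuilder().get_environ()
rv = send_file(
    io.BytesIO(b"data"),
    environ,
    as_attachment=True,
    download_name="(тест.txt",
)

# The header carries an ASCII fallback and a UTF-8 filename* parameter.
print(rv.headers["Content-Disposition"])
# attachment; filename="(.txt"; filename*=UTF-8''(%D1%82%D0%B5%D1%81%D1%82.txt
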
diff --git a/tests/test_serving.py b/tests/test_serving.py
index 0494828..b6dc254 100644
--- a/tests/test_serving.py
+++ b/tests/test_serving.py
@@ -125,6 +125,7 @@ def test_windows_get_args_for_reloading(monkeypatch, tmp_path):
     assert rv == argv
 
 
+@pytest.mark.filterwarnings("ignore::pytest.PytestUnraisableExceptionWarning")
 @pytest.mark.parametrize("find", [_find_stat_paths, _find_watchdog_paths])
 def test_exclude_patterns(find):
     # Imported paths under sys.prefix will be included by default.
@@ -254,6 +255,7 @@ def test_multiline_header_folding(standard_app):
 
 
 @pytest.mark.parametrize("endpoint", ["", "crash"])
+@pytest.mark.filterwarnings("ignore::pytest.PytestUnraisableExceptionWarning")
 @pytest.mark.dev_server
 def test_streaming_close_response(dev_server, endpoint):
     """When using HTTP/1.0, chunked encoding is not supported. Fall
@@ -265,6 +267,7 @@ def test_streaming_close_response(dev_server, endpoint):
     assert r.data == "".join(str(x) + "\n" for x in range(5)).encode()
 
 
+@pytest.mark.filterwarnings("ignore::pytest.PytestUnraisableExceptionWarning")
 @pytest.mark.dev_server
 def test_streaming_chunked_response(dev_server):
     """When using HTTP/1.1, use Transfer-Encoding: chunked for streamed
diff --git a/tests/test_wsgi.py b/tests/test_wsgi.py
index b0f71bc..cdc151d 100644
--- a/tests/test_wsgi.py
+++ b/tests/test_wsgi.py
@@ -1,6 +1,9 @@
+from __future__ import annotations
+
 import io
 import json
 import os
+import typing as t
 
 import pytest
 
@@ -165,21 +168,63 @@ def test_limited_stream_json_load():
 
 
 def test_limited_stream_disconnection():
-    io_ = io.BytesIO(b"A bit of content")
-
-    # disconnect detection on out of bytes
-    stream = wsgi.LimitedStream(io_, 255)
+    # disconnect because stream returns zero bytes
+    stream = wsgi.LimitedStream(io.BytesIO(), 255)
     with pytest.raises(ClientDisconnected):
         stream.read()
 
-    # disconnect detection because file close
-    io_ = io.BytesIO(b"x" * 255)
-    io_.close()
-    stream = wsgi.LimitedStream(io_, 255)
+    # disconnect because stream is closed
+    data = io.BytesIO(b"x" * 255)
+    data.close()
+    stream = wsgi.LimitedStream(data, 255)
+
     with pytest.raises(ClientDisconnected):
         stream.read()
 
 
+def test_limited_stream_read_with_raw_io():
+    class OneByteStream(t.BinaryIO):
+        def __init__(self, buf: bytes) -> None:
+            self.buf = buf
+            self.pos = 0
+
+        def read(self, size: int | None = None) -> bytes:
+            """Return one byte at a time regardless of requested size."""
+
+            if size is None or size == -1:
+                raise ValueError("expected read to be called with specific limit")
+
+            if size == 0 or len(self.buf) < self.pos:
+                return b""
+
+            b = self.buf[self.pos : self.pos + 1]
+            self.pos += 1
+            return b
+
+    stream = wsgi.LimitedStream(OneByteStream(b"foo"), 4)
+    assert stream.read(5) == b"f"
+    assert stream.read(5) == b"o"
+    assert stream.read(5) == b"o"
+
+    # The stream has fewer bytes (3) than the limit (4), therefore the read returns 0
+    # bytes before the limit is reached.
+    with pytest.raises(ClientDisconnected):
+        stream.read(5)
+
+    stream = wsgi.LimitedStream(OneByteStream(b"foo123"), 3)
+    assert stream.read(5) == b"f"
+    assert stream.read(5) == b"o"
+    assert stream.read(5) == b"o"
+    # The limit was reached, therefore the wrapper is exhausted, not disconnected.
+    assert stream.read(5) == b""
+
+    stream = wsgi.LimitedStream(OneByteStream(b"foo"), 3)
+    assert stream.read() == b"foo"
+
+    stream = wsgi.LimitedStream(OneByteStream(b"foo"), 2)
+    assert stream.read() == b"fo"
+
+
 def test_get_host_fallback():
     assert (
         wsgi.get_host(
diff --git a/tox.ini b/tox.ini
index 056ca0d..db5eccb 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,24 +1,31 @@
 [tox]
 envlist =
-    py3{11,10,9,8,7},pypy3{8,7}
+    py3{12,11,10,9,8,7}
+    pypy39
     style
     typing
     docs
 skip_missing_interpreters = true
 
 [testenv]
+package = wheel
+wheel_build_env = .pkg
 deps = -r requirements/tests.txt
 commands = pytest -v --tb=short --basetemp={envtmpdir} {posargs}
 
 [testenv:style]
 deps = pre-commit
 skip_install = true
-commands = pre-commit run --all-files --show-diff-on-failure
+commands = pre-commit run --all-files
 
 [testenv:typing]
+package = wheel
+wheel_build_env = .pkg
 deps = -r requirements/typing.txt
 commands = mypy
 
 [testenv:docs]
+package = wheel
+wheel_build_env = .pkg
 deps = -r requirements/docs.txt
 commands = sphinx-build -W -b html -d {envtmpdir}/doctrees docs {envtmpdir}/html
