New Upstream Release - networkx

Ready changes

Summary

Merged new upstream version: 3.1 (was: 3.0).

Resulting package

Built on 2023-08-22T23:31 (took 16m17s)

The resulting binary packages can be installed (if you have the apt repository enabled) by running:

apt install -t fresh-releases python3-networkx

Diff

diff --git a/.circleci/config.yml b/.circleci/config.yml
index 4639853..be5f7e0 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -19,11 +19,6 @@ jobs:
           command: |
             sudo apt-get install graphviz libgraphviz-dev
 
-      - run:
-          name: Install TeX
-          command: |
-            sudo apt-get install texlive texlive-latex-extra latexmk texlive-xetex fonts-freefont-otf xindy
-
       - run:
           name: Install pysal dependencies
           command: |
@@ -63,8 +58,6 @@ jobs:
             export OMP_NUM_THREADS=1
             source venv/bin/activate
             make -C doc/ html
-            make -C doc/ latexpdf LATEXOPTS="-file-line-error -halt-on-error"
-            cp -a doc/build/latex/networkx_reference.pdf doc/build/html/_downloads/.
 
       - store_artifacts:
           path: doc/build/html
diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs
index ad47efc..64b36ad 100644
--- a/.git-blame-ignore-revs
+++ b/.git-blame-ignore-revs
@@ -10,3 +10,5 @@ cea08c3bb8ca5aa2e167d534b0c5629205733762
 bec833c60c61e838722bf096da75949a9b519d1f
 be23fa0e422b51f4526828cb19b8105c89e5dcbb
 5c0b11afb4c0882a070d522ef3fa41482ba935d3
+5fcf01b9a43a097c4f579486023d1279b2b88619
+7297ae8a37dd3356b64d383cb0c55735a6364bcc
diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml
deleted file mode 100644
index 6b83ca6..0000000
--- a/.github/FUNDING.yml
+++ /dev/null
@@ -1 +0,0 @@
-custom: https://numfocus.org/donate-to-networkx
diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml
index a7d0c8f..b9f4e96 100644
--- a/.github/workflows/coverage.yml
+++ b/.github/workflows/coverage.yml
@@ -2,9 +2,13 @@ name: coverage
 
 on:
   push:
-    branches: [v2.8]
+    branches: [main]
   pull_request:
-    branches: [v2.8]
+    branches: [main]
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
 
 jobs:
   report:
@@ -15,7 +19,7 @@ jobs:
     steps:
       - uses: actions/checkout@v3
       - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v3
+        uses: actions/setup-python@v4
         with:
           python-version: ${{ matrix.python-version }}
 
diff --git a/.github/workflows/deploy-docs.yml b/.github/workflows/deploy-docs.yml
index fc714d4..dde6b8e 100644
--- a/.github/workflows/deploy-docs.yml
+++ b/.github/workflows/deploy-docs.yml
@@ -2,7 +2,7 @@ name: deploy
 
 on:
   push:
-    branches: [v2.8]
+    branches: [main]
 
 jobs:
   documentation:
@@ -14,7 +14,7 @@ jobs:
     steps:
       - uses: actions/checkout@v3
       - name: Set up Python
-        uses: actions/setup-python@v3
+        uses: actions/setup-python@v4
         with:
           python-version: "3.9"
 
@@ -22,8 +22,6 @@ jobs:
         run: |
           sudo apt-get update
           sudo apt-get install graphviz graphviz-dev
-          sudo apt-get install texlive texlive-latex-extra latexmk texlive-xetex
-          sudo apt-get install fonts-freefont-otf xindy
           sudo apt-get install libspatialindex-dev
 
       - name: Install packages
@@ -46,27 +44,25 @@ jobs:
       #   - Settings -> Secrets -> New Repository Secret
       #   - Make sure the name is the same as below: CI_DEPLOY_KEY
       - name: Install SSH agent
-        if: github.ref == 'refs/heads/v2.8'
-        uses: webfactory/ssh-agent@v0.5.4
+        if: github.ref == 'refs/heads/main'
+        uses: webfactory/ssh-agent@v0.7.0
         with:
           ssh-private-key: ${{ secrets.CI_DEPLOY_KEY }}
 
       - name: Build docs
-        if: github.ref == 'refs/heads/v2.8'
+        if: github.ref == 'refs/heads/main'
         run: |
           export DISPLAY=:99
           make -C doc/ html
-          make -C doc/ latexpdf LATEXOPTS="-file-line-error -halt-on-error"
-          cp -a doc/build/latex/networkx_reference.pdf doc/build/html/_downloads/.
 
       - name: Deploy docs
-        if: github.ref == 'refs/heads/v2.8'
-        uses: JamesIves/github-pages-deploy-action@releases/v3
+        if: github.ref == 'refs/heads/main'
+        uses: JamesIves/github-pages-deploy-action@releases/v4
         with:
-          GIT_CONFIG_NAME: nx-doc-deploy-bot
-          GIT_CONFIG_EMAIL: nx-doc-deploy-bot@nomail
-          FOLDER: doc/build/html
-          REPOSITORY_NAME: networkx/documentation
-          BRANCH: gh-pages
-          TARGET_FOLDER: latest
-          SSH: true
+          git-config-name: nx-doc-deploy-bot
+          git-config-email: nx-doc-deploy-bot@nomail
+          folder: doc/build/html
+          repository-name: networkx/documentation
+          branch: gh-pages
+          target-folder: latest
+          ssh-key: true
diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
index 4c6c698..1f47388 100644
--- a/.github/workflows/lint.yml
+++ b/.github/workflows/lint.yml
@@ -1,12 +1,10 @@
 name: style
 
-on:
-  push:
-    branches:
-      - v2.8
-  pull_request:
-    branches:
-      - v2.8
+on: [push, pull_request]
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
 
 jobs:
   format:
@@ -19,7 +17,7 @@ jobs:
       - uses: actions/checkout@v3
 
       - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v3
+        uses: actions/setup-python@v4
         with:
           python-version: ${{ matrix.python-version }}
 
diff --git a/.github/workflows/mypy.yml b/.github/workflows/mypy.yml
index 9aecd42..2aacbeb 100644
--- a/.github/workflows/mypy.yml
+++ b/.github/workflows/mypy.yml
@@ -2,6 +2,10 @@ name: Mypy
 
 on: [push, pull_request]
 
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
 jobs:
   type-check:
     runs-on: ubuntu-latest
@@ -13,7 +17,7 @@ jobs:
       - uses: actions/checkout@v3
 
       - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v3
+        uses: actions/setup-python@v4
         with:
           python-version: ${{ matrix.python-version }}
 
@@ -21,8 +25,6 @@ jobs:
         run: |
           pip install --upgrade pip wheel setuptools
           pip install -r requirements/developer.txt
-          # Rm below when yaml no longer a dep
-          pip install types-PyYAML
           pip install -e .
           pip list
 
diff --git a/.github/workflows/pytest-randomly.yml b/.github/workflows/pytest-randomly.yml
index 84168d2..39c72dd 100644
--- a/.github/workflows/pytest-randomly.yml
+++ b/.github/workflows/pytest-randomly.yml
@@ -6,11 +6,12 @@ on:
 
 jobs:
   randomize-test-order:
+    if: github.repository == 'networkx/networkx'
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
       - name: Set up Python
-        uses: actions/setup-python@v2
+        uses: actions/setup-python@v4
         with:
           python-version: "3.9"
 
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 6e402f2..cc82b10 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -1,12 +1,10 @@
 name: test
 
-on:
-  push:
-    branches:
-      - v2.8
-  pull_request:
-    branches:
-      - v2.8
+on: [push, pull_request]
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
 
 jobs:
   base:
@@ -18,7 +16,7 @@ jobs:
     steps:
       - uses: actions/checkout@v3
       - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v3
+        uses: actions/setup-python@v4
         with:
           python-version: ${{ matrix.python-version }}
 
@@ -42,7 +40,7 @@ jobs:
     steps:
       - uses: actions/checkout@v3
       - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v3
+        uses: actions/setup-python@v4
         with:
           python-version: ${{ matrix.python-version }}
 
@@ -57,6 +55,12 @@ jobs:
         run: |
           pytest --doctest-modules --durations=10 --pyargs networkx
 
+      - name: Test Dispatching
+        # Limit this to only a single combination from the matrix
+        if: ${{ (matrix.os == 'ubuntu') && (matrix.python-version == '3.11') }}
+        run: |
+          NETWORKX_GRAPH_CONVERT=nx-loopback pytest --doctest-modules --durations=10 --pyargs networkx
+
   extra:
     runs-on: ${{ matrix.os }}
     strategy:
@@ -66,7 +70,7 @@ jobs:
     steps:
       - uses: actions/checkout@v3
       - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v3
+        uses: actions/setup-python@v4
         with:
           python-version: ${{ matrix.python-version }}
 
@@ -126,7 +130,7 @@ jobs:
     steps:
       - uses: actions/checkout@v3
       - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v3
+        uses: actions/setup-python@v4
         with:
           python-version: ${{ matrix.python-version }}
 
diff --git a/.mypy.ini b/.mypy.ini
index f10a25b..6efdb23 100644
--- a/.mypy.ini
+++ b/.mypy.ini
@@ -1,3 +1,3 @@
 [mypy]
 ignore_missing_imports = True
-exclude = yaml|subgraphviews|reportviews*
+exclude = subgraphviews|reportviews*
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 3efcf9c..15e1332 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -3,28 +3,22 @@
 
 repos:
   - repo: https://github.com/psf/black
-    rev: 22.10.0
+    rev: 23.3.0
     hooks:
       - id: black
-  - repo: https://github.com/asottile/pyupgrade
-    rev: v3.0.0
-    hooks:
-      - id: pyupgrade
-        args: [--py38-plus]
-  - repo: https://github.com/asottile/blacken-docs
-    rev: v1.12.1
+  - repo: https://github.com/adamchainz/blacken-docs
+    rev: 1.13.0
     hooks:
       - id: blacken-docs
-  - repo: https://github.com/pycqa/isort
-    rev: 5.10.1
-    hooks:
-      - id: isort
-        name: isort (python)
-        args: ["--profile", "black", "--filter-files", "--skip", "__init__.py"]
-        files: ^networkx/
   - repo: https://github.com/pre-commit/mirrors-prettier
     rev: v2.7.1
     hooks:
       - id: prettier
         files: \.(html|md|yml|yaml)
         args: [--prose-wrap=preserve]
+  - repo: https://github.com/charliermarsh/ruff-pre-commit
+    rev: v0.0.258
+    hooks:
+      - id: ruff
+        args:
+          - --fix
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index 003ad8c..30024e2 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -47,7 +47,7 @@ Development Workflow
          # Install main development and runtime dependencies of networkx
          pip install -r requirements/default.txt -r requirements/test.txt -r requirements/developer.txt
          #
-         # (Optional) Install pygraphviz, pydot, and gdal packages
+         # (Optional) Install pygraphviz and pydot packages
          # These packages require that you have your system properly configured
          # and what that involves differs on various systems.
          # pip install -r requirements/extra.txt
@@ -55,7 +55,7 @@ Development Workflow
          # Build and install networkx from source
          pip install -e .
          # Test your installation
-         PYTHONPATH=. pytest networkx
+         pytest --pyargs networkx
 
      * ``conda`` (Anaconda or Miniconda)
 
@@ -68,7 +68,7 @@ Development Workflow
          # Install main development and runtime dependencies of networkx
          conda install -c conda-forge --file requirements/default.txt --file requirements/test.txt --file requirements/developer.txt
          #
-         # (Optional) Install pygraphviz, pydot, and gdal packages
+         # (Optional) Install pygraphviz and pydot packages
          # These packages require that you have your system properly configured
          # and what that involves differs on various systems.
          # conda install -c conda-forge --file requirements/extra.txt
@@ -76,7 +76,7 @@ Development Workflow
          # Install networkx from source
          pip install -e .
          # Test your installation
-         PYTHONPATH=. pytest networkx
+         pytest --pyargs networkx
 
    * Finally, we recommend you use a pre-commit hook, which runs black when
      you type ``git commit``::
@@ -325,11 +325,11 @@ Or the tests for a specific submodule::
 
 Or tests from a specific file::
 
-    $ PYTHONPATH=. pytest networkx/readwrite/tests/test_yaml.py
+    $ PYTHONPATH=. pytest networkx/readwrite/tests/test_edgelist.py
 
 Or a single test within that file::
 
-    $ PYTHONPATH=. pytest networkx/readwrite/tests/test_yaml.py::TestYaml::testUndirected
+    $ PYTHONPATH=. pytest networkx/readwrite/tests/test_edgelist.py::test_parse_edgelist_with_data_list
 
 Use ``--doctest-modules`` to run doctests.
 For example, run all tests and all doctests using::
@@ -355,7 +355,7 @@ detailing the test coverage::
   ...
 
 Adding tests
-------------
+~~~~~~~~~~~~
 
 If you're **new to testing**, see existing test files for examples of things to do.
 **Don't let the tests keep you from submitting your contribution!**
@@ -363,8 +363,67 @@ If you're not sure how to do this or are having trouble, submit your pull reques
 anyway.
 We will help you create the tests and sort out any kind of problem during code review.
 
+Image comparison
+~~~~~~~~~~~~~~~~
+
+To run image comparisons::
+
+    $ PYTHONPATH=. pytest --mpl --pyargs networkx.drawing
+
+The ``--mpl`` tells ``pytest`` to use ``pytest-mpl`` to compare the generated plots
+with baseline ones stored in ``networkx/drawing/tests/baseline``.
+
+To add a new test, add a test function to ``networkx/drawing/tests`` that
+returns a Matplotlib figure (or any figure object that has a savefig method)
+and decorate it as follows::
+
+    @pytest.mark.mpl_image_compare
+    def test_barbell():
+        fig = plt.figure()
+        barbell = nx.barbell_graph(4, 6)
+        # make sure to fix any randomness
+        pos = nx.spring_layout(barbell, seed=42)
+        nx.draw(barbell, pos=pos)
+        return fig
+
+Then create a baseline image to compare against later::
+
+    $ pytest -k test_barbell --mpl-generate-path=networkx/drawing/tests/baseline
+
+.. note: In order to keep the size of the repository from becoming too large, we
+   prefer to limit the size and number of baseline images we include.
+
+And test::
+
+    $ pytest -k test_barbell --mpl
+
+Documentation
+-------------
+
+Building the documentation locally requires that the additional dependencies
+specified in ``requirements/doc.txt`` be installed in your development
+environment.
+
+The documentation is built with ``sphinx``. To build the documentation locally,
+navigate to the ``doc/`` directory and::
+
+    make html
+
+This will generate both the reference documentation as well as the example
+gallery. If you want to build the documentation *without* building the
+gallery examples use::
+
+    make html-noplot
+
+The build products are stored in ``doc/build/`` and can be viewed directly.
+For example, to view the built html, open ``build/html/index.html``
+in your preferred web browser.
+
+.. note: ``sphinx`` supports many other output formats. Type ``make`` without
+   any arguments to see all the built-in options.
+
 Adding examples
----------------
+~~~~~~~~~~~~~~~
 
 The gallery examples are managed by
 `sphinx-gallery <https://sphinx-gallery.readthedocs.io/>`_.
@@ -372,6 +431,9 @@ The source files for the example gallery are ``.py`` scripts in ``examples/`` th
 generate one or more figures. They are executed automatically by sphinx-gallery when the
 documentation is built. The output is gathered and assembled into the gallery.
 
+Building the example gallery locally requires that the additional dependencies
+in ``requirements/example.txt`` be installed in your development environment.
+
 You can **add a new** plot by placing a new ``.py`` file in one of the directories inside the
 ``examples`` directory of the repository. See the other examples to get an idea for the
 format.
@@ -388,7 +450,7 @@ General guidelines for making a good gallery plot:
   documentation.
 
 Adding References
------------------
+~~~~~~~~~~~~~~~~~
 
 If you are contributing a new algorithm (or an improvement to a current algorithm),
 a reference paper or resource should also be provided in the function docstring.
@@ -415,41 +477,6 @@ to use the `wayback machine <https://web.archive.org/>`_ to create a snapshot of
 and link the internet archive link. The URL of the resource can change, and it creates unreachable
 links from the documentation.
 
-
-Image comparison
-----------------
-
-To run image comparisons::
-
-    $ PYTHONPATH=. pytest --mpl --pyargs networkx.drawing
-
-The ``--mpl`` tells ``pytest`` to use ``pytest-mpl`` to compare the generated plots
-with baseline ones stored in ``networkx/drawing/tests/baseline``.
-
-To add a new test, add a test function to ``networkx/drawing/tests`` that
-returns a Matplotlib figure (or any figure object that has a savefig method)
-and decorate it as follows::
-
-    @pytest.mark.mpl_image_compare
-    def test_barbell():
-        fig = plt.figure()
-        barbell = nx.barbell_graph(4, 6)
-        # make sure to fix any randomness
-        pos = nx.spring_layout(barbell, seed=42)
-        nx.draw(barbell, pos=pos)
-        return fig
-
-Then create a baseline image to compare against later::
-
-    $ pytest -k test_barbell --mpl-generate-path=networkx/drawing/tests/baseline
-
-.. note: In order to keep the size of the repository from becoming too large, we
-   prefer to limit the size and number of baseline images we include.
-
-And test::
-
-    $ pytest -k test_barbell --mpl
-
 Bugs
 ----
 
@@ -464,4 +491,4 @@ All interactions with the project are subject to the
 We also follow these policies:
 
 * :doc:`NetworkX deprecation policy <deprecations>`
-* :doc:`Python version support <nep-0029-deprecation_policy>`
+* :external+neps:doc:`Python version support <nep-0029-deprecation_policy>`
diff --git a/INSTALL.rst b/INSTALL.rst
index 792c099..b8ffc2d 100644
--- a/INSTALL.rst
+++ b/INSTALL.rst
@@ -78,7 +78,7 @@ Extra packages
 --------------
 
 .. note::
-   Some optional packages (e.g., `gdal`) may require compiling
+   Some optional packages may require compiling
    C or C++ code.  If you have difficulty installing these packages
    with `pip`, please consult the homepages of those packages.
 
@@ -89,8 +89,6 @@ version requirements.
 - `PyGraphviz <http://pygraphviz.github.io/>`_ and
   `pydot <https://github.com/erocarrera/pydot>`_ provide graph drawing
   and graph layout algorithms via `GraphViz <http://graphviz.org/>`_.
-- `PyYAML <http://pyyaml.org/>`_ provides YAML format reading and writing.
-- `gdal <http://www.gdal.org/>`_ provides shapefile format reading and writing.
 - `lxml <http://lxml.de/>`_ used for GraphML XML format.
 
 To install ``networkx`` and extra packages, do::
@@ -99,7 +97,7 @@ To install ``networkx`` and extra packages, do::
 
 To explicitly install all optional packages, do::
 
-    $ pip install pygraphviz pydot pyyaml gdal lxml
+    $ pip install pygraphviz pydot lxml
 
 Or, install any optional package (e.g., ``pygraphviz``) individually::
 
diff --git a/LICENSE.txt b/LICENSE.txt
index a274a66..42b6f17 100644
--- a/LICENSE.txt
+++ b/LICENSE.txt
@@ -2,7 +2,7 @@ NetworkX is distributed with the 3-clause BSD license.
 
 ::
 
-   Copyright (C) 2004-2022, NetworkX Developers
+   Copyright (C) 2004-2023, NetworkX Developers
    Aric Hagberg <hagberg@lanl.gov>
    Dan Schult <dschult@colgate.edu>
    Pieter Swart <swart@lanl.gov>
diff --git a/README.rst b/README.rst
index e76b856..ae9986c 100644
--- a/README.rst
+++ b/README.rst
@@ -1,6 +1,9 @@
 NetworkX
 ========
 
+`NetworkX Survey 2023!! <https://forms.gle/NUGcBxyjx5onbAgc8>`_ 🎉 Fill out the survey to tell us about your ideas, complaints, praises of NetworkX!
+
+
 .. image:: https://github.com/networkx/networkx/workflows/test/badge.svg?branch=main
   :target: https://github.com/networkx/networkx/actions?query=workflow%3A%22test%22
 
@@ -66,7 +69,7 @@ License
 
 Released under the 3-Clause BSD license (see `LICENSE.txt`)::
 
-   Copyright (C) 2004-2022 NetworkX Developers
+   Copyright (C) 2004-2023 NetworkX Developers
    Aric Hagberg <hagberg@lanl.gov>
    Dan Schult <dschult@colgate.edu>
    Pieter Swart <swart@lanl.gov>
diff --git a/debian/changelog b/debian/changelog
index 45357d6..b588ebc 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -1,8 +1,10 @@
-networkx (2.8.8-2) UNRELEASED; urgency=medium
+networkx (3.1-1) UNRELEASED; urgency=medium
 
   * Trim trailing whitespace.
+  * New upstream release.
+  * New upstream release.
 
- -- Debian Janitor <janitor@jelmer.uk>  Tue, 10 Jan 2023 05:38:51 -0000
+ -- Debian Janitor <janitor@jelmer.uk>  Tue, 22 Aug 2023 23:17:30 -0000
 
 networkx (2.8.8-1) unstable; urgency=medium
 
diff --git a/doc/Makefile b/doc/Makefile
index 303c4fa..e8e6cca 100644
--- a/doc/Makefile
+++ b/doc/Makefile
@@ -15,17 +15,18 @@ ALLSPHINXOPTS   = -d build/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
 
 help:
 	@echo "Please use \`make <target>' where <target> is one of"
-	@echo "  html      to make standalone HTML files"
-	@echo "  dirhtml   to make HTML files named index.html in directories"
-	@echo "  pickle    to make pickle files"
-	@echo "  epub       to make an epub"
-	@echo "  json      to make JSON files"
-	@echo "  htmlhelp  to make HTML files and a HTML help project"
-	@echo "  qthelp    to make HTML files and a qthelp project"
-	@echo "  latex     to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
-	@echo "  changes   to make an overview of all changed/added/deprecated items"
-	@echo "  linkcheck to check all external links for integrity"
-	@echo "  doctest   to run all doctests embedded in the documentation (if enabled)"
+	@echo "  html         to make standalone HTML files"
+	@echo "  html-noplot  to make standalone HTML files without building the examples"
+	@echo "  dirhtml      to make HTML files named index.html in directories"
+	@echo "  pickle       to make pickle files"
+	@echo "  epub         to make an epub"
+	@echo "  json         to make JSON files"
+	@echo "  htmlhelp     to make HTML files and a HTML help project"
+	@echo "  qthelp       to make HTML files and a qthelp project"
+	@echo "  latex        to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
+	@echo "  changes      to make an overview of all changed/added/deprecated items"
+	@echo "  linkcheck    to check all external links for integrity"
+	@echo "  doctest      to run all doctests embedded in the documentation (if enabled)"
 
 
 clean:
@@ -48,6 +49,11 @@ html:
 	@echo
 	@echo "Build finished. The HTML pages are in build/html."
 
+html-noplot:
+	$(SPHINXBUILD) -D plot_gallery="False" -b html $(ALLSPHINXOPTS) build/html
+	@echo
+	@echo "Build finished. The HTML pages are in build/html."
+
 dirhtml:
 	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) build/dirhtml
 	@echo
diff --git a/doc/conf.py b/doc/conf.py
index f5b6d55..1c2d0e8 100644
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -50,6 +50,7 @@ sphinx_gallery_conf = {
     "gallery_dirs": "auto_examples",
     "backreferences_dir": "modules/generated",
     "image_scrapers": ("matplotlib",),
+    "plot_gallery": "True",
 }
 # Add pygraphviz png scraper, if available
 try:
@@ -126,6 +127,7 @@ html_theme_options = {
     "collapse_navigation": True,
     "navigation_depth": 2,
     "show_prev_next": False,
+    "announcement": "<p><a href='https://forms.gle/NUGcBxyjx5onbAgc8'> NetworkX User Survey 2023</a> 🎉 Fill out the survey to tell us about your ideas, complaints, praises of NetworkX!</p>",
     "icon_links": [
         {"name": "Home Page", "url": "https://networkx.org", "icon": "fas fa-home"},
         {
@@ -136,7 +138,7 @@ html_theme_options = {
     ],
     "external_links": [{"name": "Guides", "url": "https://networkx.org/nx-guides/"}],
     "navbar_end": ["theme-switcher", "navbar-icon-links", "version"],
-    "page_sidebar_items": ["search-field", "page-toc", "edit-this-page"],
+    "secondary_sidebar_items": ["search-field", "page-toc", "edit-this-page"],
     "header_links_before_dropdown": 7,
 }
 html_sidebars = {
@@ -222,11 +224,11 @@ latex_appendices = ["tutorial"]
 intersphinx_mapping = {
     "python": ("https://docs.python.org/3/", None),
     "numpy": ("https://numpy.org/doc/stable/", None),
-    "neps": ("https://numpy.org/neps", None),
-    "matplotlib": ("https://matplotlib.org/stable", None),
-    "scipy": ("https://docs.scipy.org/doc/scipy/reference", None),
-    "pandas": ("https://pandas.pydata.org/pandas-docs/stable", None),
-    "geopandas": ("https://geopandas.org/", None),
+    "neps": ("https://numpy.org/neps/", None),
+    "matplotlib": ("https://matplotlib.org/stable/", None),
+    "scipy": ("https://docs.scipy.org/doc/scipy/", None),
+    "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None),
+    "geopandas": ("https://geopandas.org/en/stable/", None),
     "pygraphviz": ("https://pygraphviz.github.io/documentation/stable/", None),
     "sphinx-gallery": ("https://sphinx-gallery.github.io/stable/", None),
     "nx-guides": ("https://networkx.org/nx-guides/", None),
diff --git a/doc/developer/about_us.rst b/doc/developer/about_us.rst
index b785cf2..bb0fe5c 100644
--- a/doc/developer/about_us.rst
+++ b/doc/developer/about_us.rst
@@ -6,7 +6,7 @@ and has been developed with the help of many others. Thanks to everyone who has
 improved NetworkX by contributing code, bug reports (and fixes), documentation,
 and input on design, features, and the future of NetworkX.
 
-.. include:: team.rst
+.. include:: teams.inc
 
 Contributors
 ------------
@@ -145,6 +145,8 @@ to add your name to the bottom of the list.
 - Philip Boalch
 - Matt Schwennesen, Github: `mjschwenne <https://github.com/mjschwenne>`_
 - Andrew Knyazev, Github: `lobpcg <https://github.com/lobpcg>`_, LinkedIn: `andrew-knyazev <https://www.linkedin.com/in/andrew-knyazev>`_
+- Luca Cappelletti, GitHub: `LucaCappelletti94 <https://github.com/LucaCappelletti94>`_
+- Sultan Orazbayev, GitHub: `SultanOrazbayev <https://github.com/SultanOrazbayev>`_, LinkedIn: `Sultan Orazbayev <https://www.linkedin.com/in/sultan-orazbayev/>`_
 
 A supplementary (but still incomplete) list of contributors is given by the
 list of names that have commits in ``networkx``'s
diff --git a/doc/developer/deprecations.rst b/doc/developer/deprecations.rst
index db98405..12582ef 100644
--- a/doc/developer/deprecations.rst
+++ b/doc/developer/deprecations.rst
@@ -46,76 +46,19 @@ Version 3.0
 
 * In ``readwrite/gml.py`` remove ``literal_stringizer`` and related tests.
 * In ``readwrite/gml.py`` remove ``literal_destringizer`` and related tests.
-* In ``utils/misc.py`` remove ``is_string_like`` and related tests.
-* In ``utils/misc.py`` remove ``make_str`` and related tests.
-* In ``utils/misc.py`` remove ``is_iterator``.
-* In ``utils/misc.py`` remove ``iterable``.
-* In ``utils/misc.py`` remove ``is_list_of_ints``.
-* In ``utils/misc.py`` remove ``consume``.
-* In ``utils/misc.py`` remove ``default_opener``.
-* In ``utils/misc.py`` remove ``empty_generator``.
-* Remove ``utils/contextmanagers.py`` and related tests.
-* In ``drawing/nx_agraph.py`` remove ``display_pygraphviz`` and related tests.
-* In ``algorithms/chordal.py`` replace ``chordal_graph_cliques`` with ``_chordal_graph_cliques``.
-* In ``algorithms/centrality/betweenness_centrality_subset.py`` remove ``betweenness_centrality_source``.
-* In ``algorithms/centrality/betweenness.py`` remove ``edge_betweeness``.
-* In ``algorithms/community_modularity_max.py`` remove old name ``_naive_greedy_modularity_communities``.
-* In ``linalg/algebraicconnectivity.py`` remove ``_CholeskySolver`` and related code.
-* In ``convert_matrix.py`` remove ``to_numpy_matrix`` and ``from_numpy_matrix``.
-* In ``readwrite/json_graph/cytoscape.py``, change function signature for
-  ``cytoscape_graph`` and ``cytoscape_data`` to replace the ``attrs`` keyword.
-  argument with explicit ``name`` and ``ident`` keyword args.
-* In ``readwrite/json_graph/tree.py``, remove ``attrs`` kwarg from ``tree_graph``
-  and ``tree_data``.
-* Undo changes related to the removal of ``pyyaml``. Remove the
-  ``__getattr__`` definitions from ``networkx/__init__.py``,
-  ``networkx/readwrite/__init__.py`` and ``networkx/readwrite/nx_yaml.py`` and
-  remove ``networkx/readwrite/tests/test_getattr_nxyaml_removal.py``
-* Remove ``readwrite/gpickle.py`` and related tests.
-* Remove ``readwrite/nx_shp.py`` and related tests (add info in alternatives).
 * Remove ``copy`` method in the coreview Filtered-related classes and related tests.
 * In ``algorithms/link_analysis/pagerank_alg.py`` replace ``pagerank`` with ``pagerank_scipy``.
 * In ``algorithms/link_analysis/pagerank_alg.py`` rename ``pagerank_numpy`` as ``_pagerank_numpy``.
 * In ``convert_matrix.py`` remove ``order`` kwarg from ``to_pandas_edgelist`` and docstring
-* Remove ``readwrite/json_graph/jit.py`` and related tests.
-* In ``utils/misc.py`` remove ``generate_unique_node`` and related tests.
-* In ``algorithms/link_analysis/hits_alg.py`` remove ``hub_matrix`` and ``authority_matrix``
-* In ``algorithms/link_analysis/hits_alg.py``, remove ``hits_numpy`` and ``hist_scipy``.
-* In ``classes`` remove the ``ordered`` module and the four ``Ordered``
-  classes defined therein.
-* In ``utils/decorators.py`` remove ``preserve_random_state``.
-* In ``algorithms/community/quality.py`` remove ``coverage`` and ``performance``.
-* Remove ``testing``.
-* In ``linalg/graphmatrix.py`` remove ``adj_matrix``.
-* In ``algorithms/similarity.py`` replace ``simrank_similarity`` with ``simrank_similarity_numpy``.
-* In ``algorithms/assortativity/mixing.py`` remove ``numeric_mixing_matrix``.
-* In ``algorithms/assortativity/connectivity.py`` remove ``k_nearest_neighbors``.
-* In ``utils/decorators.py`` remove ``random_state``.
 * In ``algorithms/operators/binary.py`` remove ``name`` kwarg from ``union`` and docstring.
-* In ``generators/geometric.py`` remove ``euclidean`` and tests.
-* In ``algorithms/node_classification/`` remove ``hmn.py``, ``lgc.py``,
-  and ``utils.py`` after moving the functions defined therein into the newly created
-  ``node_classification.py`` module, which will replace the current package.
 * In ``algorithms/link_analysis/pagerank_alg.py``, remove the
   ``np.asmatrix`` wrappers on the return values of ``google_matrix`` and remove
   the associated FutureWarning.
-* In ``convert_matrix.py`` remove ``from_scipy_sparse_matrix`` and
-  ``to_scipy_sparse_matrix``.
 * In ``linalg/attrmatrix.py`` remove the FutureWarning, update the
   return type by removing ``np.asmatrix``, and update the docstring to
   reflect that the function returns a ``numpy.ndarray`` instance.
-* In ``generators/small.py`` remove ``make_small_graph`` and
-  ``make_small_undirected_graph``.
-* In ``convert_matrix.py`` remove ``to_numpy_recarray``.
-* In ``classes/function.py`` remove ``info``.
-* In ``algorithms/community/modularity_max.py``, remove the deprecated
-  ``n_communities`` parameter from the ``greedy_modularity_communities``
-  function.
 * In ``algorithms/distance_measures.py`` remove ``extrema_bounding``.
-* In ``utils/misc.py`` remove ``dict_to_numpy_array1`` and ``dict_to_numpy_array2``.
-* In ``utils/misc.py`` remove ``to_tuple``.
 * In ``algorithms/matching.py``, remove parameter ``maxcardinality`` from ``min_weight_matching``.
-* In ``drawing/nx_pydot.py``, change PendingDeprecationWarning to DeprecationWarning.
 
 
 Version 3.2
@@ -125,3 +68,12 @@ Version 3.2
 * Remove pydot functionality ``drawing/nx_pydot.py``, if pydot is still not being maintained. See #5723
 * In ``readwrite/json_graph/node_link.py`` remove the ``attrs` keyword code 
   and docstring in ``node_link_data`` and ``node_link_graph``. Also the associated tests.
+
+Version 3.3
+~~~~~~~~~~~
+* Remove the ``forest_str`` function from ``readwrite/text.py``. Replace
+  existing usages with ``write_network_text``.
+* Change ``single_target_shortest_path_length`` in ``algorithms/shortest_path/unweighted.py``
+  to return a dict. See #6527
+* Change ``shortest_path`` in ``algorithms/shortest_path/generic.py``
+  to return an iterator. See #6527
diff --git a/doc/developer/new_contributor_faq.rst b/doc/developer/new_contributor_faq.rst
index 9c28978..1bfefdb 100644
--- a/doc/developer/new_contributor_faq.rst
+++ b/doc/developer/new_contributor_faq.rst
@@ -110,7 +110,7 @@ For example, let's say you are interested in making a change to the
 where it is defined. In an IPython terminal, you can use ``?`` --- the source file is
 listed in the ``File:`` field:
 
-.. code-block:: ipython
+.. code-block:: ipython3
 
    In [1]: import networkx as nx
    In [2]: nx.kamada_kawai_layout?
diff --git a/doc/developer/nxeps/nxep-0000.rst b/doc/developer/nxeps/nxep-0000.rst
index 3f3b10b..34e4030 100644
--- a/doc/developer/nxeps/nxep-0000.rst
+++ b/doc/developer/nxeps/nxep-0000.rst
@@ -5,7 +5,7 @@ NXEP 0 — Purpose and Process
 ============================
 
 :Author: Jarrod Millman <millman@berkeley.edu>
-:Status: Draft
+:Status: Accepted
 :Type: Process
 :Created: 2020-06-25
 
diff --git a/doc/developer/nxeps/nxep-0001.rst b/doc/developer/nxeps/nxep-0001.rst
index 516c249..b55444d 100644
--- a/doc/developer/nxeps/nxep-0001.rst
+++ b/doc/developer/nxeps/nxep-0001.rst
@@ -6,7 +6,7 @@ NXEP 1 — Governance and Decision Making
 
 :Author: Jarrod Millman <millman@berkeley.edu>
 :Author: Dan Schult <dschult@colgate.edu>
-:Status: Draft
+:Status: Accepted
 :Type: Process
 :Created: 2020-06-25
 
@@ -54,8 +54,8 @@ have shown they can be trusted to maintain NetworkX with care. Becoming a
 core developer allows contributors to merge approved pull requests, cast votes
 for and against merging a pull request, and be involved in deciding major
 changes to the API, and thereby more easily carry on with their project related
-activities. Core developers appear as team members on the `NetworkX Core Team page
-<https://github.com/orgs/networkx/teams/core-developers/members>`_ and can
+activities. Core developers appear as team members on the
+:ref:`NetworkX Core Developers gallery<core-developers-team>` and can
 be messaged ``@networkx/core-developers``. Core
 developers are expected to review code contributions while adhering to the
 :ref:`core_dev`.
@@ -83,10 +83,9 @@ long experience with both the project and the larger ecosystem. When the core
 developer community (including the SC members) fails to reach such a consensus
 in a reasonable timeframe, the SC is the entity that resolves the issue.
 
-Steering Council members appear as team members on the `NetworkX Steering
-Council Team page
-<https://github.com/orgs/networkx/teams/steering-council/members>`_ and
-can be messaged ``@networkx/steering-council``. Core
+The current list of steering council members appears on the
+:ref:`NetworkX Steering Council gallery<steering-council-team>` and can
+be messaged ``@networkx/steering-council``.
 
 Decision Making Process
 =======================
diff --git a/doc/developer/nxeps/nxep-0002.rst b/doc/developer/nxeps/nxep-0002.rst
index 1904cad..55c0617 100644
--- a/doc/developer/nxeps/nxep-0002.rst
+++ b/doc/developer/nxeps/nxep-0002.rst
@@ -78,7 +78,7 @@ The main impact and the decision that needs to be taken in this NXEP is with
 respect to the user facing API. By implementing this NXEP via subscripting NodeViews,
 we may end up adding some ambiguity for users. As for example `G.nodes[x]`
 will return an attribute dict but `G.nodes[0:5]` will return a list of first five nodes.
-This will be more ambigious with EdgeView as ``G.edges[0, 1]`` will return an
+This will be more ambiguous with EdgeView as ``G.edges[0, 1]`` will return an
 attribute dictionary of the edge between 0 and 1 and ``G.edges[0:1]`` will return the first edge.
 We need to find a way to counter this potential confusion.
 The alternative proposal of a new slicing method is one possible solution.
@@ -117,7 +117,7 @@ add order and edge order based on adjacency storage.
 On the computational front, if we create lists to allow slices, we use memory to store the lists.
 This is something user would have anyway done with something like ``list(G.nodes(data=True))[0:10]``.
 But we can do better with our slicing mechanisms.
-We should be able to avoid constucting the entire list simply to get the slices by internally
+We should be able to avoid constructing the entire list simply to get the slices by internally
 using code like: ``indx=[n for i, n in enumerate(G.nodes(data=True)) if i in range(x.start, x.stop, s.step)]``
 where x is the desired slice object.
 
@@ -138,11 +138,11 @@ The following code will be valid::
   >>> G.edges[1:10]
   >>> G.edges(data=True)[4:6]
 
-Prelimanary impelementation work is available at https://github.com/networkx/networkx/pull/4086
+Preliminary implementation work is available at https://github.com/networkx/networkx/pull/4086
 
 Alternatively, to get rid of the ambiguity in slicing API with respect to
 the dict views we can implement a new
-``slice`` method which leads to a less ambigious API.::
+``slice`` method which leads to a less ambiguous API.::
 
   >>> G.nodes(data=True).slice[:10]
   >>> G.nodes.slice[10:30]
@@ -206,7 +206,7 @@ The listed alternatives are not mutually exclusive.
         ...
      NetworkXError: NodeView does not support slicing. Try list(G.nodes)[0:10].
 
-- Instead of changing the behavior of ``__getitem__`` we can impelment a new
+- Instead of changing the behavior of ``__getitem__`` we can implement a new
   method, something like ``G.nodes.head(x)`` (insipired by pandas) which
   returns the first x nodes.
   This approach could be expanded to using a ``slice`` object directly but
diff --git a/doc/developer/nxeps/nxep-0004.rst b/doc/developer/nxeps/nxep-0004.rst
index 8d6912f..41bde67 100644
--- a/doc/developer/nxeps/nxep-0004.rst
+++ b/doc/developer/nxeps/nxep-0004.rst
@@ -1,8 +1,8 @@
 .. _NXEP4:
 
-======================================================================
-NXEP 4 — Adopting `numpy.random.Generator` as default random interface
-======================================================================
+=================================
+NXEP 4 — Default random interface
+=================================
 
 :Author: Ross Barnowski (rossbar@berkeley.edu)
 :Status: Draft
@@ -21,12 +21,12 @@ that includes support for `numpy.random` and the Python built-in `random` module
 preferred package for random number generation.
 NumPy introduced a new interface in the `numpy.random` package in NumPy version
 1.17.
-According to :doc:`NEP19 <nep-0019-rng-policy>`, the new interface based on
+According to :external+neps:doc:`NEP19 <nep-0019-rng-policy>`, the new interface based on
 `numpy.random.Generator`
 is recommended over the legacy `numpy.random.RandomState` as the former has
 `better statistical properties <https://www.pcg-random.org/index.html>`_,
-:doc:`more features <reference/random/new-or-different>`,
-and :doc:`improved performance <reference/random/performance>`.
+:external+numpy:doc:`more features <reference/random/new-or-different>`,
+and :external+numpy:doc:`improved performance <reference/random/performance>`.
 This NXEP proposes a strategy for adopting `numpy.random.Generator` as the
 **default** interface for random number generation within NetworkX.
 
@@ -166,7 +166,7 @@ by `numpy.random.Generator.integers`.
 Thus any code that uses `create_random_state` or `create_py_random_state` and
 relies on the ``randint`` method of the returned rng would result in an
 `AttributeError`.
-This can be addressed with a compatiblity class similar to the
+This can be addressed with a compatibility class similar to the
 `networkx.utils.misc.PythonRandomInterface` class, which provides a compatibility
 layer between `random` and `numpy.random.RandomState`.
 
@@ -210,7 +210,7 @@ potential approaches to supporting the new NumPy random interface:
 - `scikit-learn/scikit-learn#14042 <sklearn14042>`_ is a higher-level discussion
   that includes additional information about the design considerations and constraints
   related to scikit-learn's ``random_state``.
-- There is also a releated `SLEP <slep011>`_.
+- There is also a related `SLEP <slep011>`_.
 
 .. _sklearn16988: https://github.com/scikit-learn/scikit-learn/issues/16988
 .. _sklearn14042: https://github.com/scikit-learn/scikit-learn/issues/14042
@@ -299,7 +299,26 @@ To illustrate (ignoring implementation details)::
 Discussion
 ----------
 
-This section may just be a bullet list including links to any discussions
-regarding the NXEP:
-
-- This includes links to mailing list threads or relevant GitHub issues.
+This NXEP has been discussed at several community meetings, see e.g.
+`these meeting notes <https://github.com/networkx/archive/blob/main/meetings/2023-03-14.md#nxep-topic-of-the-week>`_.
+
+The main concern that has surfaced during these discussions is that the
+NumPy ``Generator`` interface does not make the same strict stream-compatibility
+guarantees as the older ``RandomState``.
+Therefore, if this NXEP were implemented as proposed, code that relies on seeded
+random numbers could in principle return different results with some future
+NumPy version due to changes in the default ``BitGenerator`` or ``Generator`` methods.
+
+Many NetworkX functions are quite sensitive to the random seed.
+For example, changing the seed for the default ``spring_layout`` function can
+yield a vastly different (but equally valid) layout for a network.
+Stream-compatibility is important for reproducibility in these contexts.
+
+Thus we have concluded through various discussions *not* to implement the
+changes proposed in this NXEP.
+``RandomState`` will remain the default random number generator for the ``random_state``
+decorator in an effort to support strict backward compatibility for all NetworkX
+user code that relies on ``random_state``.
+The ``Generator`` interface is *supported* in the ``random_state`` decorator,
+and users are encouraged to use ``Generator`` instances in new code where
+stream-compatibility is not a priority.
diff --git a/doc/developer/projects.rst b/doc/developer/projects.rst
index f6a2963..eafae92 100644
--- a/doc/developer/projects.rst
+++ b/doc/developer/projects.rst
@@ -28,12 +28,11 @@ Pedagogical Interactive Notebooks for Algorithms Implemented in NetworkX
 - Expected Outcome: A collection of Interactive Jupyter notebooks which
   explain and explore network algorithms to readers and users of NetworkX.
   For example, see this notebook on
-  :doc:`Random Geometric Graphs <content/generators/geometric>`
+  :doc:`Geometric Generator Models <nx-guides:content/generators/geometric>`
 
 - Complexity: Depending on the algorithms you are interested to work on.
 
-- Interested Mentors: `@dschult <https://github.com/dschult/>`__,
-  `@MridulS <https://github.com/MridulS/>`__,
+- Interested Mentors: `@MridulS <https://github.com/MridulS/>`__,
   `@rossbar <https://github.com/rossbar/>`__
   
 - Expected time commitment: This project can be either a medium project (~175 hours)
@@ -41,35 +40,54 @@ Pedagogical Interactive Notebooks for Algorithms Implemented in NetworkX
   pedagogical interactive notebooks for the medium duration project and 4-5 notebooks
   for the long duration project.
 
-Implement the VF2++ Graph Isomorphism Algorithm
------------------------------------------------
-
-- Abstract: The `Graph Isomorphism Problem`_ is a famous difficult network problem at
-  the boundary between P and NP-Complete. The VF2 algorithm is included with NetworkX
-  in a recursive formulation. There is an improved version of this algorithm called
-  `VF2++`_ which we intend to implement. We have early attempts at a nonrecursive version
-  of the main algorithm that also address subgraph isomorphism and subgraph monomorphism.
-  This project involves fully implementing them and extending to directed and multigraph
-  settings.
+Visualization API with Matplotlib
+---------------------------------
 
-- Recommended Skills: Python, graph algorithms
+- Abstract: NetworkX has some basic drawing tools that use Matplotlib to render the
+  images. The API hasn't changed while Matplotlib has changed. Also we have added or
+  are trying to add new features especially with regard to plotting edges. We'd like
+  someone to read a lot about what we offer and also what Matplotlib offers, and
+  come up with a nice way for users to draw graphs flexibly and yet with good defaults.
+  There is little chance such a broad topic could be completed in one summer, but a
+  roadmap and substantial headway on that road is possible.
 
-- Expected Outcome: A new set of functions in NetworkX that implement the VF2++
-  algorithm for all problem and graph types in a nonrecursive manner.
+- Recommended Skills: Python, matplotlib experience.
 
-- Complexity: Moderate
+- Expected Outcome: A roadmap for a refined API for the matplotlib tools within NetworkX
+  as well as code in the form of PR(s) which implement (part of) that API with tests.
 
 - Interested Mentors: `@dschult <https://github.com/dschult/>`__,
-  `@MridulS <https://github.com/MridulS/>`__, `@boothby <https://github.com/boothby/>`__,
 
-.. _`Graph Isomorphism Problem`: https://en.wikipedia.org/wiki/Graph_isomorphism_problem
-.. _VF2++: https://doi.org/10.1016/j.dam.2018.02.018
+- Expected time commitment: This project will be a full time 10 week project (~350 hrs).
+
+Incorporate a Python library for ISMAGs isomorphism calculations
+----------------------------------------------------------------
+
+- Abstract: A team from Sandia Labs has converted the original Java implementation of
+  the ISMAGS isomorphism routines to Python. They have invited us to incorporate that
+  code into NetworkX if we are interested. We'd like someone to learn the ISMAGS code
+  we currently provide, and the code from this new library and figure out what the
+  best combination is to include in NetworkX moving forward. That could be two separate
+  subpackages of tools, or more likely a combination of the two sets of code, or a
+  third incarnation that combines good features from each.
+
+- Recommended Skills: Python, graph algorithms.
 
-- Expected time commitment: Long project (~350 hours)
+- Expected Outcome: A plan for how to best incorporate ISMAGS into NetworkX along
+  with code to do that incorporation.
+
+- Interested Mentors: `@dschult <https://github.com/dschult/>`__,
+
+- Expected time commitment: This project will be a full time 10 week project (~350 hrs).
 
 Completed Projects
 ==================
 
+- `VF2++ algorithm for graph isomorphism`_
+    - Program: Google Summer of Code 2022
+    - Contributor: `@kpetridis24 <https://github.com/kpetridis24/>`__
+    - Link to Proposal: `GSoC 2022: VF2++ Algorithm <https://github.com/networkx/archive/blob/main/proposals-gsoc/GSoC-2022-VF2plusplus-isomorphism.pdf>`_
+
 - `Louvain community detection algorithm`_ 
     - Program: Google Summer of Code 2021
     - Contributor: `@z3y50n <https://github.com/z3y50n/>`__
@@ -98,6 +116,7 @@ Completed Projects
     - Contributor: `@MridulS <https://github.com/MridulS/>`__
     - Link to Proposal: `GSoC 2015: NetworkX 2.0 API <https://github.com/networkx/archive/blob/main/proposals-gsoc/GSoC-2015-NetworkX-2.0-api.md>`__
 
+.. _`VF2++ algorithm for graph isomorphism`: https://github.com/networkx/networkx/pull/5788
 .. _`Louvain community detection algorithm`: https://github.com/networkx/networkx/pull/4929
 .. _`Asadpour algorithm for directed travelling salesman problem`: https://github.com/networkx/networkx/pull/4740
 .. _`Directed acyclic graphs and topological sort`: https://github.com/networkx/nx-guides/pull/44
diff --git a/doc/developer/roadmap.rst b/doc/developer/roadmap.rst
index f0f1b1a..93dec06 100644
--- a/doc/developer/roadmap.rst
+++ b/doc/developer/roadmap.rst
@@ -15,7 +15,7 @@ Installation
 ------------
 
 We aim to make NetworkX as easy to install as possible.
-Some of our dependencies (e.g., graphviz and gdal) can be tricky to install.
+Some of our dependencies (e.g., graphviz) can be tricky to install.
 Other of our dependencies are easy to install on the CPython platform, but
 may be more involved on other platforms such as PyPy.
 Addressing these installation issues may involve working with the external projects.
diff --git a/doc/developer/team.rst b/doc/developer/teams.inc
similarity index 70%
rename from doc/developer/team.rst
rename to doc/developer/teams.inc
index a80e19e..33bc0b1 100644
--- a/doc/developer/team.rst
+++ b/doc/developer/teams.inc
@@ -1,4 +1,6 @@
 
+.. _core-developers-team:
+
 Core Developers
 ---------------
 
@@ -23,23 +25,6 @@ NetworkX development is guided by the following core team:
    </div>
 
 
-.. raw:: html
-
-   <div class="team-member">
-     <a href="https://github.com/camillescott" class="team-member-name">
-        <div class="team-member-photo">
-           <img
-             src="https://avatars.githubusercontent.com/u/2896301?u=bd57c546510c131f4f7f41e3999fb8e6e33a2298&v=4&s=40"
-             loading="lazy"
-             alt="Avatar picture of @camillescott"
-           />
-        </div>
-        Camille Scott
-     </a>
-     <div class="team-member-handle">@camillescott</div>
-   </div>
-
-
 .. raw:: html
 
    <div class="team-member">
@@ -57,23 +42,6 @@ NetworkX development is guided by the following core team:
    </div>
 
 
-.. raw:: html
-
-   <div class="team-member">
-     <a href="https://github.com/ericmjl" class="team-member-name">
-        <div class="team-member-photo">
-           <img
-             src="https://avatars.githubusercontent.com/u/2631566?u=c5d73d769c251a862d7d4bbf1119297d8085c34c&v=4&s=40"
-             loading="lazy"
-             alt="Avatar picture of @ericmjl"
-           />
-        </div>
-        Eric Ma
-     </a>
-     <div class="team-member-handle">@ericmjl</div>
-   </div>
-
-
 .. raw:: html
 
    <div class="team-member">
@@ -91,23 +59,6 @@ NetworkX development is guided by the following core team:
    </div>
 
 
-.. raw:: html
-
-   <div class="team-member">
-     <a href="https://github.com/harshal-dupare" class="team-member-name">
-        <div class="team-member-photo">
-           <img
-             src="https://avatars.githubusercontent.com/u/52428908?u=cb974ff050563c3610f377b7dbbf4982df6a1b90&v=4&s=40"
-             loading="lazy"
-             alt="Avatar picture of @harshal-dupare"
-           />
-        </div>
-        Harshal Dupare
-     </a>
-     <div class="team-member-handle">@harshal-dupare</div>
-   </div>
-
-
 .. raw:: html
 
    <div class="team-member">
@@ -162,51 +113,51 @@ NetworkX development is guided by the following core team:
 .. raw:: html
 
    <div class="team-member">
-     <a href="https://github.com/rossbar" class="team-member-name">
+     <a href="https://github.com/paulitapb" class="team-member-name">
         <div class="team-member-photo">
            <img
-             src="https://avatars.githubusercontent.com/u/1268991?u=974707b96081a9705f3a239c0773320f353ee02f&v=4&s=40"
+             src="https://avatars.githubusercontent.com/u/44149844?u=9bba3eec362015b3e8ce08a18a58bce4a33361da&v=4&s=40"
              loading="lazy"
-             alt="Avatar picture of @rossbar"
+             alt="Avatar picture of @paulitapb"
            />
         </div>
-        Ross Barnowski
+        Paula Pérez Bianchi
      </a>
-     <div class="team-member-handle">@rossbar</div>
+     <div class="team-member-handle">@paulitapb</div>
    </div>
 
 
 .. raw:: html
 
    <div class="team-member">
-     <a href="https://github.com/stefanv" class="team-member-name">
+     <a href="https://github.com/rossbar" class="team-member-name">
         <div class="team-member-photo">
            <img
-             src="https://avatars.githubusercontent.com/u/45071?u=c779b5e06448fbc638bc987cdfe305c7f9a7175e&v=4&s=40"
+             src="https://avatars.githubusercontent.com/u/1268991?u=974707b96081a9705f3a239c0773320f353ee02f&v=4&s=40"
              loading="lazy"
-             alt="Avatar picture of @stefanv"
+             alt="Avatar picture of @rossbar"
            />
         </div>
-        Stefan van der Walt
+        Ross Barnowski
      </a>
-     <div class="team-member-handle">@stefanv</div>
+     <div class="team-member-handle">@rossbar</div>
    </div>
 
 
 .. raw:: html
 
    <div class="team-member">
-     <a href="https://github.com/vdshk" class="team-member-name">
+     <a href="https://github.com/stefanv" class="team-member-name">
         <div class="team-member-photo">
            <img
-             src="https://avatars.githubusercontent.com/u/43042296?u=01411ddb7d394274117007e8d29019e091a8e00a&v=4&s=40"
+             src="https://avatars.githubusercontent.com/u/45071?u=c779b5e06448fbc638bc987cdfe305c7f9a7175e&v=4&s=40"
              loading="lazy"
-             alt="Avatar picture of @vdshk"
+             alt="Avatar picture of @stefanv"
            />
         </div>
-        Vadim Abzalov
+        Stefan van der Walt
      </a>
-     <div class="team-member-handle">@vdshk</div>
+     <div class="team-member-handle">@stefanv</div>
    </div>
 
 
@@ -241,7 +192,7 @@ We thank these previously-active core developers for their contributions to Netw
      <a href="https://github.com/bjedwards" class="team-member-name">
         <div class="team-member-photo">
            <img
-             src="https://avatars.githubusercontent.com/u/726274?u=e493f38cb65425f6de7a9568ee3802a183deaa8e&v=4&s=40"
+             src="https://avatars.githubusercontent.com/u/726274?u=a74ad0658212200d837ffb1ce549b32f0e99f3c6&v=4&s=40"
              loading="lazy"
              alt="Avatar picture of @bjedwards"
            />
@@ -252,6 +203,23 @@ We thank these previously-active core developers for their contributions to Netw
    </div>
 
 
+.. raw:: html
+
+   <div class="team-member">
+     <a href="https://github.com/camillescott" class="team-member-name">
+        <div class="team-member-photo">
+           <img
+             src="https://avatars.githubusercontent.com/u/2896301?u=bd57c546510c131f4f7f41e3999fb8e6e33a2298&v=4&s=40"
+             loading="lazy"
+             alt="Avatar picture of @camillescott"
+           />
+        </div>
+        Camille Scott
+     </a>
+     <div class="team-member-handle">@camillescott</div>
+   </div>
+
+
 .. raw:: html
 
    <div class="team-member">
@@ -269,6 +237,40 @@ We thank these previously-active core developers for their contributions to Netw
    </div>
 
 
+.. raw:: html
+
+   <div class="team-member">
+     <a href="https://github.com/ericmjl" class="team-member-name">
+        <div class="team-member-photo">
+           <img
+             src="https://avatars.githubusercontent.com/u/2631566?u=c5d73d769c251a862d7d4bbf1119297d8085c34c&v=4&s=40"
+             loading="lazy"
+             alt="Avatar picture of @ericmjl"
+           />
+        </div>
+        Eric Ma
+     </a>
+     <div class="team-member-handle">@ericmjl</div>
+   </div>
+
+
+.. raw:: html
+
+   <div class="team-member">
+     <a href="https://github.com/harshal-dupare" class="team-member-name">
+        <div class="team-member-photo">
+           <img
+             src="https://avatars.githubusercontent.com/u/52428908?u=4538fa6338afe2b9460f2f5a4238cadb38f8ef2e&v=4&s=40"
+             loading="lazy"
+             alt="Avatar picture of @harshal-dupare"
+           />
+        </div>
+        Harshal Dupare
+     </a>
+     <div class="team-member-handle">@harshal-dupare</div>
+   </div>
+
+
 .. raw:: html
 
    <div class="team-member">
@@ -320,6 +322,23 @@ We thank these previously-active core developers for their contributions to Netw
    </div>
 
 
+.. raw:: html
+
+   <div class="team-member">
+     <a href="https://github.com/vadyushkins" class="team-member-name">
+        <div class="team-member-photo">
+           <img
+             src="https://avatars.githubusercontent.com/u/43042296?u=01411ddb7d394274117007e8d29019e091a8e00a&v=4&s=40"
+             loading="lazy"
+             alt="Avatar picture of @vadyushkins"
+           />
+        </div>
+        Vadim Abzalov
+     </a>
+     <div class="team-member-handle">@vadyushkins</div>
+   </div>
+
+
 .. raw:: html
 
    <div class="team-member">
@@ -336,3 +355,113 @@ We thank these previously-active core developers for their contributions to Netw
      <div class="team-member-handle">@ysitu</div>
    </div>
 
+
+.. _steering-council-team:
+
+Steering Council
+----------------
+
+
+
+
+.. raw:: html
+
+   <div class="team-member">
+     <a href="https://github.com/dschult" class="team-member-name">
+        <div class="team-member-photo">
+           <img
+             src="https://avatars.githubusercontent.com/u/915037?u=6a27f396c666c5c2172a1cfc7b0d4bbcd0069eed&v=4&s=40"
+             loading="lazy"
+             alt="Avatar picture of @dschult"
+           />
+        </div>
+        Dan Schult
+     </a>
+     <div class="team-member-handle">@dschult</div>
+   </div>
+
+
+.. raw:: html
+
+   <div class="team-member">
+     <a href="https://github.com/hagberg" class="team-member-name">
+        <div class="team-member-photo">
+           <img
+             src="https://avatars.githubusercontent.com/u/187875?v=4&s=40"
+             loading="lazy"
+             alt="Avatar picture of @hagberg"
+           />
+        </div>
+        Aric Hagberg
+     </a>
+     <div class="team-member-handle">@hagberg</div>
+   </div>
+
+
+.. raw:: html
+
+   <div class="team-member">
+     <a href="https://github.com/jarrodmillman" class="team-member-name">
+        <div class="team-member-photo">
+           <img
+             src="https://avatars.githubusercontent.com/u/123428?v=4&s=40"
+             loading="lazy"
+             alt="Avatar picture of @jarrodmillman"
+           />
+        </div>
+        Jarrod Millman
+     </a>
+     <div class="team-member-handle">@jarrodmillman</div>
+   </div>
+
+
+.. raw:: html
+
+   <div class="team-member">
+     <a href="https://github.com/MridulS" class="team-member-name">
+        <div class="team-member-photo">
+           <img
+             src="https://avatars.githubusercontent.com/u/5363860?u=ce5c6e9388d2fd153ebf8b0bb521c928b0813608&v=4&s=40"
+             loading="lazy"
+             alt="Avatar picture of @MridulS"
+           />
+        </div>
+        Mridul Seth
+     </a>
+     <div class="team-member-handle">@MridulS</div>
+   </div>
+
+
+.. raw:: html
+
+   <div class="team-member">
+     <a href="https://github.com/rossbar" class="team-member-name">
+        <div class="team-member-photo">
+           <img
+             src="https://avatars.githubusercontent.com/u/1268991?u=974707b96081a9705f3a239c0773320f353ee02f&v=4&s=40"
+             loading="lazy"
+             alt="Avatar picture of @rossbar"
+           />
+        </div>
+        Ross Barnowski
+     </a>
+     <div class="team-member-handle">@rossbar</div>
+   </div>
+
+
+.. raw:: html
+
+   <div class="team-member">
+     <a href="https://github.com/stefanv" class="team-member-name">
+        <div class="team-member-photo">
+           <img
+             src="https://avatars.githubusercontent.com/u/45071?u=c779b5e06448fbc638bc987cdfe305c7f9a7175e&v=4&s=40"
+             loading="lazy"
+             alt="Avatar picture of @stefanv"
+           />
+        </div>
+        Stefan van der Walt
+     </a>
+     <div class="team-member-handle">@stefanv</div>
+   </div>
+
diff --git a/doc/reference/algorithms/assortativity.rst b/doc/reference/algorithms/assortativity.rst
index 02f3b8d..8ec6167 100644
--- a/doc/reference/algorithms/assortativity.rst
+++ b/doc/reference/algorithms/assortativity.rst
@@ -30,7 +30,6 @@ Average degree connectivity
    :toctree: generated/
 
    average_degree_connectivity
-   k_nearest_neighbors
 
 
 Mixing
@@ -40,7 +39,6 @@ Mixing
 
    attribute_mixing_matrix
    degree_mixing_matrix
-   numeric_mixing_matrix
    attribute_mixing_dict
    degree_mixing_dict
    mixing_dict
diff --git a/doc/reference/algorithms/centrality.rst b/doc/reference/algorithms/centrality.rst
index 84472b9..b7d4001 100644
--- a/doc/reference/algorithms/centrality.rst
+++ b/doc/reference/algorithms/centrality.rst
@@ -45,7 +45,6 @@ Current Flow Closeness
    :toctree: generated/
 
    betweenness_centrality
-   betweenness_centrality_source
    betweenness_centrality_subset
    edge_betweenness_centrality
    edge_betweenness_centrality_subset
@@ -149,3 +148,10 @@ VoteRank
    :toctree: generated/
 
    voterank
+
+Laplacian
+---------
+.. autosummary::
+   :toctree: generated/
+
+   laplacian_centrality
diff --git a/doc/reference/algorithms/community.rst b/doc/reference/algorithms/community.rst
index ff60d36..55f7ad6 100644
--- a/doc/reference/algorithms/community.rst
+++ b/doc/reference/algorithms/community.rst
@@ -71,10 +71,8 @@ Measuring partitions
 .. autosummary::
    :toctree: generated/
 
-   coverage
    modularity
    partition_quality
-   performance
 
 Partitions via centrality measures
 ----------------------------------
diff --git a/doc/reference/algorithms/cycles.rst b/doc/reference/algorithms/cycles.rst
index 94cf3f8..fe97e4d 100644
--- a/doc/reference/algorithms/cycles.rst
+++ b/doc/reference/algorithms/cycles.rst
@@ -11,3 +11,4 @@ Cycles
    recursive_simple_cycles
    find_cycle
    minimum_cycle_basis
+   chordless_cycles
diff --git a/doc/reference/algorithms/distance_measures.rst b/doc/reference/algorithms/distance_measures.rst
index 1a6a3c7..f0d0918 100644
--- a/doc/reference/algorithms/distance_measures.rst
+++ b/doc/reference/algorithms/distance_measures.rst
@@ -10,7 +10,6 @@ Distance Measures
    center
    diameter
    eccentricity
-   extrema_bounding
    periphery
    radius
    resistance_distance
diff --git a/doc/reference/algorithms/isomorphism.ismags.rst b/doc/reference/algorithms/isomorphism.ismags.rst
index d9d19ca..67cc9b9 100644
--- a/doc/reference/algorithms/isomorphism.ismags.rst
+++ b/doc/reference/algorithms/isomorphism.ismags.rst
@@ -1,9 +1,5 @@
 .. _ismags:
 
-****************
-ISMAGS Algorithm
-****************
-
 .. automodule:: networkx.algorithms.isomorphism.ismags
 
 ISMAGS object
diff --git a/doc/reference/algorithms/isomorphism.rst b/doc/reference/algorithms/isomorphism.rst
index 5a29f1e..1d64bd0 100644
--- a/doc/reference/algorithms/isomorphism.rst
+++ b/doc/reference/algorithms/isomorphism.rst
@@ -4,9 +4,6 @@
 Isomorphism
 ***********
 
-.. toctree::
-   :maxdepth: 2
-
 .. automodule:: networkx.algorithms.isomorphism
 .. autosummary::
    :toctree: generated/
@@ -16,6 +13,15 @@ Isomorphism
    fast_could_be_isomorphic
    faster_could_be_isomorphic
 
+VF2++
+-----
+.. automodule:: networkx.algorithms.isomorphism.vf2pp
+.. autosummary::
+   :toctree: generated/
+
+   vf2pp_is_isomorphic
+   vf2pp_all_isomorphisms
+   vf2pp_isomorphism
 
 Tree Isomorphism
 -----------------
diff --git a/doc/reference/algorithms/link_analysis.rst b/doc/reference/algorithms/link_analysis.rst
index d85ab33..c691f89 100644
--- a/doc/reference/algorithms/link_analysis.rst
+++ b/doc/reference/algorithms/link_analysis.rst
@@ -10,8 +10,6 @@ PageRank
    :toctree: generated/
 
    pagerank
-   pagerank_numpy
-   pagerank_scipy
    google_matrix
 
 Hits
@@ -22,8 +20,3 @@ Hits
    :toctree: generated/
 
    hits
-   hits_numpy
-   hits_scipy
-   hub_matrix
-   authority_matrix
-
diff --git a/doc/reference/algorithms/node_classification.rst b/doc/reference/algorithms/node_classification.rst
index 2229818..6b79fe6 100644
--- a/doc/reference/algorithms/node_classification.rst
+++ b/doc/reference/algorithms/node_classification.rst
@@ -1,21 +1,9 @@
 Node Classification
 ===================
-.. automodule:: networkx.algorithms.node_classification
-.. currentmodule:: networkx
 
-Harmonic Function
------------------
-.. automodule:: networkx.algorithms.node_classification.hmn
+.. automodule:: networkx.algorithms.node_classification
 .. autosummary::
    :toctree: generated/
 
    harmonic_function
-
-
-Local and Global Consistency
-----------------------------
-.. automodule:: networkx.algorithms.node_classification.lgc
-.. autosummary::
-   :toctree: generated/
-
    local_and_global_consistency
diff --git a/doc/reference/algorithms/operators.rst b/doc/reference/algorithms/operators.rst
index 7babf2c..13632e2 100644
--- a/doc/reference/algorithms/operators.rst
+++ b/doc/reference/algorithms/operators.rst
@@ -43,3 +43,4 @@ Operators
    strong_product
    tensor_product
    power
+   corona_product
diff --git a/doc/reference/algorithms/similarity.rst b/doc/reference/algorithms/similarity.rst
index 4721101..17a4d68 100644
--- a/doc/reference/algorithms/similarity.rst
+++ b/doc/reference/algorithms/similarity.rst
@@ -11,6 +11,5 @@ Similarity Measures
    optimize_graph_edit_distance
    optimize_edit_paths
    simrank_similarity
-   simrank_similarity_numpy
    panther_similarity
    generate_random_paths
diff --git a/doc/reference/algorithms/swap.rst b/doc/reference/algorithms/swap.rst
index ea80cb8..4375b33 100644
--- a/doc/reference/algorithms/swap.rst
+++ b/doc/reference/algorithms/swap.rst
@@ -7,5 +7,6 @@ Swap
    :toctree: generated/
 
    double_edge_swap
+   directed_edge_swap
    connected_double_edge_swap
 
diff --git a/doc/reference/classes/index.rst b/doc/reference/classes/index.rst
index a0fc388..a4acd53 100644
--- a/doc/reference/classes/index.rst
+++ b/doc/reference/classes/index.rst
@@ -39,13 +39,9 @@ Basic graph types
    multidigraph
 
 .. note:: NetworkX uses `dicts` to store the nodes and neighbors in a graph.
-   So the reporting of nodes and edges for the base graph classes will not
-   necessarily be consistent across versions and platforms.  If you need the
-   order of nodes and edges to be consistent (e.g., when writing automated
-   tests), please see :class:`~networkx.OrderedGraph`,
-   :class:`~networkx.OrderedDiGraph`, :class:`~networkx.OrderedMultiGraph`,
-   or :class:`~networkx.OrderedMultiDiGraph`, which behave like the base
-   graph classes but give a consistent order for reporting of nodes and edges.
+   So the reporting of nodes and edges for the base graph classes may not
+   necessarily be consistent across versions and platforms; however, the reporting
+   for CPython is consistent across platforms and versions after 3.6.
 
 Graph Views
 ===========
@@ -99,3 +95,16 @@ Filters
    show_diedges
    show_multidiedges
    show_multiedges
+
+Backends
+========
+
+.. note:: This is an experimental feature to dispatch your computations to an
+   alternate backend like GraphBLAS, instead of using pure Python dictionaries
+   for computation. Things will change and break in the future!
+
+.. automodule:: networkx.classes.backends
+.. autosummary::
+   :toctree: generated/
+
+   _dispatch
diff --git a/doc/reference/classes/multidigraph.rst b/doc/reference/classes/multidigraph.rst
index a1d56e5..f8417a5 100644
--- a/doc/reference/classes/multidigraph.rst
+++ b/doc/reference/classes/multidigraph.rst
@@ -56,7 +56,7 @@ Reporting nodes edges and neighbors
    MultiDiGraph.successors
    MultiDiGraph.succ
    MultiDiGraph.predecessors
-   MultiDiGraph.succ
+   MultiDiGraph.pred
    MultiDiGraph.adjacency
    MultiDiGraph.nbunch_iter
 
diff --git a/doc/reference/classes/ordered.rst b/doc/reference/classes/ordered.rst
deleted file mode 100644
index c9bd45f..0000000
--- a/doc/reference/classes/ordered.rst
+++ /dev/null
@@ -1,13 +0,0 @@
-.. _ordered:
-
-============================================
-Ordered Graphs---Consistently ordered graphs
-============================================
-
-.. automodule:: networkx.classes.ordered
-
-.. currentmodule:: networkx
-.. autoclass:: OrderedGraph
-.. autoclass:: OrderedDiGraph
-.. autoclass:: OrderedMultiGraph
-.. autoclass:: OrderedMultiDiGraph
diff --git a/doc/reference/convert.rst b/doc/reference/convert.rst
index 4566048..1af838c 100644
--- a/doc/reference/convert.rst
+++ b/doc/reference/convert.rst
@@ -37,10 +37,7 @@ Numpy
 .. autosummary::
    :toctree: generated/
 
-   to_numpy_matrix
    to_numpy_array
-   to_numpy_recarray
-   from_numpy_matrix
    from_numpy_array
 
 Scipy
@@ -49,8 +46,6 @@ Scipy
    :toctree: generated/
 
    to_scipy_sparse_array
-   to_scipy_sparse_matrix
-   from_scipy_sparse_matrix
    from_scipy_sparse_array
 
 Pandas
diff --git a/doc/reference/drawing.rst b/doc/reference/drawing.rst
index efd3436..daf0949 100644
--- a/doc/reference/drawing.rst
+++ b/doc/reference/drawing.rst
@@ -95,4 +95,14 @@ Graph Layout
    spectral_layout
    spiral_layout
    multipartite_layout
-   
+
+
+LaTeX Code
+==========
+.. automodule:: networkx.drawing.nx_latex
+.. autosummary::
+   :toctree: generated/
+
+   to_latex_raw
+   to_latex
+   write_latex
diff --git a/doc/reference/functions.rst b/doc/reference/functions.rst
index 04ad1e5..a859bc8 100644
--- a/doc/reference/functions.rst
+++ b/doc/reference/functions.rst
@@ -12,7 +12,6 @@ Graph
    degree
    degree_histogram
    density
-   info
    create_empty_copy
    is_directed
    to_directed
diff --git a/doc/reference/generators.rst b/doc/reference/generators.rst
index 0985a0b..d2f12b2 100644
--- a/doc/reference/generators.rst
+++ b/doc/reference/generators.rst
@@ -73,7 +73,6 @@ Small
 .. autosummary::
    :toctree: generated/
 
-   make_small_graph
    LCF_graph
    bull_graph
    chvatal_graph
diff --git a/doc/reference/glossary.rst b/doc/reference/glossary.rst
index cd7ccd5..7dd86c8 100644
--- a/doc/reference/glossary.rst
+++ b/doc/reference/glossary.rst
@@ -15,7 +15,7 @@ Glossary
       with an edge attribute dictionary `(u, v, dict)`.
 
    ebunch
-      An iteratable container of edge tuples like a list, iterator,
+      An iterable container of edge tuples like a list, iterator,
       or file.
 
    edge attribute
@@ -26,8 +26,40 @@ Glossary
 
    nbunch
       An nbunch is a single node, container of nodes or `None` (representing
-      all nodes). It can be a list, set, graph, etc.. To filter an nbunch
-      so that only nodes actually in `G` appear, use `G.nbunch_iter(nbunch)`.
+      all nodes). It can be a list, set, graph, etc. To filter an nbunch
+      so that only nodes actually in ``G`` appear, use ``G.nbunch_iter(nbunch)``.
+
+      If the nbunch is a container or iterable that is not itself a node
+      in the graph, then it will be treated as an iterable of nodes, for
+      instance, when nbunch is a string or a tuple::
+
+         >>> import networkx as nx
+         >>> G = nx.DiGraph()
+         >>> G.add_edges_from([("b", "c"), ("a", "ab"), ("ab", "c")])
+         >>> G.edges("ab")
+         OutEdgeDataView([('ab', 'c')])
+      
+      Since "ab" is a node in G, it is treated as a single node::
+
+         >>> G.edges("bc")
+         OutEdgeDataView([('b', 'c')])
+
+      Since "bc" is not a node in G, it is treated as an iterator::
+
+         >>> G.edges(["bc"])
+         OutEdgeDataView([])
+
+      If "bc" is wrapped in a list, the list is the iterable and
+      "bc" is treated as a single node. That is, if the
+      nbunch is an iterable of iterables, the inner iterables will
+      always be treated as nodes::
+
+         >>> G.edges("de")
+         OutEdgeDataView([])
+
+      When nbunch is an iterator that is not itself a node and none of 
+      its elements are nodes, then the edge view suite of methods return
+      an empty edge view.
 
    node
       A node can be any hashable Python object except None.
diff --git a/doc/reference/readwrite/gpickle.rst b/doc/reference/readwrite/gpickle.rst
deleted file mode 100644
index 55255c1..0000000
--- a/doc/reference/readwrite/gpickle.rst
+++ /dev/null
@@ -1,8 +0,0 @@
-Pickle
-======
-.. automodule:: networkx.readwrite.gpickle
-.. autosummary::
-   :toctree: generated/
-
-   read_gpickle
-   write_gpickle
diff --git a/doc/reference/readwrite/index.rst b/doc/reference/readwrite/index.rst
index 76a3f21..e0c7903 100644
--- a/doc/reference/readwrite/index.rst
+++ b/doc/reference/readwrite/index.rst
@@ -12,11 +12,10 @@ Reading and writing graphs
    edgelist
    gexf
    gml
-   gpickle
    graphml
    json_graph
    leda
    sparsegraph6
    pajek
-   nx_shp
    matrix_market
+   text
diff --git a/doc/reference/readwrite/json_graph.rst b/doc/reference/readwrite/json_graph.rst
index 6ae934f..5336877 100644
--- a/doc/reference/readwrite/json_graph.rst
+++ b/doc/reference/readwrite/json_graph.rst
@@ -12,5 +12,4 @@ JSON
    cytoscape_graph
    tree_data
    tree_graph
-   jit_data
-   jit_graph
+
diff --git a/doc/reference/readwrite/matrix_market.rst b/doc/reference/readwrite/matrix_market.rst
index 3717383..a722c1f 100644
--- a/doc/reference/readwrite/matrix_market.rst
+++ b/doc/reference/readwrite/matrix_market.rst
@@ -95,6 +95,6 @@ sparse matrices::
 
     >>> # Read from file
     >>> fh.seek(0)
-    >>> H = nx.from_scipy_sparse_matrix(sp.io.mmread(fh))
+    >>> H = nx.from_scipy_sparse_array(sp.io.mmread(fh))
     >>> H.edges() == G.edges()
     True
diff --git a/doc/reference/readwrite/nx_shp.rst b/doc/reference/readwrite/nx_shp.rst
deleted file mode 100644
index 4750762..0000000
--- a/doc/reference/readwrite/nx_shp.rst
+++ /dev/null
@@ -1,10 +0,0 @@
-GIS Shapefile
-=============
-.. automodule:: networkx.readwrite.nx_shp
-.. autosummary::
-   :toctree: generated/
-
-   read_shp
-   write_shp
-
-
diff --git a/doc/reference/readwrite/text.rst b/doc/reference/readwrite/text.rst
new file mode 100644
index 0000000..660b306
--- /dev/null
+++ b/doc/reference/readwrite/text.rst
@@ -0,0 +1,9 @@
+
+Network Text
+============
+.. automodule:: networkx.readwrite.text
+.. autosummary::
+   :toctree: generated/
+
+   generate_network_text
+   write_network_text
diff --git a/doc/reference/utils.rst b/doc/reference/utils.rst
index 7a223fb..e8c1ce4 100644
--- a/doc/reference/utils.rst
+++ b/doc/reference/utils.rst
@@ -12,16 +12,13 @@ Helper Functions
    :toctree: generated/
 
    arbitrary_element
-   is_string_like
    flatten
-   iterable
    make_list_of_ints
-   make_str
-   generate_unique_node
-   default_opener
+   dict_to_numpy_array
    pairwise
    groups
    create_random_state
+   create_py_random_state
    nodes_equal
    edges_equal
    graphs_equal
@@ -68,3 +65,11 @@ Cuthill-Mckee Ordering
 
    cuthill_mckee_ordering
    reverse_cuthill_mckee_ordering
+
+Mapped Queue
+------------
+.. automodule:: networkx.utils.mapped_queue
+.. autosummary::
+   :toctree: generated/
+
+   MappedQueue
diff --git a/doc/release/contribs.py b/doc/release/contribs.py
index 55ccacc..fd1b6a6 100644
--- a/doc/release/contribs.py
+++ b/doc/release/contribs.py
@@ -5,11 +5,17 @@ import sys
 import string
 import shlex
 
-if len(sys.argv) != 2:
-    print("Usage: ./contributors.py tag-of-previous-release")
+if len(sys.argv) < 2 or len(sys.argv) > 3:
+    print(
+        "Usage: ./contributors.py tag-of-previous-release tag-of-newer-release (optional)"
+    )
     sys.exit(-1)
 
 tag = sys.argv[1]
+if len(sys.argv) < 3:
+    compare_tag = None
+else:
+    compare_tag = sys.argv[2]
 
 
 def call(cmd):
@@ -17,24 +23,47 @@ def call(cmd):
 
 
 tag_date = call(f"git log -n1 --format='%ci' {tag}")[0]
+if compare_tag:
+    compare_tag_date = call(f"git log -n1 --format='%ci' {compare_tag}")[0]
+
 print(f"Release {tag} was on {tag_date}\n")
 
-merges = call(f"git log --since='{tag_date}' --merges --format='>>>%B' --reverse")
+if compare_tag:
+    merges = call(
+        f"git log --since='{tag_date}' --until='{compare_tag_date}' --merges --format='>>>%B' --reverse"
+    )
+else:
+    merges = call(f"git log --since='{tag_date}' --merges --format='>>>%B' --reverse")
 merges = [m for m in merges if m.strip()]
 merges = "\n".join(merges).split(">>>")
 merges = [m.split("\n")[:2] for m in merges]
 merges = [m for m in merges if len(m) == 2 and m[1].strip()]
 
-num_commits = call(f"git rev-list {tag}..HEAD --count")[0]
+if compare_tag:
+    num_commits = call(f"git rev-list {tag}..{compare_tag} --count")[0]
+else:
+    num_commits = call(f"git rev-list {tag}..HEAD --count")[0]
+
 print(f"A total of {num_commits} changes have been committed.\n")
 
 # Use filter to remove empty strings
-commits = filter(None, call(f"git log --since='{tag_date}' --pretty=%s --reverse"))
+if compare_tag:
+    commits = filter(
+        None,
+        call(
+            f"git log --since='{tag_date}' --until='{compare_tag_date}' --pretty=%s --reverse"
+        ),
+    )
+else:
+    commits = filter(
+        None,
+        call(f"git log --since='{tag_date}' --pretty=%s --reverse"),
+    )
 for c in commits:
     print("- " + c)
 
 print(f"\nIt contained the following {len(merges)} merges:\n")
-for (merge, message) in merges:
+for merge, message in merges:
     if merge.startswith("Merge pull request #"):
         PR = f" ({merge.split()[3]})"
     else:
@@ -44,12 +73,17 @@ for (merge, message) in merges:
 
 print("\nMade by the following committers [alphabetical by last name]:\n")
 
-authors = call(f"git log --since='{tag_date}' --format=%aN")
+if compare_tag:
+    authors = call(
+        f"git log --since='{tag_date}' --until='{compare_tag_date}' --format=%aN"
+    )
+else:
+    authors = call(f"git log --since='{tag_date}' --format=%aN")
 authors = [a.strip() for a in authors if a.strip()]
 
 
 def key(author):
-    author = [v for v in author.split() if v[0] in string.ascii_letters]
+    author = list(author.split())
     if len(author) > 0:
         return author[-1]
 
diff --git a/doc/release/index.rst b/doc/release/index.rst
index 7ab33c9..d385a3f 100644
--- a/doc/release/index.rst
+++ b/doc/release/index.rst
@@ -15,6 +15,8 @@ period.
    :maxdepth: 2
 
    release_dev
+   release_3.1
+   release_3.0
    release_2.8.8
    release_2.8.7
    release_2.8.6
diff --git a/doc/release/migration_guide_from_2.x_to_3.0.rst b/doc/release/migration_guide_from_2.x_to_3.0.rst
index a7dc8a4..8035f4d 100644
--- a/doc/release/migration_guide_from_2.x_to_3.0.rst
+++ b/doc/release/migration_guide_from_2.x_to_3.0.rst
@@ -1,8 +1,8 @@
 :orphan:
 
-*****************************
-Preparing for the 3.0 release
-*****************************
+*******************************
+Migration guide from 2.X to 3.0
+*******************************
 
 .. note::
    Much of the work leading to the NetworkX 3.0 release will be included
@@ -11,7 +11,7 @@ Preparing for the 3.0 release
    ongoing work and will help you understand what changes you can make now
    to minimize the disruption caused by the move to 3.0.
 
-This is a guide for people moving from NetworkX 2.X to NetworkX 3.0
+This is a guide for people moving from NetworkX 2.X to NetworkX 3.0.
 
 Any issues with these can be discussed on the `mailing list
 <https://groups.google.com/forum/#!forum/networkx-discuss>`_.
@@ -34,9 +34,6 @@ structures (``Graph``, ``DiGraph``, etc.) and common algorithms, but some
 functionality, e.g. functions found in the ``networkx.linalg`` package, are
 only available if these additional libraries are installed.
 
-.. **TODO**: Generate a table showing dependencies of individual nx objects?
-.. Probably overkill...
-
 Improved integration with scientific Python
 -------------------------------------------
 
@@ -179,15 +176,7 @@ improving supported for array representations of multi-attribute adjacency::
 Deprecated code
 ---------------
 
-The 2.6 release deprecates over 30 functions.
-See :ref:`networkx_2.6`.
-
-.. **TODO**: A table summarizing one deprecation per row w/ 3 columns: 1. the
-.. deprecated function, 2. the old usage, 3. the replacement usage.
-
----
-
-The functions `read_gpickle` and `write_gpickle` will be removed in 3.0.
+The functions `read_gpickle` and `write_gpickle` were removed in 3.0.
 You can read and write NetworkX graphs as Python pickles.
 
 >>> import pickle
@@ -199,7 +188,7 @@ You can read and write NetworkX graphs as Python pickles.
 ...     G = pickle.load(f)
 ... 
 
-The functions `read_yaml` and `write_yaml` will be removed in 3.0.
+The functions `read_yaml` and `write_yaml` were removed in 3.0.
 You can read and write NetworkX graphs in YAML format
 using pyyaml.
 
diff --git a/doc/release/release_2.8.8.rst b/doc/release/release_2.8.8.rst
index 750230f..b6e88e7 100644
--- a/doc/release/release_2.8.8.rst
+++ b/doc/release/release_2.8.8.rst
@@ -1,4 +1,4 @@
-NetworkX 2.8.7
+NetworkX 2.8.8
 ==============
 
 Release date: 1 November 2022
diff --git a/doc/release/release_3.0.rst b/doc/release/release_3.0.rst
new file mode 100644
index 0000000..4694412
--- /dev/null
+++ b/doc/release/release_3.0.rst
@@ -0,0 +1,329 @@
+NetworkX 3.0
+============
+
+Release date: 7 January 2023
+
+Supports Python 3.8, 3.9, 3.10, and 3.11.
+
+NetworkX is a Python package for the creation, manipulation, and study of the
+structure, dynamics, and functions of complex networks.
+
+For more information, please visit our `website <https://networkx.org/>`_
+and our :ref:`gallery of examples <examples_gallery>`.
+Please send comments and questions to the `networkx-discuss mailing list
+<http://groups.google.com/group/networkx-discuss>`_.
+
+Highlights
+----------
+
+This release is the result of 8 months of work with over 180 changes by
+41 contributors. We also have a `guide for people moving from NetworkX 2.X
+to NetworkX 3.0 <https://networkx.org/documentation/latest/release/migration_guide_from_2.x_to_3.0.html>`_. Highlights include:
+
+- Better syncing between G._succ and G._adj for directed G.
+  And slightly better speed from all the core adjacency data structures.
+  G.adj is now a cached_property while still having the cache reset when
+  G._adj is set to a new dict (which doesn't happen very often).
+  Note: We have always assumed that G._succ and G._adj point to the same
+  object. But we did not enforce it well. If you have somehow worked
+  around our attempts and are relying on these private attributes being
+  allowed to be different from each other due to loopholes in our previous
+  code, you will have to look for other loopholes in our new code
+  (or subclass DiGraph to explicitly allow this).
+- If your code sets G._succ or G._adj to new dictionary-like objects, you no longer
+  have to set them both. Setting either will ensure the other is set as well.
+  And the cached_properties G.adj and G.succ will be reset accordingly too.
+- If you use the presence of the attribute `_adj` as a criterion for the object
+  being a Graph instance, that code may need updating. The graph classes
+  themselves now have an attribute `_adj`. So, it is possible that whatever you
+  are checking might be a class rather than an instance. We suggest you check
+  for attribute `_adj` to verify it is like a NetworkX graph object or type and
+  then `type(obj) is type` to check if it is a class.
+- We have added an `experimental plugin feature <https://github.com/networkx/networkx/pull/6000>`_,
+  which lets users choose alternate backends like GraphBLAS, CuGraph for computation. This is an
+  opt-in feature and may change in future releases.
+- Improved integration with the general `Scientific Python ecosystem <https://networkx.org/documentation/latest/release/migration_guide_from_2.x_to_3.0.html#improved-integration-with-scientific-python>`_.
+- New drawing feature (module and tests) from NetworkX graphs to the TikZ library of TeX/LaTeX.
+  The basic interface is ``nx.to_latex(G, pos, **options)`` to construct a string of latex code or
+  ``nx.write_latex(G, filename, as_document=True, **options)`` to write the string to a file.
+- Added an improved subgraph isomorphism algorithm called VF2++.
+
+Improvements
+------------
+- [`#5663 <https://github.com/networkx/networkx/pull/5663>`_]
+  Implements edge swapping for directed graphs.
+- [`#5883 <https://github.com/networkx/networkx/pull/5883>`_]
+  Replace the implementation of ``lowest_common_ancestor`` and
+  ``all_pairs_lowest_common_ancestor`` with a "naive" algorithm to fix
+  several bugs and improve performance.
+- [`#5912 <https://github.com/networkx/networkx/pull/5912>`_]
+  The ``mapping`` argument of the ``relabel_nodes`` function can be either a
+  mapping or a function that creates a mapping. ``relabel_nodes`` first checks
+  whether the ``mapping`` is callable - if so, then it is used as a function.
+  This fixes a bug related for ``mapping=str`` and may change the behavior for
+  other ``mapping`` arguments that implement both ``__getitem__`` and
+  ``__call__``.
+- [`#5898 <https://github.com/networkx/networkx/pull/5898>`_]
+  Implements computing and checking for minimal d-separators between two nodes.
+  Also adds functionality to DAGs for computing v-structures.
+- [`#5943 <https://github.com/networkx/networkx/pull/5943>`_]
+  ``is_path`` used to raise a `KeyError` when the ``path`` argument contained
+  a node that was not in the Graph. The behavior has been updated so that
+  ``is_path`` returns `False` in this case rather than raising the exception.
+- [`#6003 <https://github.com/networkx/networkx/pull/6003>`_]
+  ``avg_shortest_path_length`` now raises an exception if the provided
+  graph is directed but not strongly connected. The previous test (weak
+  connectivity) was wrong; in that case, the returned value was nonsensical.
+
+API Changes
+-----------
+
+- [`#5813 <https://github.com/networkx/networkx/pull/5813>`_]
+  OrderedGraph and other Ordered classes are replaced by Graph because
+  Python dicts (and thus networkx graphs) now maintain order.
+- [`#5899 <https://github.com/networkx/networkx/pull/5899>`_]
+  The `attrs` keyword argument will be replaced with keyword only arguments
+  `source`, `target`, `name`, `key` and `link` for `json_graph/node_link` functions.
+
+Deprecations
+------------
+
+- [`#5723 <https://github.com/networkx/networkx/issues/5723>`_]
+  ``nx.nx_pydot.*`` will be deprecated in the future if pydot isn't being
+  actively maintained. Users are recommended to use pygraphviz instead. 
+- [`#5899 <https://github.com/networkx/networkx/pull/5899>`_]
+  The `attrs` keyword argument will be replaced with keyword only arguments
+  `source`, `target`, `name`, `key` and `link` for `json_graph/node_link` functions.
+
+Merged PRs
+----------
+
+- Bump release version
+- Add characteristic polynomial example to polynomials docs (#5730)
+- Remove deprecated function is_string_like (#5738)
+- Remove deprecated function make_str (#5739)
+- Remove unused 'name' parameter from `union` (#5741)
+- Remove deprecated function is_iterator (#5740)
+- Remove deprecated `euclidean` from geometric.py (#5744)
+- Remove deprecated function utils.consume (#5745)
+- Rm `to_numpy_recarray` (#5737)
+- Remove deprecated function utils.empty_generator (#5748)
+- Rm jit.py (#5751)
+- Remove deprecated context managers (#5752)
+- Remove deprecated function utils.to_tuple (#5755)
+- Remove deprecated display_pygraphviz (#5754)
+- Remove to_numpy_matrix & from_numpy_matrix (#5746)
+- Remove deprecated decorator preserve_random_state (#5768)
+- Remove deprecated function is_list_of_ints (#5743)
+- Remove decorator random_state (#5770)
+- remove `adj_matrix` from `linalg/graphmatrix.py` (#5753)
+- Remove betweenness_centrality_source (#5786)
+- Remove deprecated simrank_similarity_numpy (#5783)
+- Remove networkx.testing subpackage (#5782)
+- Change PyDot PendingDeprecation to Deprecation (#5781)
+- Remove deprecated numeric_mixing_matrix (#5777)
+- Remove deprecated functions make_small_graph and make_small_undirected_graph (#5761)
+- Remove _naive_greedy_modularity_communities (#5760)
+- Make chordal_graph_cliques a generator (#5758)
+- update cytoscape functions to drop old signature (#5784)
+- Remove deprecated functions dict_to_numpy_array2 and dict_to_numpy_array1 (#5756)
+- Remove deprecated function utils.default_opener (#5747)
+- Remove deprecated function iterable (#5742)
+- remove old attr keyword from json_graph/tree (#5785)
+- Remove generate_unique_node (#5780)
+- Replace node_classification subpackage with a module (#5774)
+- Remove gpickle (#5773)
+- Remove deprecated function extrema_bounding (#5757)
+- Remove coverage and performance from quality (#5775)
+- Update return type of google_matrix to numpy.ndarray (#5762)
+- Remove deprecated k-nearest-neighbors (#5769)
+- Remove gdal dependency (#5766)
+- Update return type of attrmatrix (#5764)
+- Remove unused deprecated argument from to_pandas_edgelist (#5778)
+- Remove deprecated function edge_betweeness (#5765)
+- Remove pyyaml dependency (#5763)
+- Remove copy methods for Filter* coreviews (#5776)
+- Remove deprecated function nx.info (#5759)
+- Remove deprecated n_communities argument from greedy_modularity_communities (#5789)
+- Remove deprecated functions hub_matrix and authority_matrix (#5767)
+- Make HITS numpy and scipy private functions (#5771)
+- Add Triad example plot (#5528)
+- Add gallery example visualizing DAG with multiple layouts (#5432)
+- Make pagerank numpy and scipy private functions (#5772)
+- Implement directed edge swap (#5663)
+- Update relabel.py to preserve node order (#5258)
+- Modify DAG example to show topological layout. (#5835)
+- Add docstring example for self-ancestors/descendants (#5802)
+- Update precommit linters (#5839)
+- remove to/from_scipy_sparse_matrix (#5779)
+- Clean up from PR #5779 (#5841)
+- Corona Product (#5223)
+- Add direct link to github networkx org sponsorship (#5843)
+- added examples to efficiency_measures.py (#5643)
+- added examples to regular.py (#5642)
+- added examples to degree_alg.py (#5644)
+- Add docstring examples for triads functions (#5522)
+- Fix docbuild warnings: is_string_like is removed and identation in corona product (#5845)
+- Use py_random_state to control randomness of random_triad (#5847)
+- Remove OrderedGraphs (#5813)
+- Drop NumPy 1.19 (#5856)
+- Speed up unionfind a bit by not adding root node in the path (#5844)
+- Minor doc fixups (#5868)
+- Attempt to reverse slowdown from hasattr  needed for cached_property (#5836)
+- make lazy_import private and remove its internal use (#5878)
+- strategy_saturation_largest_first now accepts partial colorings (#5888)
+- Add weight distance metrics (#5305)
+- docstring updates for `union`, `disjoint_union`, and `compose` (#5892)
+- Update precommit hooks (#5923)
+- Remove old Appveyor cruft (#5924)
+- signature change for `node_link` functions: for issue #5787 (#5899)
+- Replace LCA with naive implementations (#5883)
+- Bump nodelink args deprecation expiration to v3.2 (#5933)
+- Update mapping logic in `relabel_nodes` (#5912)
+- Update pygraphviz (#5934)
+- Further improvements to strategy_saturation_largest_first (#5935)
+- Arf layout (#5910)
+- [ENH] Find and verify a minimal D-separating set in DAG (#5898)
+- Add Mehlhorn Steiner approximations (#5629)
+- Preliminary VF2++ Implementation (#5788)
+- Minor docstring touchups and test refactor for `is_path` (#5967)
+- Switch to relative import for vf2pp_helpers. (#5973)
+- Add vf2pp_helpers subpackage to wheel (#5975)
+- Enhance biconnected components to avoid indexing (#5974)
+- Update mentored projects list (#5985)
+- Add concurrency hook to cancel jobs on new push. (#5986)
+- Make all.py generator friendly (#5984)
+- Only run scheduled pytest-randomly job in main repo. (#5993)
+- Fix steiner tree test (#5999)
+- Update doc requirements (#6008)
+- VF2++ for Directed Graphs (#5972)
+- Fix defect and update docs for MappedQueue, related to gh-5681 (#5939)
+- Update pydata-sphinx-theme (#6012)
+- Update numpydoc (#6022)
+- Fixed test for average shortest path in the case of directed graphs (#6003)
+- Update deprecations after 3.0 dep sprint (#6031)
+- Use scipy.sparse array datastructure (#6037)
+- Designate 3.0b1 release
+- Bump release version
+- Use org funding.yml
+- Update which flow functions support the cutoff argument (#6085)
+- Update GML parsing/writing to allow empty lists/tuples as node attributes (#6093)
+- Warn on unused visualization kwargs that only apply to FancyArrowPatch edges (#6098)
+- Fix weighted MultiDiGraphs in DAG longest path algorithms + add additional tests (#5988)
+- Circular center node layout (#6114)
+- Fix doc inconsistencies related to cutoff in connectivity.py and disjoint_paths.py (#6113)
+- Remove deprecated maxcardinality parameter from min_weight_matching (#6146)
+- Remove deprecated `find_cores` (#6139)
+- Remove deprecated project function from bipartite package. (#6147)
+- Improve test coverage for voterank algorithm (#6161)
+- plugin based backend infrastructure to use multiple computation backends (#6000)
+- Undocumented parameters in dispersion (#6183)
+- Swap.py coverage to 100 (#6176)
+- Improve test coverage for current_flow_betweenness module (#6143)
+- Completed Testing in community.py resolves issue #6184 (#6185)
+- Added an example to algebraic_connectivity (#6153)
+- Add ThinGraph example to Multi*Graph doc_strings (#6160)
+- Fix defect in eulerize, replace reciprocal edge weights (#6145)
+- For issue #6030 Add test coverage for algorithms in beamsearch.py (#6087)
+- Improve test coverage expanders stochastic graph generators (#6073)
+- Update developer requirements  (#6194)
+- Designate 3.0rc1 release
+- Bump release version
+- Tests added in test_centrality.py (#6200)
+- add laplacian_spectrum example (#6169)
+- PR for issue #6033 Improve test coverage for algorithms in betweenness_subset.py #6033 (#6083)
+- Di graph edges doc fix (#6108)
+- Improve coverage for core.py (#6116)
+- Add clear edges method as a method to be frozen by nx.freeze (#6190)
+- Adds LCA test case for self-ancestors from gh-4458. (#6218)
+- Minor Python 2 cleanup (#6219)
+- Add example laplacian matrix  (#6168)
+- Revert 6219 and delete comment. (#6222)
+- fix wording in error message (#6228)
+- Rm incorrect test case for connected edge swap (#6223)
+- add missing `seed` to function called by `connected_double_edge_swap` (#6231)
+- Hide edges with a weight of None in A*. (#5945)
+- Add dfs_labeled_edges reporting of reverse edges due to depth_limit. (#6240)
+- Warn users about duplicate nodes in generator function input (#6237)
+- Reenable geospatial examples (#6252)
+- Draft 3.0 release notes (#6232)
+- Add 2.8.x release notes (#6255)
+- doc: clarify allowed `alpha` when using nx.draw_networkx_edges (#6254)
+- Add a contributor (#6256)
+- Allow MultiDiGraphs for LCA (#6234)
+- Update simple_paths.py to improve readability of the BFS. (#6273)
+- doc: update documentation when providing an iterator over current graph to add/remove_edges_from. (#6268)
+- Fix bug vf2pp is isomorphic issue 6257 (#6270)
+- Improve test coverage for Eigenvector centrality  (#6227)
+- Bug fix in swap: directed_edge_swap and double_edge_swap  (#6149)
+- Adding a test to verify that a NetworkXError is raised when calling n… (#6265)
+- Pin to sphinx 5.2.3 (#6277)
+- Update pre-commit hooks (#6278)
+- Update GH actions (#6280)
+- Fix links in release notes (#6281)
+- bug fix in smallworld.py: random_reference and lattice_reference (#6151)
+- [DOC] Follow numpydoc standard in barbell_graph documentation (#6286)
+- Update simple_paths.py: consistent behaviour for `is_simple_path` when path contains nodes not in the graph. (#6272)
+- Correctly point towards 2.8.8 in release notes (#6298)
+- Isomorphism improve documentation (#6295)
+- Improvements and test coverage for `line.py` (#6215)
+- Fix typo in Katz centrality comment (#6310)
+- Broken link in isomorphism documentation (#6296)
+- Update copyright years to 2023 (#6322)
+- fix warnings for make doctest (#6323)
+- fix whitespace issue in test_internet_as_graph (#6324)
+- Create a Tikz latex drawing feature for networkx (#6238)
+- Fix docstrings (#6329)
+- Fix documentation deployment (#6330)
+- Fix links to migration guide (#6331)
+- Fix links to migration guide (#6331)
+- Fix typo in readme file (#6312)
+- Fix typos in the networkx codebase (#6335)
+- Refactor vf2pp modules and test files (#6334)
+
+Contributors
+------------
+
+- 0ddoe_s
+- Abangma Jessika
+- Adam Li
+- Adam Richardson
+- Ali Faraji
+- Alimi Qudirah
+- Anurag Bhat
+- Ben Heil
+- Brian Hou
+- Casper van Elteren
+- danieleades
+- Dan Schult
+- ddelange
+- Dilara Tekinoglu
+- Dimitrios Papageorgiou
+- Douglas K. G. Araujo
+- Erik Welch
+- George Watkins
+- Guy Aglionby
+- Isaac Western
+- Jarrod Millman
+- Jim Kitchen
+- Juanita Gomez
+- Kevin Brown
+- Konstantinos Petridis
+- ladykkk
+- Lucas H. McCabe
+- Ludovic Stephan
+- Lukong123
+- Matt Schwennesen
+- Michael Holtz
+- Morrison Turnansky
+- Mridul Seth
+- nsengaw4c
+- Okite chimaobi Samuel
+- Paula Pérez Bianchi
+- Radoslav Fulek
+- reneechebbo
+- Ross Barnowski
+- Sebastiano Vigna
+- stevenstrickler
+- Sultan Orazbayev
+- Tina Oberoi
diff --git a/doc/release/release_3.1.rst b/doc/release/release_3.1.rst
new file mode 100644
index 0000000..ea788ae
--- /dev/null
+++ b/doc/release/release_3.1.rst
@@ -0,0 +1,181 @@
+NetworkX 3.1
+============
+
+Release date: 4 April 2023
+
+Supports Python 3.8, 3.9, 3.10, and 3.11.
+
+NetworkX is a Python package for the creation, manipulation, and study of the
+structure, dynamics, and functions of complex networks.
+
+For more information, please visit our `website <https://networkx.org/>`_
+and our :ref:`gallery of examples <examples_gallery>`.
+Please send comments and questions to the `networkx-discuss mailing list
+<http://groups.google.com/group/networkx-discuss>`_.
+
+Highlights
+----------
+
+This release is the result of 3 months of work with over 85 pull requests by
+26 contributors. Highlights include:
+
+- Minor bug-fixes and speed-ups
+- Improvements to plugin based backend infrastructure
+- Minor documentation improvements
+- Improved test coverage
+- Last release supporting Python 3.8
+- Stopped building PDF version of docs
+- Use Ruff for linting
+
+Improvements
+------------
+
+- [`#6461 <https://github.com/networkx/networkx/pull/6461>`_]
+  Add simple cycle enumerator for undirected class
+- [`#6404 <https://github.com/networkx/networkx/pull/6404>`_]
+  Add spectral bisection for graphs using fiedler vector
+- [`#6244 <https://github.com/networkx/networkx/pull/6244>`_]
+  Improve handling of create_using to allow Mixins of type Protocol
+- [`#5399 <https://github.com/networkx/networkx/pull/5399>`_]
+  Add Laplace centrality measure
+
+Deprecations
+------------
+
+- [`#6564 <https://github.com/networkx/networkx/pull/6564>`_]
+  Deprecate ``single_target_shortest_path_length`` to change return value to a dict in v3.3.
+  Deprecate ``shortest_path`` in case of all_pairs to change return value to a iterator in v3.3.
+- [`#5602 <https://github.com/networkx/networkx/pull/5602>`_]
+  Deprecate ``forest_str`` function (use ``write_network_text`` instead).
+
+Merged PRs
+----------
+
+- Designate 3.0 release
+- Fix docs
+- Bump release version
+- Fix link in isomorphvf2.py (#6347)
+- Add dev release notes template
+- Update precommit hooks (#6348)
+- Add clique examples and deprecate helper functions (#6186)
+- Laplace centrality for issue 4973 (#5399)
+- doc:improve doc of possible values of nodes and expected behaviour (#6333)
+- add OrderedGraph removal as an API change in release_3.0.rst (#6354)
+- Update release_3.0 authors (add Jim and Erik) (#6356)
+- Fix broken link nx guide (#6361)
+- Add nx-guide link in the tutorial (#6353)
+- DOC: Minor formatting fixups to get rid of doc build warnings. (#6363)
+- Fix equation in clustering documentation (#6369)
+- Add reference to paper in vf2pp (#6373)
+- provide tikz with degrees, not radians (#6360)
+- Improve handling of create_using to allow Mixins of type Protocol (#6244)
+- Remove an instance of random.sample from a set (deprecated in Python 3.9) (#6380)
+- DOC: Add banner for user survey announcement (#6375)
+- bump pre-commit hooks (and fix CI) (#6396)
+- Add generate / write "network text" (formerly graph_str) (#5602)
+- Improve doc regular graphs (#6397)
+- Fix link voronoi (#6398)
+- Document PageRank algo convergence condition  (#6212)
+- Fix pre-commit on Python 3.10 (#6407)
+- DOC: list pred method for MultiDiGraphs (#6409)
+- Delete warning in approximation documentation (#6221)
+- Comment out unused unlayered dict construction. (#6411)
+- Update installation test instructions (#6303)
+- Added new tests in test_clique.py (#6142)
+- Improve testing of bipartite projection. (#6196)
+- Add dispatching to more shortest path algorithms (#6415)
+- Add Plausible Analytics to our docs (#6413)
+- Fix docstring heading title. (#6424)
+- Added tests to test_directed.py. (#6208)
+- Gallery example for Maximum Independent Set (#5563)
+- spectral bisection for graphs using fiedler vector (#6404)
+- Update developer requirements (#6429)
+- Fix reference in line.py-inverse_line_graph (#6434)
+- Add project desc for visualization and ISMAGs (#6432)
+- Lint using Ruff (#6371)
+- add ruff commit to git-blame-ignore (#6440)
+- NXEP 0 and NXEP 1 - change status to Accepted (#5343)
+- Bump gh-pages deploy bot version. (#6446)
+- Start using ruff for pyupgrade and isort (#6441)
+- Add documentation building to contributor guide (#6437)
+- Reset deploy-action param names for latest version. (#6451)
+- Doc upgrade paley graph (#6399)
+- Added two tests for convert_numpy (#6455)
+- Clean up similarity.py and use dataclasses for storing state (#5831)
+- Remove pdf latex builds of docs (#5572)
+- Add docstring for dorogovtsev_goltsev_mendes generator (#6450)
+- Allow first argument to be passed as kwarg in dispatcher (#6471)
+- Fix negative edge cycle function raising exception for empty graph (#6473)
+- Dispatch more BFS-based algorithms (#6467)
+- Ignore weakrefs when testing for memory leak (#6466)
+- Fix reference formatting in generator docstring. (#6493)
+- tweak `test_override_dispatch` to allow G keyword (#6499)
+- Improve test coverage for astar.py (#6504)
+- Add docstring example to weighted.py (#6497)
+- Fix len operation of UnionAtlas (#6478)
+- Improve test coverage for edgelist.py (#6507)
+- Improve test coverage for mst.py and bug fix in prim_mst_edges() (#6486)
+- Add examples clarifying ambiguity of nbunch (#6513)
+- Updating removing explicit import for communities (#6459)
+- Use generator to limit memory footprint of read_graph6. (#6519)
+- Update docstring of paley graph  (#6529)
+- Fixed bug k_truss doesn't raise exception for self loops (#6521)
+- Update pre-commit (#6545)
+- Update sphinx (#6544)
+- Add docstring examples to dag.py (#6491)
+- Add example script for mst (#6525)
+- Add docstring examples to boundary.py (#6487)
+- improve test coverage for branchings.py (#6523)
+- Improve test coverage for redundancy.py (#6551)
+- Fixed return type inconsistencies in shortest path methods documentation (#6528)
+- Optimize _single_shortest_path_length function (#6299)
+- Deprecate shortest_path functions to have consistent return values in v3.3 (#6567)
+- Add community detection example to Gallery (#6526)
+- add simple cycle enumerator for undirected class (#6461)
+- Fix survey URL (#6548)
+- Test dispatching via nx-loopback backend (#6536)
+- Fixed return type inconsistencies in weighted.py (#6568)
+- Update team galleries (#6569)
+- Added Docstring Example for Bidirectional Shortest Path (#6570)
+- Update release requirements (#6587)
+- Designate 3.1rc0 release
+- Bump release version
+- corrections to docstring of `weisfeiler_lehman_subgraph_hashes` (#6598)
+- Fixed method description in ismags.py (#6600)
+- Minor docs/test maintenance (#6614)
+- Better default alpha value for viz attributes in gexf writer (#6612)
+- Fix module docstring format for ismags reference article. (#6611)
+- Resolve NXEP4 with justification for not implementing it. (#6617)
+- Fix typos (#6620)
+- Draft release notes (#6621)
+- Prep 3.1 release
+
+Contributors
+------------
+
+- Navya Agarwal
+- Lukong Anne
+- Ross Barnowski
+- Gabor Berei
+- Paula Pérez Bianchi
+- Kelly Boothby
+- Purvi Chaurasia
+- Jon Crall
+- Michael Holtz
+- Jim Kitchen
+- Claudia Madrid
+- Jarrod Millman
+- Vanshika Mishra
+- Harri Nieminen
+- Tina Oberoi
+- Omkaar
+- Dima Pasechnik
+- Alimi Qudirah
+- Dan Schult
+- Mridul Seth
+- Eric Sims
+- Tortar
+- Erik Welch
+- Aaron Z
+- danieleades
+- stanyas
diff --git a/doc/release/release_dev.rst b/doc/release/release_dev.rst
index 199aa1e..59bffe0 100644
--- a/doc/release/release_dev.rst
+++ b/doc/release/release_dev.rst
@@ -1,9 +1,9 @@
-Next Release
-============
+3.2 (unreleased)
+================
 
 Release date: TBD
 
-Supports Python ...
+Supports Python 3.9, 3.10, and 3.11.
 
 NetworkX is a Python package for the creation, manipulation, and study of the
 structure, dynamics, and functions of complex networks.
@@ -19,25 +19,6 @@ Highlights
 This release is the result of X of work with over X pull requests by
 X contributors. Highlights include:
 
-- Better syncing between G._succ and G._adj for directed G.
-  And slightly better speed from all the core adjacency data structures.
-  G.adj is now a cached_property while still having the cache reset when
-  G._adj is set to a new dict (which doesn't happen very often).
-  Note: We have always assumed that G._succ and G._adj point to the same
-  object. But we did not enforce it well. If you have somehow worked
-  around our attempts and are relying on these private attributes being
-  allowed to be different from each other due to loopholes in our previous
-  code, you will have to look for other loopholes in our new code
-  (or subclass DiGraph to explicitly allow this).
-- If your code sets G._succ or G._adj to new dictionary-like objects, you no longer
-  have to set them both. Setting either will ensure the other is set as well.
-  And the cached_properties G.adj and G.succ will be rest accordingly too.
-- If you use the presence of the attribute `_adj` as a criteria for the object
-  being a Graph instance, that code may need updating. The graph classes
-  themselves now have an attribute `_adj`. So, it is possible that whatever you
-  are checking might be a class rather than an instance. We suggest you check
-  for attribute `_adj` to verify it is like a NetworkX graph object or type and
-  then `type(obj) is type` to check if it is a class.
 
 Improvements
 ------------
diff --git a/doc/release/report_functions_without_rst_generated.py b/doc/release/report_functions_without_rst_generated.py
index f73da43..3e29706 100644
--- a/doc/release/report_functions_without_rst_generated.py
+++ b/doc/release/report_functions_without_rst_generated.py
@@ -13,13 +13,9 @@ for n, f in funcs:
     # print(result)
 
     old_names = (
-        "find_cores",
         "test",
-        "edge_betweenness",
-        "betweenness_centrality_source",
         "write_graphml_lxml",
         "write_graphml_xml",
-        "adj_matrix",
         "project",
         "fruchterman_reingold_layout",
         "node_degree_xy",
diff --git a/doc/tutorial.rst b/doc/tutorial.rst
index 6c6a0a8..43f54b6 100644
--- a/doc/tutorial.rst
+++ b/doc/tutorial.rst
@@ -509,7 +509,7 @@ like so:
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
 NetworkX supports many popular formats, such as edge lists, adjacency lists,
-GML, GraphML, pickle, LEDA and others.
+GML, GraphML, LEDA and others.
 
 .. nbplot::
 
@@ -626,3 +626,10 @@ the graph in dot format for further processing.
 See :doc:`/reference/drawing` for additional details.
 
 .. code-links::
+
+NX-Guides
+---------
+If you are interested in learning more about NetworkX, graph theory and network analysis 
+then you should check out :doc:`nx-guides <nx-guides:index>`. There you can find tutorials,
+real-world applications and in-depth examinations of graphs and network algorithms. 
+All the material is official and was developed and curated by the NetworkX community. 
diff --git a/examples/algorithms/plot_betweenness_centrality.py b/examples/algorithms/plot_betweenness_centrality.py
index e8354c6..eb1fe21 100644
--- a/examples/algorithms/plot_betweenness_centrality.py
+++ b/examples/algorithms/plot_betweenness_centrality.py
@@ -1,7 +1,7 @@
 """
-=====================
-Betweeness Centrality
-=====================
+======================
+Betweenness Centrality
+======================
 
 Betweenness centrality measures of positive gene functional associations
 using WormNet v.3-GS.
@@ -70,13 +70,13 @@ ax.text(
 ax.text(
     0.80,
     0.06,
-    "node size = betweeness centrality",
+    "node size = betweenness centrality",
     horizontalalignment="center",
     transform=ax.transAxes,
     fontdict=font,
 )
 
-# Resize figure for label readibility
+# Resize figure for label readability
 ax.margins(0.1, 0.05)
 fig.tight_layout()
 plt.axis("off")
diff --git a/examples/algorithms/plot_blockmodel.py b/examples/algorithms/plot_blockmodel.py
index e3fec60..b41f0c3 100644
--- a/examples/algorithms/plot_blockmodel.py
+++ b/examples/algorithms/plot_blockmodel.py
@@ -55,7 +55,7 @@ G = nx.read_edgelist("hartford_drug.edgelist")
 H = G.subgraph(next(nx.connected_components(G)))
 # Makes life easier to have consecutively labeled integer nodes
 H = nx.convert_node_labels_to_integers(H)
-# Create parititions with hierarchical clustering
+# Create partitions with hierarchical clustering
 partitions = create_hc(H)
 # Build blockmodel graph
 BM = nx.quotient_graph(H, partitions, relabel=True)
diff --git a/examples/algorithms/plot_davis_club.py b/examples/algorithms/plot_davis_club.py
index 517d523..3bd37b6 100644
--- a/examples/algorithms/plot_davis_club.py
+++ b/examples/algorithms/plot_davis_club.py
@@ -14,7 +14,7 @@ The graph is bipartite (clubs, women).
 """
 import matplotlib.pyplot as plt
 import networkx as nx
-import networkx.algorithms.bipartite as bipartite
+from networkx.algorithms import bipartite
 
 G = nx.davis_southern_women_graph()
 women = G.graph["top"]
@@ -30,7 +30,7 @@ print("#Friends, Member")
 for w in women:
     print(f"{W.degree(w)} {w}")
 
-# project bipartite graph onto women nodes keeping number of co-occurence
+# project bipartite graph onto women nodes keeping number of co-occurrence
 # the degree computed is weighted and counts the total number of shared contacts
 W = bipartite.weighted_projected_graph(G, women)
 print()
diff --git a/examples/algorithms/plot_dedensification.py b/examples/algorithms/plot_dedensification.py
index 9104d54..6cbe9ab 100644
--- a/examples/algorithms/plot_dedensification.py
+++ b/examples/algorithms/plot_dedensification.py
@@ -38,7 +38,7 @@ original_graph.add_edges_from(
         ("A", "6"),
     ]
 )
-base_options = dict(with_labels=True, edgecolors="black")
+base_options = {"with_labels": True, "edgecolors": "black"}
 pos = {
     "3": (0, 1),
     "2": (0, 2),
@@ -85,7 +85,7 @@ nx.draw_networkx(
     pos=nonexp_pos,
     node_color=nonexp_node_colors,
     node_size=nonexp_node_sizes,
-    **base_options
+    **base_options,
 )
 
 plt.tight_layout()
diff --git a/examples/algorithms/plot_girvan_newman.py b/examples/algorithms/plot_girvan_newman.py
new file mode 100644
index 0000000..151c80c
--- /dev/null
+++ b/examples/algorithms/plot_girvan_newman.py
@@ -0,0 +1,79 @@
+"""
+=======================================
+Community Detection using Girvan-Newman
+=======================================
+
+This example shows the detection of communities in the Zachary Karate
+Club dataset using the Girvan-Newman method.
+
+We plot the change in modularity as important edges are removed. 
+Graph is coloured and plotted based on community detection when number 
+of iterations are 1 and 4 respectively.
+"""
+
+import networkx as nx
+import pandas as pd
+import matplotlib.pyplot as plt
+
+# Load karate graph and find communities using Girvan-Newman
+G = nx.karate_club_graph()
+communities = list(nx.community.girvan_newman(G))
+
+# Modularity -> measures the strength of division of a network into modules
+modularity_df = pd.DataFrame(
+    [
+        [k + 1, nx.community.modularity(G, communities[k])]
+        for k in range(len(communities))
+    ],
+    columns=["k", "modularity"],
+)
+
+
+# function to create node colour list
+def create_community_node_colors(graph, communities):
+    number_of_colors = len(communities[0])
+    colors = ["#D4FCB1", "#CDC5FC", "#FFC2C4", "#F2D140", "#BCC6C8"][:number_of_colors]
+    node_colors = []
+    for node in graph:
+        current_community_index = 0
+        for community in communities:
+            if node in community:
+                node_colors.append(colors[current_community_index])
+                break
+            current_community_index += 1
+    return node_colors
+
+
+# function to plot graph with node colouring based on communities
+def visualize_communities(graph, communities, i):
+    node_colors = create_community_node_colors(graph, communities)
+    modularity = round(nx.community.modularity(graph, communities), 6)
+    title = f"Community Visualization of {len(communities)} communities with modularity of {modularity}"
+    pos = nx.spring_layout(graph, k=0.3, iterations=50, seed=2)
+    plt.subplot(3, 1, i)
+    plt.title(title)
+    nx.draw(
+        graph,
+        pos=pos,
+        node_size=1000,
+        node_color=node_colors,
+        with_labels=True,
+        font_size=20,
+        font_color="black",
+    )
+
+
+fig, ax = plt.subplots(3, figsize=(15, 20))
+
+# Plot graph with colouring based on communities
+visualize_communities(G, communities[0], 1)
+visualize_communities(G, communities[3], 2)
+
+# Plot change in modularity as the important edges are removed
+modularity_df.plot.bar(
+    x="k",
+    ax=ax[2],
+    color="#F2D140",
+    title="Modularity Trend for Girvan-Newman Community Detection",
+)
+plt.show()
diff --git a/examples/algorithms/plot_maximum_independent_set.py b/examples/algorithms/plot_maximum_independent_set.py
new file mode 100644
index 0000000..670edf9
--- /dev/null
+++ b/examples/algorithms/plot_maximum_independent_set.py
@@ -0,0 +1,44 @@
+"""
+=======================
+Maximum Independent Set
+=======================
+
+An independent set is a set of vertices in a graph where no two vertices in the
+set are adjacent. The maximum independent set is the independent set of largest
+possible size for a given graph.
+"""
+
+import numpy as np
+import matplotlib.pyplot as plt
+import networkx as nx
+from networkx.algorithms import approximation as approx
+
+G = nx.Graph(
+    [
+        (1, 2),
+        (7, 2),
+        (3, 9),
+        (3, 2),
+        (7, 6),
+        (5, 2),
+        (1, 5),
+        (2, 8),
+        (10, 2),
+        (1, 7),
+        (6, 1),
+        (6, 9),
+        (8, 4),
+        (9, 4),
+    ]
+)
+
+I = approx.maximum_independent_set(G)
+print(f"Maximum independent set of G: {I}")
+
+pos = nx.spring_layout(G, seed=39299899)
+nx.draw(
+    G,
+    pos=pos,
+    with_labels=True,
+    node_color=["tab:red" if n in I else "tab:blue" for n in G],
+)
diff --git a/examples/algorithms/plot_parallel_betweenness.py b/examples/algorithms/plot_parallel_betweenness.py
index d9333f4..e6d238d 100644
--- a/examples/algorithms/plot_parallel_betweenness.py
+++ b/examples/algorithms/plot_parallel_betweenness.py
@@ -65,7 +65,7 @@ G_ws = nx.connected_watts_strogatz_graph(1000, 4, 0.1)
 for G in [G_ba, G_er, G_ws]:
     print("")
     print("Computing betweenness centrality for:")
-    print(nx.info(G))
+    print(G)
     print("\tParallel version")
     start = time.time()
     bt = betweenness_centrality_parallel(G)
diff --git a/examples/algorithms/plot_snap.py b/examples/algorithms/plot_snap.py
index 85ea71d..f49f927 100644
--- a/examples/algorithms/plot_snap.py
+++ b/examples/algorithms/plot_snap.py
@@ -16,18 +16,18 @@ import matplotlib.pyplot as plt
 
 
 nodes = {
-    "A": dict(color="Red"),
-    "B": dict(color="Red"),
-    "C": dict(color="Red"),
-    "D": dict(color="Red"),
-    "E": dict(color="Blue"),
-    "F": dict(color="Blue"),
-    "G": dict(color="Blue"),
-    "H": dict(color="Blue"),
-    "I": dict(color="Yellow"),
-    "J": dict(color="Yellow"),
-    "K": dict(color="Yellow"),
-    "L": dict(color="Yellow"),
+    "A": {"color": "Red"},
+    "B": {"color": "Red"},
+    "C": {"color": "Red"},
+    "D": {"color": "Red"},
+    "E": {"color": "Blue"},
+    "F": {"color": "Blue"},
+    "G": {"color": "Blue"},
+    "H": {"color": "Blue"},
+    "I": {"color": "Yellow"},
+    "J": {"color": "Yellow"},
+    "K": {"color": "Yellow"},
+    "L": {"color": "Yellow"},
 }
 edges = [
     ("A", "B", "Strong"),
@@ -50,7 +50,7 @@ original_graph.add_edges_from((u, v, {"type": label}) for u, v, label in edges)
 
 plt.suptitle("SNAP Summarization")
 
-base_options = dict(with_labels=True, edgecolors="black", node_size=500)
+base_options = {"with_labels": True, "edgecolors": "black", "node_size": 500}
 
 ax1 = plt.subplot(1, 2, 1)
 plt.title(
@@ -101,7 +101,7 @@ nx.draw_networkx(
     pos=summary_pos,
     node_color=node_colors,
     width=edge_weights,
-    **base_options
+    **base_options,
 )
 
 plt.tight_layout()
diff --git a/examples/drawing/plot_center_node.py b/examples/drawing/plot_center_node.py
new file mode 100644
index 0000000..d7eb470
--- /dev/null
+++ b/examples/drawing/plot_center_node.py
@@ -0,0 +1,20 @@
+"""
+====================
+Custom Node Position
+====================
+
+Draw a graph with node(s) located at user-defined positions.
+
+When a position is set by the user, the other nodes can still be neatly organised in a layout.
+"""
+
+import networkx as nx
+import numpy as np
+
+G = nx.path_graph(20)  # An example graph
+center_node = 5  # Or any other node to be in the center
+edge_nodes = set(G) - {center_node}
+# Ensures the nodes around the circle are evenly distributed
+pos = nx.circular_layout(G.subgraph(edge_nodes))
+pos[center_node] = np.array([0, 0])  # manually specify node position
+nx.draw(G, pos, with_labels=True)
diff --git a/examples/drawing/plot_chess_masters.py b/examples/drawing/plot_chess_masters.py
index 52f4cfb..a5f3b5a 100644
--- a/examples/drawing/plot_chess_masters.py
+++ b/examples/drawing/plot_chess_masters.py
@@ -81,7 +81,7 @@ print(f"\nFrom a total of {len(openings)} different openings,")
 print("the following games used the Sicilian opening")
 print('with the Najdorff 7...Qb6 "Poisoned Pawn" variation.\n')
 
-for (white, black, game_info) in G.edges(data=True):
+for white, black, game_info in G.edges(data=True):
     if game_info["ECO"] == "B97":
         summary = f"{white} vs {black}\n"
         for k, v in game_info.items():
@@ -97,7 +97,7 @@ edgewidth = [len(G.get_edge_data(u, v)) for u, v in H.edges()]
 
 # node size is proportional to number of games won
 wins = dict.fromkeys(G.nodes(), 0.0)
-for (u, v, d) in G.edges(data=True):
+for u, v, d in G.edges(data=True):
     r = d["Result"].split("-")
     if r[0] == "1":
         wins[u] += 1.0
@@ -145,7 +145,7 @@ ax.text(
     fontdict=font,
 )
 
-# Resize figure for label readibility
+# Resize figure for label readability
 ax.margins(0.1, 0.05)
 fig.tight_layout()
 plt.axis("off")
diff --git a/examples/drawing/plot_knuth_miles.py b/examples/drawing/plot_knuth_miles.py
index e0ebea8..cd13959 100644
--- a/examples/drawing/plot_knuth_miles.py
+++ b/examples/drawing/plot_knuth_miles.py
@@ -79,7 +79,7 @@ print(G)
 H = nx.Graph()
 for v in G:
     H.add_node(v)
-for (u, v, d) in G.edges(data=True):
+for u, v, d in G.edges(data=True):
     if d["weight"] < 300:
         H.add_edge(u, v)
 
@@ -110,11 +110,11 @@ try:
     # NOTE: When using cartopy, use matplotlib directly rather than nx.draw
     # to take advantage of the cartopy transforms
     ax.scatter(
-        *np.array([v for v in G.position.values()]).T,
+        *np.array(list(G.position.values())).T,
         s=[G.population[v] for v in H],
         c=node_color,
         transform=ccrs.PlateCarree(),
-        zorder=100  # Ensure nodes lie on top of edges/state lines
+        zorder=100,  # Ensure nodes lie on top of edges/state lines
     )
     # Plot edges between the cities
     for edge in H.edges():
diff --git a/examples/drawing/plot_multipartite_graph.py b/examples/drawing/plot_multipartite_graph.py
index 15c4d82..a941bdd 100644
--- a/examples/drawing/plot_multipartite_graph.py
+++ b/examples/drawing/plot_multipartite_graph.py
@@ -25,7 +25,7 @@ def multilayered_graph(*subset_sizes):
     extents = nx.utils.pairwise(itertools.accumulate((0,) + subset_sizes))
     layers = [range(start, end) for start, end in extents]
     G = nx.Graph()
-    for (i, layer) in enumerate(layers):
+    for i, layer in enumerate(layers):
         G.add_nodes_from(layer, layer=i)
     for layer1, layer2 in nx.utils.pairwise(layers):
         G.add_edges_from(itertools.product(layer1, layer2))
diff --git a/examples/drawing/plot_unix_email.py b/examples/drawing/plot_unix_email.py
index 25fce7a..748b549 100644
--- a/examples/drawing/plot_unix_email.py
+++ b/examples/drawing/plot_unix_email.py
@@ -43,7 +43,7 @@ def mbox_graph():
         resent_ccs = msg.get_all("resent-cc", [])
         all_recipients = getaddresses(tos + ccs + resent_tos + resent_ccs)
         # now add the edges for this mail message
-        for (target_name, target_addr) in all_recipients:
+        for target_name, target_addr in all_recipients:
             G.add_edge(source_addr, target_addr, message=msg)
 
     return G
@@ -52,7 +52,7 @@ def mbox_graph():
 G = mbox_graph()
 
 # print edges with message subject
-for (u, v, d) in G.edges(data=True):
+for u, v, d in G.edges(data=True):
     print(f"From: {u} To: {v} Subject: {d['message']['Subject']}")
 
 pos = nx.spring_layout(G, iterations=10, seed=227)
diff --git a/examples/external/force/README.txt b/examples/external/force/README.txt
index b49796c..6bac21a 100644
--- a/examples/external/force/README.txt
+++ b/examples/external/force/README.txt
@@ -1,7 +1,7 @@
 Modified from the example at of D3
 https://bl.ocks.org/mbostock/4062045
 
-Run the file force.py to generate the force.json data file needed for this to work.
+Run the file javascript_force.py to generate the force.json data file needed for this to work.
 
 Then copy all of the files in this directory to a webserver and load force.html.
 
diff --git a/examples/graph/plot_dag_layout.py b/examples/graph/plot_dag_layout.py
new file mode 100644
index 0000000..0b12013
--- /dev/null
+++ b/examples/graph/plot_dag_layout.py
@@ -0,0 +1,42 @@
+"""
+========================
+DAG - Topological Layout
+========================
+
+This example combines the `topological_generations` generator with
+`multipartite_layout` to show how to visualize a DAG in topologically-sorted
+order.
+"""
+
+import networkx as nx
+import matplotlib.pyplot as plt
+
+
+G = nx.DiGraph(
+    [
+        ("f", "a"),
+        ("a", "b"),
+        ("a", "e"),
+        ("b", "c"),
+        ("b", "d"),
+        ("d", "e"),
+        ("f", "c"),
+        ("f", "g"),
+        ("h", "f"),
+    ]
+)
+
+for layer, nodes in enumerate(nx.topological_generations(G)):
+    # `multipartite_layout` expects the layer as a node attribute, so add the
+    # numeric layer value as a node attribute
+    for node in nodes:
+        G.nodes[node]["layer"] = layer
+
+# Compute the multipartite_layout using the "layer" node attribute
+pos = nx.multipartite_layout(G, subset_key="layer")
+
+fig, ax = plt.subplots()
+nx.draw_networkx(G, pos=pos, ax=ax)
+ax.set_title("DAG layout in topological order")
+fig.tight_layout()
+plt.show()
diff --git a/examples/graph/plot_degree_sequence.py b/examples/graph/plot_degree_sequence.py
index 0d32091..87abb64 100644
--- a/examples/graph/plot_degree_sequence.py
+++ b/examples/graph/plot_degree_sequence.py
@@ -15,7 +15,9 @@ z = [5, 3, 3, 3, 3, 2, 2, 2, 1, 1, 1]
 print(nx.is_graphical(z))
 
 print("Configuration model")
-G = nx.configuration_model(z, seed=seed)  # configuration model, seed for reproduciblity
+G = nx.configuration_model(
+    z, seed=seed
+)  # configuration model, seed for reproducibility
 degree_sequence = [d for n, d in G.degree()]  # degree sequence
 print(f"Degree sequence {degree_sequence}")
 print("Degree histogram")
diff --git a/examples/graph/plot_morse_trie.py b/examples/graph/plot_morse_trie.py
index 88b424c..22c1b99 100644
--- a/examples/graph/plot_morse_trie.py
+++ b/examples/graph/plot_morse_trie.py
@@ -77,6 +77,7 @@ nx.draw(G, pos=pos, with_labels=True)
 elabels = {(u, v): l for u, v, l in G.edges(data="char")}
 nx.draw_networkx_edge_labels(G, pos, edge_labels=elabels)
 
+
 # A letter can be encoded by following the path from the given letter (node) to
 # the root node
 def morse_encode(letter):
diff --git a/examples/graph/plot_mst.py b/examples/graph/plot_mst.py
new file mode 100644
index 0000000..ed38d0b
--- /dev/null
+++ b/examples/graph/plot_mst.py
@@ -0,0 +1,50 @@
+"""
+=====================
+Minimum Spanning Tree
+=====================
+
+A minimum spanning tree (MST) is a subset of edges in a weighted, 
+connected graph that connects all vertices together with the 
+minimum possible total edge weight. The `minimum_spanning_tree`
+function is used to compare the original graph with its MST.
+
+"""
+
+
+import networkx as nx
+import matplotlib.pyplot as plt
+
+# Create a graph
+G = nx.Graph()
+G.add_edges_from(
+    [
+        (0, 1, {"weight": 4}),
+        (0, 7, {"weight": 8}),
+        (1, 7, {"weight": 11}),
+        (1, 2, {"weight": 8}),
+        (2, 8, {"weight": 2}),
+        (2, 5, {"weight": 4}),
+        (2, 3, {"weight": 7}),
+        (3, 4, {"weight": 9}),
+        (3, 5, {"weight": 14}),
+        (4, 5, {"weight": 10}),
+        (5, 6, {"weight": 2}),
+        (6, 8, {"weight": 6}),
+        (7, 8, {"weight": 7}),
+    ]
+)
+
+# Find the minimum spanning tree
+T = nx.minimum_spanning_tree(G)
+
+# Visualize the graph and the minimum spanning tree
+pos = nx.spring_layout(G)
+nx.draw_networkx_nodes(G, pos, node_color="lightblue", node_size=500)
+nx.draw_networkx_edges(G, pos, edge_color="grey")
+nx.draw_networkx_labels(G, pos, font_size=12, font_family="sans-serif")
+nx.draw_networkx_edge_labels(
+    G, pos, edge_labels={(u, v): d["weight"] for u, v, d in G.edges(data=True)}
+)
+nx.draw_networkx_edges(T, pos, edge_color="green", width=2)
+plt.axis("off")
+plt.show()
diff --git a/examples/graph/plot_triad_types.py b/examples/graph/plot_triad_types.py
new file mode 100644
index 0000000..a04c930
--- /dev/null
+++ b/examples/graph/plot_triad_types.py
@@ -0,0 +1,63 @@
+"""
+======
+Triads
+======
+According to the paper by Snijders, T. (2012). “Transitivity and triads.”
+University of Oxford, there are 16 Triad Types possible. This plot shows
+the 16 Triad Types that can be identified within directed networks.
+Triadic relationships are especially useful when analysing Social Networks.
+The first three digits refer to the number of mutual, asymmetric and null
+dyads (bidirectional, unidirection and nonedges) and the letter gives
+the Orientation as Up (U), Down (D) , Cyclical (C) or Transitive (T).
+"""
+
+import networkx as nx
+import matplotlib.pyplot as plt
+
+fig, axes = plt.subplots(4, 4, figsize=(10, 10))
+triads = {
+    "003": [],
+    "012": [(1, 2)],
+    "102": [(1, 2), (2, 1)],
+    "021D": [(3, 1), (3, 2)],
+    "021U": [(1, 3), (2, 3)],
+    "021C": [(1, 3), (3, 2)],
+    "111D": [(1, 2), (2, 1), (3, 1)],
+    "111U": [(1, 2), (2, 1), (1, 3)],
+    "030T": [(1, 2), (3, 2), (1, 3)],
+    "030C": [(1, 3), (3, 2), (2, 1)],
+    "201": [(1, 2), (2, 1), (3, 1), (1, 3)],
+    "120D": [(1, 2), (2, 1), (3, 1), (3, 2)],
+    "120U": [(1, 2), (2, 1), (1, 3), (2, 3)],
+    "120C": [(1, 2), (2, 1), (1, 3), (3, 2)],
+    "210": [(1, 2), (2, 1), (1, 3), (3, 2), (2, 3)],
+    "300": [(1, 2), (2, 1), (2, 3), (3, 2), (1, 3), (3, 1)],
+}
+
+for (title, triad), ax in zip(triads.items(), axes.flatten()):
+    G = nx.DiGraph()
+    G.add_nodes_from([1, 2, 3])
+    G.add_edges_from(triad)
+    nx.draw_networkx(
+        G,
+        ax=ax,
+        with_labels=False,
+        node_color=["green"],
+        node_size=200,
+        arrowsize=20,
+        width=2,
+        pos=nx.planar_layout(G),
+    )
+    ax.set_xlim(val * 1.2 for val in ax.get_xlim())
+    ax.set_ylim(val * 1.2 for val in ax.get_ylim())
+    ax.text(
+        0,
+        0,
+        title,
+        fontsize=15,
+        fontweight="extra bold",
+        horizontalalignment="center",
+        bbox={"boxstyle": "square,pad=0.3", "fc": "none"},
+    )
+fig.tight_layout()
+plt.show()
diff --git a/examples/graph/plot_words.py b/examples/graph/plot_words.py
index e5211f5..19a49eb 100644
--- a/examples/graph/plot_words.py
+++ b/examples/graph/plot_words.py
@@ -65,7 +65,7 @@ print("Two words are connected if they differ in one letter.")
 print(G)
 print(f"{nx.number_connected_components(G)} connected components")
 
-for (source, target) in [("chaos", "order"), ("nodes", "graph"), ("pound", "marks")]:
+for source, target in [("chaos", "order"), ("nodes", "graph"), ("pound", "marks")]:
     print(f"Shortest path between {source} and {target} is")
     try:
         shortest_path = nx.shortest_path(G, source, target)
diff --git a/networkx/__init__.py b/networkx/__init__.py
index d49e31e..0c724c3 100644
--- a/networkx/__init__.py
+++ b/networkx/__init__.py
@@ -8,47 +8,7 @@ structure, dynamics, and functions of complex networks.
 See https://networkx.org for complete documentation.
 """
 
-__version__ = "2.8.8"
-
-
-def __getattr__(name):
-    """Remove functions and provide informative error messages."""
-    if name == "nx_yaml":
-        raise ImportError(
-            "\nThe nx_yaml module has been removed from NetworkX.\n"
-            "Please use the `yaml` package directly for working with yaml data.\n"
-            "For example, a networkx.Graph `G` can be written to and loaded\n"
-            "from a yaml file with:\n\n"
-            "    import yaml\n\n"
-            "    with open('path_to_yaml_file', 'w') as fh:\n"
-            "        yaml.dump(G, fh)\n"
-            "    with open('path_to_yaml_file', 'r') as fh:\n"
-            "        G = yaml.load(fh, Loader=yaml.Loader)\n\n"
-            "Note that yaml.Loader is considered insecure - see the pyyaml\n"
-            "documentation for further details.\n\n"
-            "This message will be removed in NetworkX 3.0."
-        )
-    if name == "read_yaml":
-        raise ImportError(
-            "\nread_yaml has been removed from NetworkX, please use `yaml`\n"
-            "directly:\n\n"
-            "    import yaml\n\n"
-            "    with open('path', 'r') as fh:\n"
-            "        yaml.load(fh, Loader=yaml.Loader)\n\n"
-            "Note that yaml.Loader is considered insecure - see the pyyaml\n"
-            "documentation for further details.\n\n"
-            "This message will be removed in NetworkX 3.0."
-        )
-    if name == "write_yaml":
-        raise ImportError(
-            "\nwrite_yaml has been removed from NetworkX, please use `yaml`\n"
-            "directly:\n\n"
-            "    import yaml\n\n"
-            "    with open('path_for_yaml_output', 'w') as fh:\n"
-            "        yaml.dump(G_to_be_yaml, fh)\n\n"
-            "This message will be removed in NetworkX 3.0."
-        )
-    raise AttributeError(f"module {__name__} has no attribute {name}")
+__version__ = "3.1"
 
 
 # These are imported in order as listed
@@ -61,6 +21,7 @@ from networkx import utils
 from networkx import classes
 from networkx.classes import filters
 from networkx.classes import *
+from networkx.classes import _dispatch
 
 from networkx import convert
 from networkx.convert import *
@@ -84,7 +45,5 @@ from networkx.algorithms import *
 from networkx import linalg
 from networkx.linalg import *
 
-from networkx.testing.test import run as test
-
 from networkx import drawing
 from networkx.drawing import *
diff --git a/networkx/algorithms/__init__.py b/networkx/algorithms/__init__.py
index 9fa60b9..cb5ea77 100644
--- a/networkx/algorithms/__init__.py
+++ b/networkx/algorithms/__init__.py
@@ -85,7 +85,6 @@ from networkx.algorithms import tree
 # to the user as direct imports from the `networkx` namespace.
 from networkx.algorithms.bipartite import complete_bipartite_graph
 from networkx.algorithms.bipartite import is_bipartite
-from networkx.algorithms.bipartite import project
 from networkx.algorithms.bipartite import projected_graph
 from networkx.algorithms.connectivity import all_pairs_node_connectivity
 from networkx.algorithms.connectivity import all_node_cuts
@@ -117,6 +116,7 @@ from networkx.algorithms.isomorphism import could_be_isomorphic
 from networkx.algorithms.isomorphism import fast_could_be_isomorphic
 from networkx.algorithms.isomorphism import faster_could_be_isomorphic
 from networkx.algorithms.isomorphism import is_isomorphic
+from networkx.algorithms.isomorphism.vf2pp import *
 from networkx.algorithms.tree.branchings import maximum_branching
 from networkx.algorithms.tree.branchings import maximum_spanning_arborescence
 from networkx.algorithms.tree.branchings import minimum_branching
diff --git a/networkx/algorithms/approximation/__init__.py b/networkx/algorithms/approximation/__init__.py
index 13fc21f..e39dc00 100644
--- a/networkx/algorithms/approximation/__init__.py
+++ b/networkx/algorithms/approximation/__init__.py
@@ -1,12 +1,12 @@
 """Approximations of graph properties and Heuristic methods for optimization.
 
-    .. warning:: These functions are not imported in the top-level of ``networkx``
+The functions in this module are not imported into the top-level ``networkx``
+namespace so the easiest way to use them is with::
 
-    These functions can be accessed using
-    ``networkx.approximation.function_name``
+    >>> from networkx.algorithms import approximation
 
-    They can be imported using ``from networkx.algorithms import approximation``
-    or ``from networkx.algorithms.approximation import function_name``
+Another option is to import the specific function with
+``from networkx.algorithms.approximation import function_name``.
 
 """
 from networkx.algorithms.approximation.clustering_coefficient import *
diff --git a/networkx/algorithms/approximation/connectivity.py b/networkx/algorithms/approximation/connectivity.py
index ced4c38..a919062 100644
--- a/networkx/algorithms/approximation/connectivity.py
+++ b/networkx/algorithms/approximation/connectivity.py
@@ -387,7 +387,7 @@ def _bidirectional_pred_succ(G, source, target, exclude):
         # thus source and target will only trigger "found path" when they are
         # adjacent and then they can be safely included in the container 'exclude'
         level += 1
-        if not level % 2 == 0:
+        if level % 2 != 0:
             this_level = forward_fringe
             forward_fringe = []
             for v in this_level:
diff --git a/networkx/algorithms/approximation/kcomponents.py b/networkx/algorithms/approximation/kcomponents.py
index 239cc0f..94a9891 100644
--- a/networkx/algorithms/approximation/kcomponents.py
+++ b/networkx/algorithms/approximation/kcomponents.py
@@ -212,7 +212,7 @@ class _AntiGraph(nx.Graph):
     def single_edge_dict(self):
         return self.all_edge_dict
 
-    edge_attr_dict_factory = single_edge_dict  # type: ignore
+    edge_attr_dict_factory = single_edge_dict  # type: ignore[assignment]
 
     def __getitem__(self, n):
         """Returns a dict of neighbors of node n in the dense graph.
diff --git a/networkx/algorithms/approximation/steinertree.py b/networkx/algorithms/approximation/steinertree.py
index 496098b..ef7fb21 100644
--- a/networkx/algorithms/approximation/steinertree.py
+++ b/networkx/algorithms/approximation/steinertree.py
@@ -46,22 +46,109 @@ def metric_closure(G, weight="weight"):
     return M
 
 
+def _mehlhorn_steiner_tree(G, terminal_nodes, weight):
+    paths = nx.multi_source_dijkstra_path(G, terminal_nodes)
+
+    d_1 = {}
+    s = {}
+    for v in G.nodes():
+        s[v] = paths[v][0]
+        d_1[(v, s[v])] = len(paths[v]) - 1
+
+    # G1-G4 names match those from the Mehlhorn 1988 paper.
+    G_1_prime = nx.Graph()
+    for u, v, data in G.edges(data=True):
+        su, sv = s[u], s[v]
+        weight_here = d_1[(u, su)] + data.get(weight, 1) + d_1[(v, sv)]
+        if not G_1_prime.has_edge(su, sv):
+            G_1_prime.add_edge(su, sv, weight=weight_here)
+        else:
+            new_weight = min(weight_here, G_1_prime[su][sv][weight])
+            G_1_prime.add_edge(su, sv, weight=new_weight)
+
+    G_2 = nx.minimum_spanning_edges(G_1_prime, data=True)
+
+    G_3 = nx.Graph()
+    for u, v, d in G_2:
+        path = nx.shortest_path(G, u, v, weight)
+        for n1, n2 in pairwise(path):
+            G_3.add_edge(n1, n2)
+
+    G_3_mst = list(nx.minimum_spanning_edges(G_3, data=False))
+    if G.is_multigraph():
+        G_3_mst = (
+            (u, v, min(G[u][v], key=lambda k: G[u][v][k][weight])) for u, v in G_3_mst
+        )
+    G_4 = G.edge_subgraph(G_3_mst).copy()
+    _remove_nonterminal_leaves(G_4, terminal_nodes)
+    return G_4.edges()
+
+
+def _kou_steiner_tree(G, terminal_nodes, weight):
+    # H is the subgraph induced by terminal_nodes in the metric closure M of G.
+    M = metric_closure(G, weight=weight)
+    H = M.subgraph(terminal_nodes)
+
+    # Use the 'distance' attribute of each edge provided by M.
+    mst_edges = nx.minimum_spanning_edges(H, weight="distance", data=True)
+
+    # Create an iterator over each edge in each shortest path; repeats are okay
+    mst_all_edges = chain.from_iterable(pairwise(d["path"]) for u, v, d in mst_edges)
+    if G.is_multigraph():
+        mst_all_edges = (
+            (u, v, min(G[u][v], key=lambda k: G[u][v][k][weight]))
+            for u, v in mst_all_edges
+        )
+
+    # Find the MST again, over this new set of edges
+    G_S = G.edge_subgraph(mst_all_edges)
+    T_S = nx.minimum_spanning_edges(G_S, weight="weight", data=False)
+
+    # Leaf nodes that are not terminal might still remain; remove them here
+    T_H = G.edge_subgraph(T_S).copy()
+    _remove_nonterminal_leaves(T_H, terminal_nodes)
+
+    return T_H.edges()
+
+
+def _remove_nonterminal_leaves(G, terminals):
+    terminals_set = set(terminals)
+    for n in list(G.nodes):
+        if n not in terminals_set and G.degree(n) == 1:
+            G.remove_node(n)
+
+
+ALGORITHMS = {
+    "kou": _kou_steiner_tree,
+    "mehlhorn": _mehlhorn_steiner_tree,
+}
+
+
 @not_implemented_for("directed")
-def steiner_tree(G, terminal_nodes, weight="weight"):
-    """Return an approximation to the minimum Steiner tree of a graph.
-
-    The minimum Steiner tree of `G` w.r.t a set of `terminal_nodes`
-    is a tree within `G` that spans those nodes and has minimum size
-    (sum of edge weights) among all such trees.
-
-    The minimum Steiner tree can be approximated by computing the minimum
-    spanning tree of the subgraph of the metric closure of *G* induced by the
-    terminal nodes, where the metric closure of *G* is the complete graph in
-    which each edge is weighted by the shortest path distance between the
-    nodes in *G* .
-    This algorithm produces a tree whose weight is within a (2 - (2 / t))
-    factor of the weight of the optimal Steiner tree where *t* is number of
-    terminal nodes.
+def steiner_tree(G, terminal_nodes, weight="weight", method=None):
+    r"""Return an approximation to the minimum Steiner tree of a graph.
+
+    The minimum Steiner tree of `G` w.r.t a set of `terminal_nodes` (also *S*)
+    is a tree within `G` that spans those nodes and has minimum size (sum of
+    edge weights) among all such trees.
+
+    The approximation algorithm is specified with the `method` keyword
+    argument. All three available algorithms produce a tree whose weight is
+    within a ``(2 - (2 / l))`` factor of the weight of the optimal Steiner tree,
+    where ``l`` is the minimum number of leaf nodes across all possible Steiner
+    trees.
+
+    * ``"kou"`` [2]_ (runtime $O(|S| |V|^2)$) computes the minimum spanning tree of
+      the subgraph of the metric closure of *G* induced by the terminal nodes,
+      where the metric closure of *G* is the complete graph in which each edge is
+      weighted by the shortest path distance between the nodes in *G*.
+
+    * ``"mehlhorn"`` [3]_ (runtime $O(|E|+|V|\log|V|)$) modifies Kou et al.'s
+      algorithm, beginning by finding the closest terminal node for each
+      non-terminal. This data is used to create a complete graph containing only
+      the terminal nodes, in which each edge is weighted with the shortest path
+      distance between them. The algorithm then proceeds in the same way as Kou
+      et al.
 
     Parameters
     ----------
@@ -71,6 +158,15 @@ def steiner_tree(G, terminal_nodes, weight="weight"):
          A list of terminal nodes for which minimum steiner tree is
          to be found.
 
+    weight : string (default = 'weight')
+        Use the edge attribute specified by this string as the edge weight.
+        Any edge attribute not present defaults to 1.
+
+    method : string, optional (default = 'kou')
+        The algorithm to use to approximate the Steiner tree.
+        Supported options: 'kou', 'mehlhorn'.
+        Other inputs produce a ValueError.
+
     Returns
     -------
     NetworkX graph
@@ -86,15 +182,33 @@ def steiner_tree(G, terminal_nodes, weight="weight"):
     References
     ----------
     .. [1] Steiner_tree_problem on Wikipedia.
-       https://en.wikipedia.org/wiki/Steiner_tree_problem
+           https://en.wikipedia.org/wiki/Steiner_tree_problem
+    .. [2] Kou, L., G. Markowsky, and L. Berman. 1981.
+           ‘A Fast Algorithm for Steiner Trees’.
+           Acta Informatica 15 (2): 141–45.
+           https://doi.org/10.1007/BF00288961.
+    .. [3] Mehlhorn, Kurt. 1988.
+           ‘A Faster Approximation Algorithm for the Steiner Problem in Graphs’.
+           Information Processing Letters 27 (3): 125–28.
+           https://doi.org/10.1016/0020-0190(88)90066-X.
     """
-    # H is the subgraph induced by terminal_nodes in the metric closure M of G.
-    M = metric_closure(G, weight=weight)
-    H = M.subgraph(terminal_nodes)
-    # Use the 'distance' attribute of each edge provided by M.
-    mst_edges = nx.minimum_spanning_edges(H, weight="distance", data=True)
-    # Create an iterator over each edge in each shortest path; repeats are okay
-    edges = chain.from_iterable(pairwise(d["path"]) for u, v, d in mst_edges)
+    if method is None:
+        import warnings
+
+        msg = (
+            "steiner_tree will change default method from 'kou' to 'mehlhorn'"
+            "in version 3.2.\nSet the `method` kwarg to remove this warning."
+        )
+        warnings.warn(msg, FutureWarning, stacklevel=4)
+        method = "kou"
+
+    try:
+        algo = ALGORITHMS[method]
+    except KeyError as e:
+        msg = f"{method} is not a valid choice for an algorithm."
+        raise ValueError(msg) from e
+
+    edges = algo(G, terminal_nodes, weight)
     # For multigraph we should add the minimal weight edge keys
     if G.is_multigraph():
         edges = (
diff --git a/networkx/algorithms/approximation/tests/test_maxcut.py b/networkx/algorithms/approximation/tests/test_maxcut.py
index ec75b59..39291fb 100644
--- a/networkx/algorithms/approximation/tests/test_maxcut.py
+++ b/networkx/algorithms/approximation/tests/test_maxcut.py
@@ -36,7 +36,7 @@ def test_random_partitioning_all_to_one():
 def test_one_exchange_basic():
     G = nx.complete_graph(5)
     random.seed(5)
-    for (u, v, w) in G.edges(data=True):
+    for u, v, w in G.edges(data=True):
         w["weight"] = random.randrange(-100, 100, 1) / 10
 
     initial_cut = set(random.sample(sorted(G.nodes()), k=5))
@@ -68,7 +68,7 @@ def test_one_exchange_optimal():
 def test_negative_weights():
     G = nx.complete_graph(5)
     random.seed(5)
-    for (u, v, w) in G.edges(data=True):
+    for u, v, w in G.edges(data=True):
         w["weight"] = -1 * random.random()
 
     initial_cut = set(random.sample(sorted(G.nodes()), k=5))
diff --git a/networkx/algorithms/approximation/tests/test_ramsey.py b/networkx/algorithms/approximation/tests/test_ramsey.py
index 856a8ef..32fe1fb 100644
--- a/networkx/algorithms/approximation/tests/test_ramsey.py
+++ b/networkx/algorithms/approximation/tests/test_ramsey.py
@@ -11,7 +11,7 @@ def test_ramsey():
     idens = nx.density(graph.subgraph(i))
     assert idens == 0.0, "i-set not correctly found by ramsey!"
 
-    # this trival graph has no cliques. should just find i-sets
+    # this trivial graph has no cliques. should just find i-sets
     graph = nx.trivial_graph()
     c, i = apxa.ramsey_R2(graph)
     assert c == {0}, "clique not correctly found by ramsey!"
diff --git a/networkx/algorithms/approximation/tests/test_steinertree.py b/networkx/algorithms/approximation/tests/test_steinertree.py
index d58eb66..d7af1a1 100644
--- a/networkx/algorithms/approximation/tests/test_steinertree.py
+++ b/networkx/algorithms/approximation/tests/test_steinertree.py
@@ -8,24 +8,75 @@ from networkx.utils import edges_equal
 class TestSteinerTree:
     @classmethod
     def setup_class(cls):
-        G = nx.Graph()
-        G.add_edge(1, 2, weight=10)
-        G.add_edge(2, 3, weight=10)
-        G.add_edge(3, 4, weight=10)
-        G.add_edge(4, 5, weight=10)
-        G.add_edge(5, 6, weight=10)
-        G.add_edge(2, 7, weight=1)
-        G.add_edge(7, 5, weight=1)
-        cls.G = G
-        cls.term_nodes = [1, 2, 3, 4, 5]
+        G1 = nx.Graph()
+        G1.add_edge(1, 2, weight=10)
+        G1.add_edge(2, 3, weight=10)
+        G1.add_edge(3, 4, weight=10)
+        G1.add_edge(4, 5, weight=10)
+        G1.add_edge(5, 6, weight=10)
+        G1.add_edge(2, 7, weight=1)
+        G1.add_edge(7, 5, weight=1)
+
+        G2 = nx.Graph()
+        G2.add_edge(0, 5, weight=6)
+        G2.add_edge(1, 2, weight=2)
+        G2.add_edge(1, 5, weight=3)
+        G2.add_edge(2, 4, weight=4)
+        G2.add_edge(3, 5, weight=5)
+        G2.add_edge(4, 5, weight=1)
+
+        G3 = nx.Graph()
+        G3.add_edge(1, 2, weight=8)
+        G3.add_edge(1, 9, weight=3)
+        G3.add_edge(1, 8, weight=6)
+        G3.add_edge(1, 10, weight=2)
+        G3.add_edge(1, 14, weight=3)
+        G3.add_edge(2, 3, weight=6)
+        G3.add_edge(3, 4, weight=3)
+        G3.add_edge(3, 10, weight=2)
+        G3.add_edge(3, 11, weight=1)
+        G3.add_edge(4, 5, weight=1)
+        G3.add_edge(4, 11, weight=1)
+        G3.add_edge(5, 6, weight=4)
+        G3.add_edge(5, 11, weight=2)
+        G3.add_edge(5, 12, weight=1)
+        G3.add_edge(5, 13, weight=3)
+        G3.add_edge(6, 7, weight=2)
+        G3.add_edge(6, 12, weight=3)
+        G3.add_edge(6, 13, weight=1)
+        G3.add_edge(7, 8, weight=3)
+        G3.add_edge(7, 9, weight=3)
+        G3.add_edge(7, 11, weight=5)
+        G3.add_edge(7, 13, weight=2)
+        G3.add_edge(7, 14, weight=4)
+        G3.add_edge(8, 9, weight=2)
+        G3.add_edge(9, 14, weight=1)
+        G3.add_edge(10, 11, weight=2)
+        G3.add_edge(10, 14, weight=1)
+        G3.add_edge(11, 12, weight=1)
+        G3.add_edge(11, 14, weight=7)
+        G3.add_edge(12, 14, weight=3)
+        G3.add_edge(12, 15, weight=1)
+        G3.add_edge(13, 14, weight=4)
+        G3.add_edge(13, 15, weight=1)
+        G3.add_edge(14, 15, weight=2)
+
+        cls.G1 = G1
+        cls.G2 = G2
+        cls.G3 = G3
+        cls.G1_term_nodes = [1, 2, 3, 4, 5]
+        cls.G2_term_nodes = [0, 2, 3]
+        cls.G3_term_nodes = [1, 3, 5, 6, 8, 10, 11, 12, 13]
+
+        cls.methods = ["kou", "mehlhorn"]
 
     def test_connected_metric_closure(self):
-        G = self.G.copy()
+        G = self.G1.copy()
         G.add_node(100)
         pytest.raises(nx.NetworkXError, metric_closure, G)
 
     def test_metric_closure(self):
-        M = metric_closure(self.G)
+        M = metric_closure(self.G1)
         mc = [
             (1, 2, {"distance": 10, "path": [1, 2]}),
             (1, 3, {"distance": 20, "path": [1, 2, 3]}),
@@ -52,15 +103,71 @@ class TestSteinerTree:
         assert edges_equal(list(M.edges(data=True)), mc)
 
     def test_steiner_tree(self):
-        S = steiner_tree(self.G, self.term_nodes)
-        expected_steiner_tree = [
-            (1, 2, {"weight": 10}),
-            (2, 3, {"weight": 10}),
-            (2, 7, {"weight": 1}),
-            (3, 4, {"weight": 10}),
-            (5, 7, {"weight": 1}),
+        valid_steiner_trees = [
+            [
+                [
+                    (1, 2, {"weight": 10}),
+                    (2, 3, {"weight": 10}),
+                    (2, 7, {"weight": 1}),
+                    (3, 4, {"weight": 10}),
+                    (5, 7, {"weight": 1}),
+                ],
+                [
+                    (1, 2, {"weight": 10}),
+                    (2, 7, {"weight": 1}),
+                    (3, 4, {"weight": 10}),
+                    (4, 5, {"weight": 10}),
+                    (5, 7, {"weight": 1}),
+                ],
+                [
+                    (1, 2, {"weight": 10}),
+                    (2, 3, {"weight": 10}),
+                    (2, 7, {"weight": 1}),
+                    (4, 5, {"weight": 10}),
+                    (5, 7, {"weight": 1}),
+                ],
+            ],
+            [
+                [
+                    (0, 5, {"weight": 6}),
+                    (1, 2, {"weight": 2}),
+                    (1, 5, {"weight": 3}),
+                    (3, 5, {"weight": 5}),
+                ],
+                [
+                    (0, 5, {"weight": 6}),
+                    (4, 2, {"weight": 4}),
+                    (4, 5, {"weight": 1}),
+                    (3, 5, {"weight": 5}),
+                ],
+            ],
+            [
+                [
+                    (1, 10, {"weight": 2}),
+                    (3, 10, {"weight": 2}),
+                    (3, 11, {"weight": 1}),
+                    (5, 12, {"weight": 1}),
+                    (6, 13, {"weight": 1}),
+                    (8, 9, {"weight": 2}),
+                    (9, 14, {"weight": 1}),
+                    (10, 14, {"weight": 1}),
+                    (11, 12, {"weight": 1}),
+                    (12, 15, {"weight": 1}),
+                    (13, 15, {"weight": 1}),
+                ]
+            ],
         ]
-        assert edges_equal(list(S.edges(data=True)), expected_steiner_tree)
+        for method in self.methods:
+            for G, term_nodes, valid_trees in zip(
+                [self.G1, self.G2, self.G3],
+                [self.G1_term_nodes, self.G2_term_nodes, self.G3_term_nodes],
+                valid_steiner_trees,
+            ):
+                S = steiner_tree(G, term_nodes, method=method)
+                assert any(
+                    edges_equal(list(S.edges(data=True)), valid_tree)
+                    for valid_tree in valid_trees
+                )
 
     def test_multigraph_steiner_tree(self):
         G = nx.MultiGraph()
@@ -79,5 +186,6 @@ class TestSteinerTree:
             (3, 4, 0, {"weight": 1}),
             (3, 5, 0, {"weight": 1}),
         ]
-        T = steiner_tree(G, terminal_nodes)
-        assert edges_equal(T.edges(data=True, keys=True), expected_edges)
+        for method in self.methods:
+            S = steiner_tree(G, terminal_nodes, method=method)
+            assert edges_equal(S.edges(data=True, keys=True), expected_edges)
diff --git a/networkx/algorithms/approximation/tests/test_traveling_salesman.py b/networkx/algorithms/approximation/tests/test_traveling_salesman.py
index 6f9b3b0..ccb553e 100644
--- a/networkx/algorithms/approximation/tests/test_traveling_salesman.py
+++ b/networkx/algorithms/approximation/tests/test_traveling_salesman.py
@@ -12,7 +12,7 @@ pairwise = nx.utils.pairwise
 def test_christofides_hamiltonian():
     random.seed(42)
     G = nx.complete_graph(20)
-    for (u, v) in G.edges():
+    for u, v in G.edges():
         G[u][v]["weight"] = random.randint(0, 10)
 
     H = nx.Graph()
diff --git a/networkx/algorithms/approximation/tests/test_treewidth.py b/networkx/algorithms/approximation/tests/test_treewidth.py
index fdccfd1..461b0f2 100644
--- a/networkx/algorithms/approximation/tests/test_treewidth.py
+++ b/networkx/algorithms/approximation/tests/test_treewidth.py
@@ -22,7 +22,7 @@ def is_tree_decomp(graph, decomp):
         assert appear_once
 
     # Check if each connected pair of nodes are at least once together in a bag
-    for (x, y) in graph.edges():
+    for x, y in graph.edges():
         appear_together = False
         for bag in decomp.nodes():
             if x in bag and y in bag:
diff --git a/networkx/algorithms/approximation/traveling_salesman.py b/networkx/algorithms/approximation/traveling_salesman.py
index 806c8b7..75f17e9 100644
--- a/networkx/algorithms/approximation/traveling_salesman.py
+++ b/networkx/algorithms/approximation/traveling_salesman.py
@@ -178,7 +178,7 @@ def christofides(G, weight="weight", tree=None):
     L.remove_nodes_from([v for v, degree in tree.degree if not (degree % 2)])
     MG = nx.MultiGraph()
     MG.add_edges_from(tree.edges)
-    edges = nx.min_weight_matching(L, maxcardinality=True, weight=weight)
+    edges = nx.min_weight_matching(L, weight=weight)
     MG.add_edges_from(edges)
     return _shortcutting(nx.eulerian_circuit(MG))
 
@@ -530,7 +530,7 @@ def held_karp_ascent(G, weight="weight"):
            pp.1138-1162
     """
     import numpy as np
-    import scipy.optimize as optimize
+    from scipy import optimize
 
     def k_pi():
         """
@@ -785,7 +785,7 @@ def held_karp_ascent(G, weight="weight"):
     # reference [1]
     z_star = {}
     scale_factor = (G.order() - 1) / G.order()
-    for u, v in x_star.keys():
+    for u, v in x_star:
         frequency = x_star[(u, v)] + x_star[(v, u)]
         if frequency > 0:
             z_star[(u, v)] = scale_factor * frequency
diff --git a/networkx/algorithms/assortativity/connectivity.py b/networkx/algorithms/assortativity/connectivity.py
index ad05418..dad28f9 100644
--- a/networkx/algorithms/assortativity/connectivity.py
+++ b/networkx/algorithms/assortativity/connectivity.py
@@ -2,7 +2,7 @@ from collections import defaultdict
 
 import networkx as nx
 
-__all__ = ["average_degree_connectivity", "k_nearest_neighbors"]
+__all__ = ["average_degree_connectivity"]
 
 
 def average_degree_connectivity(
@@ -119,21 +119,3 @@ def average_degree_connectivity(
 
     # normalize
     return {k: avg if dnorm[k] == 0 else avg / dnorm[k] for k, avg in dsum.items()}
-
-
-def k_nearest_neighbors(G, source="in+out", target="in+out", nodes=None, weight=None):
-    """Compute the average degree connectivity of graph.
-
-    .. deprecated 2.6
-
-      k_nearest_neighbors function is deprecated and will be removed in v3.0.
-      Use `average_degree_connectivity` instead.
-    """
-    import warnings
-
-    msg = (
-        "k_nearest_neighbors function is deprecated and will be removed in v3.0.\n"
-        "Use `average_degree_connectivity` instead."
-    )
-    warnings.warn(msg, DeprecationWarning, stacklevel=2)
-    return average_degree_connectivity(G, source, target, nodes, weight)
diff --git a/networkx/algorithms/assortativity/mixing.py b/networkx/algorithms/assortativity/mixing.py
index f457d75..6a44841 100644
--- a/networkx/algorithms/assortativity/mixing.py
+++ b/networkx/algorithms/assortativity/mixing.py
@@ -9,7 +9,6 @@ __all__ = [
     "attribute_mixing_dict",
     "degree_mixing_matrix",
     "degree_mixing_dict",
-    "numeric_mixing_matrix",
     "mixing_dict",
 ]
 
@@ -209,58 +208,6 @@ def degree_mixing_matrix(
     return a
 
 
-def numeric_mixing_matrix(G, attribute, nodes=None, normalized=True, mapping=None):
-    """Returns numeric mixing matrix for attribute.
-
-    .. deprecated:: 2.6
-
-       numeric_mixing_matrix is deprecated and will be removed in 3.0.
-       Use `attribute_mixing_matrix` instead.
-
-    Parameters
-    ----------
-    G : graph
-       NetworkX graph object.
-
-    attribute : string
-       Node attribute key.
-
-    nodes: list or iterable (optional)
-        Build the matrix only with nodes in container. The default is all nodes.
-
-    normalized : bool (default=True)
-       Return counts if False or probabilities if True.
-
-    mapping : dictionary, optional
-       Mapping from node attribute to integer index in matrix.
-       If not specified, an arbitrary ordering will be used.
-
-    Notes
-    -----
-    If each node has a unique attribute value, the unnormalized mixing matrix
-    will be equal to the adjacency matrix. To get a denser mixing matrix,
-    the rounding can be performed to form groups of nodes with equal values.
-    For example, the exact height of persons in cm (180.79155222, 163.9080892,
-    163.30095355, 167.99016217, 168.21590163, ...) can be rounded to (180, 163,
-    163, 168, 168, ...).
-
-    Returns
-    -------
-    m: numpy array
-       Counts, or joint, probability of occurrence of node attribute pairs.
-    """
-    import warnings
-
-    msg = (
-        "numeric_mixing_matrix is deprecated and will be removed in v3.0.\n"
-        "Use `attribute_mixing_matrix` instead."
-    )
-    warnings.warn(msg, DeprecationWarning, stacklevel=2)
-    return attribute_mixing_matrix(
-        G, attribute, nodes=nodes, normalized=normalized, mapping=mapping
-    )
-
-
 def mixing_dict(xy, normalized=False):
     """Returns a dictionary representation of mixing matrix.
 
diff --git a/networkx/algorithms/assortativity/tests/base_test.py b/networkx/algorithms/assortativity/tests/base_test.py
index 73bb32d..46d6300 100644
--- a/networkx/algorithms/assortativity/tests/base_test.py
+++ b/networkx/algorithms/assortativity/tests/base_test.py
@@ -37,6 +37,27 @@ class BaseTestAttributeMixing:
         S.add_edge(2, 2)
         cls.S = S
 
+        N = nx.Graph()
+        N.add_nodes_from([0, 1], margin=-2)
+        N.add_nodes_from([2, 3], margin=-2)
+        N.add_nodes_from([4], margin=-3)
+        N.add_nodes_from([5], margin=-4)
+        N.add_edges_from([(0, 1), (2, 3), (0, 4), (2, 5)])
+        cls.N = N
+
+        F = nx.Graph()
+        F.add_edges_from([(0, 3), (1, 3), (2, 3)], weight=0.5)
+        F.add_edge(0, 2, weight=1)
+        nx.set_node_attributes(F, dict(F.degree(weight="weight")), "margin")
+        cls.F = F
+
+        K = nx.Graph()
+        K.add_nodes_from([1, 2], margin=-1)
+        K.add_nodes_from([3], margin=1)
+        K.add_nodes_from([4], margin=2)
+        K.add_edges_from([(3, 4), (1, 2), (1, 3)])
+        cls.K = K
+
 
 class BaseTestDegreeMixing:
     @classmethod
@@ -58,28 +79,3 @@ class BaseTestDegreeMixing:
         S2 = nx.star_graph(4)
         cls.DS = nx.disjoint_union(S1, S2)
         cls.DS.add_edge(4, 5)
-
-
-class BaseTestNumericMixing:
-    @classmethod
-    def setup_class(cls):
-        N = nx.Graph()
-        N.add_nodes_from([0, 1], margin=-2)
-        N.add_nodes_from([2, 3], margin=-2)
-        N.add_nodes_from([4], margin=-3)
-        N.add_nodes_from([5], margin=-4)
-        N.add_edges_from([(0, 1), (2, 3), (0, 4), (2, 5)])
-        cls.N = N
-
-        F = nx.Graph()
-        F.add_edges_from([(0, 3), (1, 3), (2, 3)], weight=0.5)
-        F.add_edge(0, 2, weight=1)
-        nx.set_node_attributes(F, dict(F.degree(weight="weight")), "margin")
-        cls.F = F
-
-        M = nx.Graph()
-        M.add_nodes_from([1, 2], margin=-1)
-        M.add_nodes_from([3], margin=1)
-        M.add_nodes_from([4], margin=2)
-        M.add_edges_from([(3, 4), (1, 2), (1, 3)])
-        cls.M = M
diff --git a/networkx/algorithms/assortativity/tests/test_connectivity.py b/networkx/algorithms/assortativity/tests/test_connectivity.py
index c8fae23..21c6287 100644
--- a/networkx/algorithms/assortativity/tests/test_connectivity.py
+++ b/networkx/algorithms/assortativity/tests/test_connectivity.py
@@ -86,8 +86,6 @@ class TestNeighborConnectivity:
         assert nd == 1.8
         nd = nx.average_degree_connectivity(G, weight="weight")[5]
         assert nd == pytest.approx(3.222222, abs=1e-5)
-        nd = nx.k_nearest_neighbors(G, weight="weight")[5]
-        assert nd == pytest.approx(3.222222, abs=1e-5)
 
     def test_zero_deg(self):
         G = nx.DiGraph()
diff --git a/networkx/algorithms/assortativity/tests/test_correlation.py b/networkx/algorithms/assortativity/tests/test_correlation.py
index ffba703..dbaa432 100644
--- a/networkx/algorithms/assortativity/tests/test_correlation.py
+++ b/networkx/algorithms/assortativity/tests/test_correlation.py
@@ -7,11 +7,7 @@ pytest.importorskip("scipy")
 import networkx as nx
 from networkx.algorithms.assortativity.correlation import attribute_ac
 
-from .base_test import (
-    BaseTestAttributeMixing,
-    BaseTestDegreeMixing,
-    BaseTestNumericMixing,
-)
+from .base_test import BaseTestAttributeMixing, BaseTestDegreeMixing
 
 
 class TestDegreeMixingCorrelation(BaseTestDegreeMixing):
@@ -99,16 +95,14 @@ class TestAttributeMixingCorrelation(BaseTestAttributeMixing):
         r = attribute_ac(a)
         np.testing.assert_almost_equal(r, 0.029, decimal=3)
 
-
-class TestNumericMixingCorrelation(BaseTestNumericMixing):
-    def test_numeric_assortativity_negative(self):
+    def test_attribute_assortativity_negative(self):
         r = nx.numeric_assortativity_coefficient(self.N, "margin")
         np.testing.assert_almost_equal(r, -0.2903, decimal=4)
 
-    def test_numeric_assortativity_float(self):
+    def test_attribute_assortativity_float(self):
         r = nx.numeric_assortativity_coefficient(self.F, "margin")
         np.testing.assert_almost_equal(r, -0.1429, decimal=4)
 
-    def test_numeric_assortativity_mixed(self):
-        r = nx.numeric_assortativity_coefficient(self.M, "margin")
+    def test_attribute_assortativity_mixed(self):
+        r = nx.numeric_assortativity_coefficient(self.K, "margin")
         np.testing.assert_almost_equal(r, 0.4340, decimal=4)
diff --git a/networkx/algorithms/assortativity/tests/test_mixing.py b/networkx/algorithms/assortativity/tests/test_mixing.py
index cb4ae07..9af0986 100644
--- a/networkx/algorithms/assortativity/tests/test_mixing.py
+++ b/networkx/algorithms/assortativity/tests/test_mixing.py
@@ -5,11 +5,7 @@ np = pytest.importorskip("numpy")
 
 import networkx as nx
 
-from .base_test import (
-    BaseTestAttributeMixing,
-    BaseTestDegreeMixing,
-    BaseTestNumericMixing,
-)
+from .base_test import BaseTestAttributeMixing, BaseTestDegreeMixing
 
 
 class TestDegreeMixingDict(BaseTestDegreeMixing):
@@ -159,24 +155,22 @@ class TestAttributeMixingMatrix(BaseTestAttributeMixing):
         a = nx.attribute_mixing_matrix(self.M, "fish", mapping=mapping)
         np.testing.assert_equal(a, a_result / a_result.sum())
 
-
-class TestNumericMixingMatrix(BaseTestNumericMixing):
-    def test_numeric_mixing_matrix_negative(self):
+    def test_attribute_mixing_matrix_negative(self):
         mapping = {-2: 0, -3: 1, -4: 2}
         a_result = np.array([[4.0, 1.0, 1.0], [1.0, 0.0, 0.0], [1.0, 0.0, 0.0]])
-        a = nx.numeric_mixing_matrix(
+        a = nx.attribute_mixing_matrix(
             self.N, "margin", mapping=mapping, normalized=False
         )
         np.testing.assert_equal(a, a_result)
-        a = nx.numeric_mixing_matrix(self.N, "margin", mapping=mapping)
+        a = nx.attribute_mixing_matrix(self.N, "margin", mapping=mapping)
         np.testing.assert_equal(a, a_result / float(a_result.sum()))
 
-    def test_numeric_mixing_matrix_float(self):
+    def test_attribute_mixing_matrix_float(self):
         mapping = {0.5: 1, 1.5: 0}
         a_result = np.array([[6.0, 1.0], [1.0, 0.0]])
-        a = nx.numeric_mixing_matrix(
+        a = nx.attribute_mixing_matrix(
             self.F, "margin", mapping=mapping, normalized=False
         )
         np.testing.assert_equal(a, a_result)
-        a = nx.numeric_mixing_matrix(self.F, "margin", mapping=mapping)
+        a = nx.attribute_mixing_matrix(self.F, "margin", mapping=mapping)
         np.testing.assert_equal(a, a_result / a_result.sum())
diff --git a/networkx/algorithms/bipartite/basic.py b/networkx/algorithms/bipartite/basic.py
index ac4686a..808b335 100644
--- a/networkx/algorithms/bipartite/basic.py
+++ b/networkx/algorithms/bipartite/basic.py
@@ -82,6 +82,7 @@ def color(G):
     return color
 
 
+@nx._dispatch
 def is_bipartite(G):
     """Returns True if graph G is bipartite, False if not.
 
diff --git a/networkx/algorithms/bipartite/matching.py b/networkx/algorithms/bipartite/matching.py
index 48a22f9..d00595f 100644
--- a/networkx/algorithms/bipartite/matching.py
+++ b/networkx/algorithms/bipartite/matching.py
@@ -115,6 +115,7 @@ def hopcroft_karp_matching(G, top_nodes=None):
        2.4 (1973), pp. 225--231. <https://doi.org/10.1137/0202019>.
 
     """
+
     # First we define some auxiliary search functions.
     #
     # If you are a human reading these auxiliary search functions, the "global"
@@ -269,12 +270,17 @@ def eppstein_matching(G, top_nodes=None):
 
         # did we finish layering without finding any alternating paths?
         if not unmatched:
-            unlayered = {}
-            for u in G:
-                # TODO Why is extra inner loop necessary?
-                for v in G[u]:
-                    if v not in preds:
-                        unlayered[v] = None
+            # TODO - The lines between --- were unused and were thus commented
+            # out. This whole commented chunk should be reviewed to determine
+            # whether it should be built upon or completely removed.
+            # ---
+            # unlayered = {}
+            # for u in G:
+            #     # TODO Why is extra inner loop necessary?
+            #     for v in G[u]:
+            #         if v not in preds:
+            #             unlayered[v] = None
+            # ---
             # TODO Originally, this function returned a three-tuple:
             #
             #     return (matching, list(pred), list(unlayered))
diff --git a/networkx/algorithms/bipartite/matrix.py b/networkx/algorithms/bipartite/matrix.py
index 276d3e4..3e5db20 100644
--- a/networkx/algorithms/bipartite/matrix.py
+++ b/networkx/algorithms/bipartite/matrix.py
@@ -50,7 +50,7 @@ def biadjacency_matrix(
 
     Returns
     -------
-    M : SciPy sparse matrix
+    M : SciPy sparse array
         Biadjacency matrix representation of the bipartite graph G.
 
     Notes
@@ -103,16 +103,8 @@ def biadjacency_matrix(
                 if u in row_index and v in col_index
             )
         )
-    # TODO: change coo_matrix -> coo_array for NX 3.0
-    A = sp.sparse.coo_matrix((data, (row, col)), shape=(nlen, mlen), dtype=dtype)
+    A = sp.sparse.coo_array((data, (row, col)), shape=(nlen, mlen), dtype=dtype)
     try:
-        import warnings
-
-        warnings.warn(
-            "biadjacency_matrix will return a scipy.sparse array instead of a matrix in NetworkX 3.0",
-            FutureWarning,
-            stacklevel=2,
-        )
         return A.asformat(format)
     except ValueError as err:
         raise nx.NetworkXError(f"Unknown sparse array format: {format}") from err
@@ -120,11 +112,11 @@ def biadjacency_matrix(
 
 def from_biadjacency_matrix(A, create_using=None, edge_attribute="weight"):
     r"""Creates a new bipartite graph from a biadjacency matrix given as a
-    SciPy sparse matrix.
+    SciPy sparse array.
 
     Parameters
     ----------
-    A: scipy sparse matrix
+    A: scipy sparse array
       A biadjacency matrix representation of a graph
 
     create_using: NetworkX graph
diff --git a/networkx/algorithms/bipartite/projection.py b/networkx/algorithms/bipartite/projection.py
index 8864195..93f2c29 100644
--- a/networkx/algorithms/bipartite/projection.py
+++ b/networkx/algorithms/bipartite/projection.py
@@ -4,7 +4,6 @@ from networkx.exception import NetworkXAlgorithmError
 from networkx.utils import not_implemented_for
 
 __all__ = [
-    "project",
     "projected_graph",
     "weighted_projected_graph",
     "collaboration_weighted_projected_graph",
@@ -522,17 +521,3 @@ def generic_weighted_projected_graph(B, nodes, weight_function=None):
             weight = weight_function(B, u, v)
             G.add_edge(u, v, weight=weight)
     return G
-
-
-def project(B, nodes, create_using=None):
-    import warnings
-
-    warnings.warn(
-        (
-            "networkx.project is deprecated and will be removed"
-            "in NetworkX 3.0, use networkx.projected_graph instead."
-        ),
-        DeprecationWarning,
-        stacklevel=2,
-    )
-    return projected_graph(B, nodes)
diff --git a/networkx/algorithms/bipartite/redundancy.py b/networkx/algorithms/bipartite/redundancy.py
index 55de063..dd6e343 100644
--- a/networkx/algorithms/bipartite/redundancy.py
+++ b/networkx/algorithms/bipartite/redundancy.py
@@ -103,8 +103,6 @@ def _node_redundancy(G, v):
 
     """
     n = len(G[v])
-    # TODO On Python 3, we could just use `G[u].keys() & G[w].keys()` instead
-    # of instantiating the entire sets.
     overlap = sum(
         1 for (u, w) in combinations(G[v], 2) if (set(G[u]) & set(G[w])) - {v}
     )
diff --git a/networkx/algorithms/bipartite/tests/test_centrality.py b/networkx/algorithms/bipartite/tests/test_centrality.py
index 50ac906..19fb5d1 100644
--- a/networkx/algorithms/bipartite/tests/test_centrality.py
+++ b/networkx/algorithms/bipartite/tests/test_centrality.py
@@ -55,6 +55,22 @@ class TestBipartiteCentrality:
         c = bipartite.closeness_centrality(G, [1])
         assert c == {0: 0.0, 1: 0.0}
 
+    def test_bipartite_closeness_centrality_unconnected(self):
+        G = nx.complete_bipartite_graph(3, 3)
+        G.add_edge(6, 7)
+        c = bipartite.closeness_centrality(G, [0, 2, 4, 6], normalized=False)
+        answer = {
+            0: 10.0 / 7,
+            2: 10.0 / 7,
+            4: 10.0 / 7,
+            6: 10.0,
+            1: 10.0 / 7,
+            3: 10.0 / 7,
+            5: 10.0 / 7,
+            7: 10.0,
+        }
+        assert c == answer
+
     def test_davis_degree_centrality(self):
         G = self.davis
         deg = bipartite.degree_centrality(G, self.top_nodes)
diff --git a/networkx/algorithms/bipartite/tests/test_cluster.py b/networkx/algorithms/bipartite/tests/test_cluster.py
index e33e6c0..72e2dba 100644
--- a/networkx/algorithms/bipartite/tests/test_cluster.py
+++ b/networkx/algorithms/bipartite/tests/test_cluster.py
@@ -1,7 +1,7 @@
 import pytest
 
 import networkx as nx
-import networkx.algorithms.bipartite as bipartite
+from networkx.algorithms import bipartite
 from networkx.algorithms.bipartite.cluster import cc_dot, cc_max, cc_min
 
 
diff --git a/networkx/algorithms/bipartite/tests/test_covering.py b/networkx/algorithms/bipartite/tests/test_covering.py
index 2f1b02e..9507e13 100644
--- a/networkx/algorithms/bipartite/tests/test_covering.py
+++ b/networkx/algorithms/bipartite/tests/test_covering.py
@@ -1,5 +1,5 @@
 import networkx as nx
-import networkx.algorithms.bipartite as bipartite
+from networkx.algorithms import bipartite
 
 
 class TestMinEdgeCover:
diff --git a/networkx/algorithms/bipartite/tests/test_edgelist.py b/networkx/algorithms/bipartite/tests/test_edgelist.py
index 9cf0c43..b388465 100644
--- a/networkx/algorithms/bipartite/tests/test_edgelist.py
+++ b/networkx/algorithms/bipartite/tests/test_edgelist.py
@@ -190,3 +190,40 @@ class TestEdgelist:
             G = nx.path_graph(4)
             bytesIO = io.BytesIO()
             bipartite.write_edgelist(G, bytesIO)
+
+    def test_parse_edgelist(self):
+        """Tests for conditions specific to
+        parse_edge_list method"""
+
+        # ignore strings of length less than 2
+        lines = ["1 2", "2 3", "3 1", "4", " "]
+        G = bipartite.parse_edgelist(lines, nodetype=int)
+        assert list(G.nodes) == [1, 2, 3]
+
+        # Exception raised when node is not convertible
+        # to specified data type
+        with pytest.raises(TypeError, match=".*Failed to convert nodes"):
+            lines = ["a b", "b c", "c a"]
+            G = bipartite.parse_edgelist(lines, nodetype=int)
+
+        # Exception raised when format of data is not
+        # convertible to dictionary object
+        with pytest.raises(TypeError, match=".*Failed to convert edge data"):
+            lines = ["1 2 3", "2 3 4", "3 1 2"]
+            G = bipartite.parse_edgelist(lines, nodetype=int)
+
+        # Exception raised when edge data and data
+        # keys are not of same length
+        with pytest.raises(IndexError):
+            lines = ["1 2 3 4", "2 3 4"]
+            G = bipartite.parse_edgelist(
+                lines, nodetype=int, data=[("weight", int), ("key", int)]
+            )
+
+        # Exception raised when edge data is not
+        # convertible to specified data type
+        with pytest.raises(TypeError, match=".*Failed to convert key data"):
+            lines = ["1 2 3 a", "2 3 4 b"]
+            G = bipartite.parse_edgelist(
+                lines, nodetype=int, data=[("weight", int), ("key", int)]
+            )
diff --git a/networkx/algorithms/bipartite/tests/test_matching.py b/networkx/algorithms/bipartite/tests/test_matching.py
index abef330..7ed7cdc 100644
--- a/networkx/algorithms/bipartite/tests/test_matching.py
+++ b/networkx/algorithms/bipartite/tests/test_matching.py
@@ -104,7 +104,7 @@ class TestMatching:
         # the number of vertices in a minimum vertex cover.
         assert len(vertices) == 5
         # Assert that the set is truly a vertex cover.
-        for (u, v) in self.graph.edges():
+        for u, v in self.graph.edges():
             assert u in vertices or v in vertices
         # TODO Assert that the vertices are the correct ones.
 
diff --git a/networkx/algorithms/bipartite/tests/test_project.py b/networkx/algorithms/bipartite/tests/test_project.py
index 74f8059..076bb42 100644
--- a/networkx/algorithms/bipartite/tests/test_project.py
+++ b/networkx/algorithms/bipartite/tests/test_project.py
@@ -14,6 +14,9 @@ class TestBipartiteProject:
         P = bipartite.projected_graph(G, [0, 2])
         assert nodes_equal(list(P), [0, 2])
         assert edges_equal(list(P.edges()), [(0, 2)])
+        G = nx.MultiGraph([(0, 1)])
+        with pytest.raises(nx.NetworkXError, match="not defined for multigraphs"):
+            bipartite.projected_graph(G, [0])
 
     def test_path_projected_properties_graph(self):
         G = nx.path_graph(4)
@@ -66,6 +69,12 @@ class TestBipartiteProject:
         assert edges_equal(list(P.edges()), [(0, 2)])
         P[0][2]["weight"] = 1
 
+    def test_digraph_weighted_projection(self):
+        G = nx.DiGraph([(0, 1), (1, 2), (2, 3), (3, 4)])
+        P = bipartite.overlap_weighted_projected_graph(G, [1, 3])
+        assert nx.get_edge_attributes(P, "weight") == {(1, 3): 1.0}
+        assert len(P) == 2
+
     def test_path_weighted_projected_directed_graph(self):
         G = nx.DiGraph()
         nx.add_path(G, range(4))
@@ -353,14 +362,14 @@ class TestBipartiteWeightedProjection:
         )
         assert nodes_equal(list(G), [0, 2, 4])
         assert edges_equal(
-            list(list(G.edges(data=True))),
+            list(G.edges(data=True)),
             [(0, 2, {"weight": 1}), (2, 4, {"weight": 1})],
         )
 
         G = bipartite.generic_weighted_projected_graph(B, [0, 2, 4])
         assert nodes_equal(list(G), [0, 2, 4])
         assert edges_equal(
-            list(list(G.edges(data=True))),
+            list(G.edges(data=True)),
             [(0, 2, {"weight": 1}), (2, 4, {"weight": 1})],
         )
         B = nx.DiGraph()
diff --git a/networkx/algorithms/bipartite/tests/test_redundancy.py b/networkx/algorithms/bipartite/tests/test_redundancy.py
index 8c04e95..7ab7813 100644
--- a/networkx/algorithms/bipartite/tests/test_redundancy.py
+++ b/networkx/algorithms/bipartite/tests/test_redundancy.py
@@ -10,9 +10,15 @@ from networkx.algorithms.bipartite import complete_bipartite_graph, node_redunda
 
 def test_no_redundant_nodes():
     G = complete_bipartite_graph(2, 2)
+
+    # when nodes is None
     rc = node_redundancy(G)
     assert all(redundancy == 1 for redundancy in rc.values())
 
+    # when set of nodes is specified
+    rc = node_redundancy(G, (2, 3))
+    assert rc == {2: 1.0, 3: 1.0}
+
 
 def test_redundant_nodes():
     G = cycle_graph(6)
diff --git a/networkx/algorithms/bipartite/tests/test_spectral_bipartivity.py b/networkx/algorithms/bipartite/tests/test_spectral_bipartivity.py
index 0cdc2d6..b940649 100644
--- a/networkx/algorithms/bipartite/tests/test_spectral_bipartivity.py
+++ b/networkx/algorithms/bipartite/tests/test_spectral_bipartivity.py
@@ -66,7 +66,6 @@ class TestSpectralBipartivity:
         assert sb(G) == pytest.approx(0.597, abs=1e-3)
 
     def test_single_nodes(self):
-
         # single nodes
         G = nx.complete_bipartite_graph(2, 3)
         G.add_edge(2, 4)
diff --git a/networkx/algorithms/boundary.py b/networkx/algorithms/boundary.py
index 25c1e28..4e98d43 100644
--- a/networkx/algorithms/boundary.py
+++ b/networkx/algorithms/boundary.py
@@ -10,9 +10,12 @@ nodes in *S* that are outside *S*.
 """
 from itertools import chain
 
+import networkx as nx
+
 __all__ = ["edge_boundary", "node_boundary"]
 
 
+@nx._dispatch
 def edge_boundary(G, nbunch1, nbunch2=None, data=False, keys=False, default=None):
     """Returns the edge boundary of `nbunch1`.
 
@@ -56,6 +59,20 @@ def edge_boundary(G, nbunch1, nbunch2=None, data=False, keys=False, default=None
         are specified and `G` is a multigraph, then edges are returned
         with keys and/or data, as in :meth:`MultiGraph.edges`.
 
+    Examples
+    --------
+    >>> G = nx.wheel_graph(6)
+
+    When nbunch2=None:
+
+    >>> list(nx.edge_boundary(G, (1, 3)))
+    [(1, 0), (1, 2), (1, 5), (3, 0), (3, 2), (3, 4)]
+
+    When nbunch2 is given:
+
+    >>> list(nx.edge_boundary(G, (1, 3), (2, 0)))
+    [(1, 0), (1, 2), (3, 0), (3, 2)]
+
     Notes
     -----
     Any element of `nbunch` that is not in the graph `G` will be
@@ -89,6 +106,7 @@ def edge_boundary(G, nbunch1, nbunch2=None, data=False, keys=False, default=None
     )
 
 
+@nx._dispatch()
 def node_boundary(G, nbunch1, nbunch2=None):
     """Returns the node boundary of `nbunch1`.
 
@@ -117,6 +135,20 @@ def node_boundary(G, nbunch1, nbunch2=None):
     set
         The node boundary of `nbunch1` with respect to `nbunch2`.
 
+    Examples
+    --------
+    >>> G = nx.wheel_graph(6)
+
+    When nbunch2=None:
+
+    >>> list(nx.node_boundary(G, (3, 4)))
+    [0, 2, 5]
+
+    When nbunch2 is given:
+
+    >>> list(nx.node_boundary(G, (3, 4), (0, 1, 5)))
+    [0, 5]
+
     Notes
     -----
     Any element of `nbunch` that is not in the graph `G` will be
diff --git a/networkx/algorithms/centrality/__init__.py b/networkx/algorithms/centrality/__init__.py
index cf07fe2..c91a904 100644
--- a/networkx/algorithms/centrality/__init__.py
+++ b/networkx/algorithms/centrality/__init__.py
@@ -17,3 +17,4 @@ from .second_order import *
 from .subgraph_alg import *
 from .trophic import *
 from .voterank_alg import *
+from .laplacian import *
diff --git a/networkx/algorithms/centrality/betweenness.py b/networkx/algorithms/centrality/betweenness.py
index 54b7db9..65ddaa2 100644
--- a/networkx/algorithms/centrality/betweenness.py
+++ b/networkx/algorithms/centrality/betweenness.py
@@ -1,16 +1,17 @@
 """Betweenness centrality measures."""
-import warnings
 from collections import deque
 from heapq import heappop, heappush
 from itertools import count
 
+import networkx as nx
 from networkx.algorithms.shortest_paths.weighted import _weight_function
 from networkx.utils import py_random_state
 from networkx.utils.decorators import not_implemented_for
 
-__all__ = ["betweenness_centrality", "edge_betweenness_centrality", "edge_betweenness"]
+__all__ = ["betweenness_centrality", "edge_betweenness_centrality"]
 
 
+@nx._dispatch
 @py_random_state(5)
 def betweenness_centrality(
     G, k=None, normalized=True, weight=None, endpoints=False, seed=None
@@ -147,6 +148,7 @@ def betweenness_centrality(
     return betweenness
 
 
+@nx._dispatch
 @py_random_state(4)
 def edge_betweenness_centrality(G, k=None, normalized=True, weight=None, seed=None):
     r"""Compute betweenness centrality for edges.
@@ -222,7 +224,7 @@ def edge_betweenness_centrality(G, k=None, normalized=True, weight=None, seed=No
     if k is None:
         nodes = G
     else:
-        nodes = seed.sample(G.nodes(), k)
+        nodes = seed.sample(list(G.nodes()), k)
     for s in nodes:
         # single source shortest paths
         if weight is None:  # use BFS
@@ -242,14 +244,6 @@ def edge_betweenness_centrality(G, k=None, normalized=True, weight=None, seed=No
     return betweenness
 
 
-# obsolete name
-def edge_betweenness(G, k=None, normalized=True, weight=None, seed=None):
-    warnings.warn(
-        "edge_betweeness is replaced by edge_betweenness_centrality", DeprecationWarning
-    )
-    return edge_betweenness_centrality(G, k, normalized, weight, seed)
-
-
 # helpers for betweenness centrality
 
 
diff --git a/networkx/algorithms/centrality/betweenness_subset.py b/networkx/algorithms/centrality/betweenness_subset.py
index 6b6958f..2a29b5f 100644
--- a/networkx/algorithms/centrality/betweenness_subset.py
+++ b/networkx/algorithms/centrality/betweenness_subset.py
@@ -1,7 +1,7 @@
 """Betweenness centrality measures for subsets of nodes."""
-import warnings
-
-from networkx.algorithms.centrality.betweenness import _add_edge_keys
+from networkx.algorithms.centrality.betweenness import (
+    _add_edge_keys,
+)
 from networkx.algorithms.centrality.betweenness import (
     _single_source_dijkstra_path_basic as dijkstra,
 )
@@ -11,7 +11,6 @@ from networkx.algorithms.centrality.betweenness import (
 
 __all__ = [
     "betweenness_centrality_subset",
-    "betweenness_centrality_source",
     "edge_betweenness_centrality_subset",
 ]
 
@@ -199,16 +198,6 @@ def edge_betweenness_centrality_subset(
     return b
 
 
-# obsolete name
-def betweenness_centrality_source(G, normalized=True, weight=None, sources=None):
-    msg = "betweenness_centrality_source --> betweenness_centrality_subset"
-    warnings.warn(msg, DeprecationWarning)
-    if sources is None:
-        sources = G.nodes()
-    targets = list(G)
-    return betweenness_centrality_subset(G, sources, targets, normalized, weight)
-
-
 def _accumulate_subset(betweenness, S, P, sigma, s, targets):
     delta = dict.fromkeys(S, 0.0)
     target_set = set(targets) - {s}
diff --git a/networkx/algorithms/centrality/degree_alg.py b/networkx/algorithms/centrality/degree_alg.py
index a7e7b92..87beef8 100644
--- a/networkx/algorithms/centrality/degree_alg.py
+++ b/networkx/algorithms/centrality/degree_alg.py
@@ -1,9 +1,11 @@
 """Degree centrality measures."""
+import networkx as nx
 from networkx.utils.decorators import not_implemented_for
 
 __all__ = ["degree_centrality", "in_degree_centrality", "out_degree_centrality"]
 
 
+@nx._dispatch
 def degree_centrality(G):
     """Compute the degree centrality for nodes.
 
@@ -20,6 +22,12 @@ def degree_centrality(G):
     nodes : dictionary
        Dictionary of nodes with degree centrality as the value.
 
+    Examples
+    --------
+    >>> G = nx.Graph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)])
+    >>> nx.degree_centrality(G)
+    {0: 1.0, 1: 1.0, 2: 0.6666666666666666, 3: 0.6666666666666666}
+
     See Also
     --------
     betweenness_centrality, load_centrality, eigenvector_centrality
@@ -41,6 +49,7 @@ def degree_centrality(G):
     return centrality
 
 
+@nx._dispatch
 @not_implemented_for("undirected")
 def in_degree_centrality(G):
     """Compute the in-degree centrality for nodes.
@@ -63,6 +72,12 @@ def in_degree_centrality(G):
     NetworkXNotImplemented
         If G is undirected.
 
+    Examples
+    --------
+    >>> G = nx.DiGraph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)])
+    >>> nx.in_degree_centrality(G)
+    {0: 0.0, 1: 0.3333333333333333, 2: 0.6666666666666666, 3: 0.6666666666666666}
+
     See Also
     --------
     degree_centrality, out_degree_centrality
@@ -84,6 +99,7 @@ def in_degree_centrality(G):
     return centrality
 
 
+@nx._dispatch
 @not_implemented_for("undirected")
 def out_degree_centrality(G):
     """Compute the out-degree centrality for nodes.
@@ -106,6 +122,12 @@ def out_degree_centrality(G):
     NetworkXNotImplemented
         If G is undirected.
 
+    Examples
+    --------
+    >>> G = nx.DiGraph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)])
+    >>> nx.out_degree_centrality(G)
+    {0: 1.0, 1: 0.6666666666666666, 2: 0.0, 3: 0.0}
+
     See Also
     --------
     degree_centrality, in_degree_centrality
diff --git a/networkx/algorithms/centrality/dispersion.py b/networkx/algorithms/centrality/dispersion.py
index 8005670..4c38ad0 100644
--- a/networkx/algorithms/centrality/dispersion.py
+++ b/networkx/algorithms/centrality/dispersion.py
@@ -19,6 +19,13 @@ def dispersion(G, u=None, v=None, normalized=True, alpha=1.0, b=0.0, c=0.0):
         The target of the dispersion score if specified.
     normalized : bool
         If True (default) normalize by the embededness of the nodes (u and v).
+    alpha, b, c : float
+        Parameters for the normalization procedure. When `normalized` is True,
+        the dispersion value is normalized by::
+
+            result = ((dispersion + b) ** alpha) / (embeddedness + c)
+
+        as long as the denominator is nonzero.
 
     Returns
     -------
@@ -52,7 +59,7 @@ def dispersion(G, u=None, v=None, normalized=True, alpha=1.0, b=0.0, c=0.0):
         # all possible ties of connections that u and b share
         possib = combinations(ST, 2)
         total = 0
-        for (s, t) in possib:
+        for s, t in possib:
             # neighbors of s that are in G_u, not including u and v
             nbrs_s = u_nbrs.intersection(G_u[s]) - set_uv
             # s and t are not directly connected
diff --git a/networkx/algorithms/centrality/eigenvector.py b/networkx/algorithms/centrality/eigenvector.py
index f55cedb..bd8a8fd 100644
--- a/networkx/algorithms/centrality/eigenvector.py
+++ b/networkx/algorithms/centrality/eigenvector.py
@@ -7,6 +7,7 @@ from networkx.utils import not_implemented_for
 __all__ = ["eigenvector_centrality", "eigenvector_centrality_numpy"]
 
 
+@nx._dispatch
 @not_implemented_for("multigraph")
 def eigenvector_centrality(G, max_iter=100, tol=1.0e-6, nstart=None, weight=None):
     r"""Compute the eigenvector centrality for the graph `G`.
diff --git a/networkx/algorithms/centrality/group.py b/networkx/algorithms/centrality/group.py
index 5d4e43a..0c82b4c 100644
--- a/networkx/algorithms/centrality/group.py
+++ b/networkx/algorithms/centrality/group.py
@@ -207,7 +207,7 @@ def _group_preprocessing(G, set_v, weight):
         else:  # use Dijkstra's algorithm
             S, P, sigma[s], D[s] = _single_source_dijkstra_path_basic(G, s, weight)
         betweenness, delta[s] = _accumulate_endpoints(betweenness, S, P, sigma[s], s)
-        for i in delta[s].keys():  # add the paths from s to i and rescale sigma
+        for i in delta[s]:  # add the paths from s to i and rescale sigma
             if s != i:
                 delta[s][i] += 1
             if weight is not None:
@@ -414,7 +414,7 @@ def _dfbnb(G, k, DF_tree, max_GBC, root, D, max_group, nodes, greedy):
     if len(DF_tree.nodes[root]["GM"]) == k and DF_tree.nodes[root]["GBC"] > max_GBC:
         return DF_tree.nodes[root]["GBC"], DF_tree, DF_tree.nodes[root]["GM"]
     # stopping condition - if the size of group members equal to k or there are less than
-    # k - |GM| in the candidate list or the heuristic function plus the GBC is bellow the
+    # k - |GM| in the candidate list or the heuristic function plus the GBC is below the
     # maximal GBC found then prune
     if (
         len(DF_tree.nodes[root]["GM"]) == k
@@ -682,7 +682,7 @@ def group_degree_centrality(G, S):
        Journal of Mathematical Sociology. 23(3): 181-201. 1999.
        http://www.analytictech.com/borgatti/group_centrality.htm
     """
-    centrality = len(set().union(*list(set(G.neighbors(i)) for i in S)) - set(S))
+    centrality = len(set().union(*[set(G.neighbors(i)) for i in S]) - set(S))
     centrality /= len(G.nodes()) - len(S)
     return centrality
 
diff --git a/networkx/algorithms/centrality/katz.py b/networkx/algorithms/centrality/katz.py
index fd0bb93..f429400 100644
--- a/networkx/algorithms/centrality/katz.py
+++ b/networkx/algorithms/centrality/katz.py
@@ -7,6 +7,7 @@ from networkx.utils import not_implemented_for
 __all__ = ["katz_centrality", "katz_centrality_numpy"]
 
 
+@nx._dispatch
 @not_implemented_for("multigraph")
 def katz_centrality(
     G,
@@ -168,7 +169,7 @@ def katz_centrality(
     for _ in range(max_iter):
         xlast = x
         x = dict.fromkeys(xlast, 0)
-        # do the multiplication y^T = Alpha * x^T A - Beta
+        # do the multiplication y^T = Alpha * x^T A + Beta
         for n in x:
             for nbr in G[n]:
                 x[nbr] += xlast[n] * G[n][nbr].get(weight, 1)
diff --git a/networkx/algorithms/centrality/laplacian.py b/networkx/algorithms/centrality/laplacian.py
new file mode 100644
index 0000000..073bb92
--- /dev/null
+++ b/networkx/algorithms/centrality/laplacian.py
@@ -0,0 +1,136 @@
+"""
+Laplacian centrality measures.
+"""
+import networkx as nx
+
+__all__ = ["laplacian_centrality"]
+
+
+def laplacian_centrality(
+    G, normalized=True, nodelist=None, weight="weight", walk_type=None, alpha=0.95
+):
+    r"""Compute the Laplacian centrality for nodes in the graph `G`.
+
+    The Laplacian Centrality of a node ``i`` is measured by the drop in the
+    Laplacian Energy after deleting node ``i`` from the graph. The Laplacian Energy
+    is the sum of the squared eigenvalues of a graph's Laplacian matrix.
+
+    .. math::
+
+        C_L(u_i,G) = \frac{(\Delta E)_i}{E_L (G)} = \frac{E_L (G)-E_L (G_i)}{E_L (G)}
+
+        E_L (G) = \sum_{i=0}^n \lambda_i^2
+
+    Where $E_L (G)$ is the Laplacian energy of graph `G`,
+    E_L (G_i) is the Laplacian energy of graph `G` after deleting node ``i``
+    and $\lambda_i$ are the eigenvalues of `G`'s Laplacian matrix.
+    This formula shows the normalized value. Without normalization,
+    the numerator on the right side is returned.
+
+    Parameters
+    ----------
+    G : graph
+        A networkx graph
+
+    normalized : bool (default = True)
+        If True the centrality score is scaled so the sum over all nodes is 1.
+        If False the centrality score for each node is the drop in Laplacian
+        energy when that node is removed.
+
+    nodelist : list, optional (default = None)
+        The rows and columns are ordered according to the nodes in nodelist.
+        If nodelist is None, then the ordering is produced by G.nodes().
+
+    weight: string or None, optional (default=`weight`)
+        Optional parameter `weight` to compute the Laplacian matrix.
+        The edge data key used to compute each value in the matrix.
+        If None, then each edge has weight 1.
+
+    walk_type : string or None, optional (default=None)
+        Optional parameter `walk_type` used when calling
+        :func:`directed_laplacian_matrix <networkx.directed_laplacian_matrix>`.
+        If None, the transition matrix is selected depending on the properties
+        of the graph. Otherwise can be `random`, `lazy`, or `pagerank`.
+
+    alpha : real (default = 0.95)
+        Optional parameter `alpha` used when calling
+        :func:`directed_laplacian_matrix <networkx.directed_laplacian_matrix>`.
+        (1 - alpha) is the teleportation probability used with pagerank.
+
+    Returns
+    -------
+    nodes : dictionary
+       Dictionary of nodes with Laplacian centrality as the value.
+
+    Examples
+    --------
+    >>> G = nx.Graph()
+    >>> edges = [(0, 1, 4), (0, 2, 2), (2, 1, 1), (1, 3, 2), (1, 4, 2), (4, 5, 1)]
+    >>> G.add_weighted_edges_from(edges)
+    >>> sorted((v, f"{c:0.2f}") for v, c in laplacian_centrality(G).items())
+    [(0, '0.70'), (1, '0.90'), (2, '0.28'), (3, '0.22'), (4, '0.26'), (5, '0.04')]
+
+    Notes
+    -----
+    The algorithm is implemented based on [1]_ with an extension to directed graphs
+    using the ``directed_laplacian_matrix`` function.
+
+    Raises
+    ------
+    NetworkXPointlessConcept
+        If the graph `G` is the null graph.
+
+    References
+    ----------
+    .. [1] Qi, X., Fuller, E., Wu, Q., Wu, Y., and Zhang, C.-Q. (2012).
+       Laplacian centrality: A new centrality measure for weighted networks.
+       Information Sciences, 194:240-253.
+       https://math.wvu.edu/~cqzhang/Publication-files/my-paper/INS-2012-Laplacian-W.pdf
+
+    See Also
+    --------
+    directed_laplacian_matrix
+    laplacian_matrix
+    """
+    import numpy as np
+    import scipy as sp
+    import scipy.linalg  # call as sp.linalg
+
+    if len(G) == 0:
+        raise nx.NetworkXPointlessConcept("null graph has no centrality defined")
+
+    if nodelist != None:
+        nodeset = set(G.nbunch_iter(nodelist))
+        if len(nodeset) != len(nodelist):
+            raise nx.NetworkXError("nodelist has duplicate nodes or nodes not in G")
+        nodes = nodelist + [n for n in G if n not in nodeset]
+    else:
+        nodelist = nodes = list(G)
+
+    if G.is_directed():
+        lap_matrix = nx.directed_laplacian_matrix(G, nodes, weight, walk_type, alpha)
+    else:
+        lap_matrix = nx.laplacian_matrix(G, nodes, weight).toarray()
+
+    full_energy = np.power(sp.linalg.eigh(lap_matrix, eigvals_only=True), 2).sum()
+
+    # calculate laplacian centrality
+    laplace_centralities_dict = {}
+    for i, node in enumerate(nodelist):
+        # remove row and col i from lap_matrix
+        all_but_i = list(np.arange(lap_matrix.shape[0]))
+        all_but_i.remove(i)
+        A_2 = lap_matrix[all_but_i, :][:, all_but_i]
+
+        # Adjust diagonal for removed row
+        new_diag = lap_matrix.diagonal() - abs(lap_matrix[:, i])
+        np.fill_diagonal(A_2, new_diag[all_but_i])
+
+        new_energy = np.power(sp.linalg.eigh(A_2, eigvals_only=True), 2).sum()
+        lapl_cent = full_energy - new_energy
+        if normalized:
+            lapl_cent = lapl_cent / full_energy
+
+        laplace_centralities_dict[node] = lapl_cent
+
+    return laplace_centralities_dict
diff --git a/networkx/algorithms/centrality/percolation.py b/networkx/algorithms/centrality/percolation.py
index 4d70338..c15ff56 100644
--- a/networkx/algorithms/centrality/percolation.py
+++ b/networkx/algorithms/centrality/percolation.py
@@ -62,7 +62,7 @@ def percolation_centrality(G, attribute="percolation", states=None, weight=None)
     -----
     The algorithm is from Mahendra Piraveenan, Mikhail Prokopenko, and
     Liaquat Hossain [1]_
-    Pair dependecies are calculated and accumulated using [2]_
+    Pair dependencies are calculated and accumulated using [2]_
 
     For weighted graphs the edge weights must be greater than zero.
     Zero edge weights can produce an infinite number of equal length
diff --git a/networkx/algorithms/centrality/tests/test_betweenness_centrality_subset.py b/networkx/algorithms/centrality/tests/test_betweenness_centrality_subset.py
index 6b66b8a..a35a401 100644
--- a/networkx/algorithms/centrality/tests/test_betweenness_centrality_subset.py
+++ b/networkx/algorithms/centrality/tests/test_betweenness_centrality_subset.py
@@ -116,21 +116,48 @@ class TestSubsetBetweennessCentrality:
         for n in sorted(G):
             assert b[n] == pytest.approx(expected_b[n], abs=1e-7)
 
+    def test_normalized_p2(self):
+        """
+        Betweenness Centrality Subset: Normalized P2
+        if n <= 2:  no normalization, betweenness centrality should be 0 for all nodes.
+        """
+        G = nx.Graph()
+        nx.add_path(G, range(2))
+        b_answer = {0: 0, 1: 0.0}
+        b = nx.betweenness_centrality_subset(
+            G, sources=[0], targets=[1], normalized=True, weight=None
+        )
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
 
-class TestBetweennessCentralitySources:
-    def test_K5(self):
-        """Betweenness Centrality Sources: K5"""
-        G = nx.complete_graph(5)
-        b = nx.betweenness_centrality_source(G, weight=None, normalized=False)
-        b_answer = {0: 0.0, 1: 0.0, 2: 0.0, 3: 0.0, 4: 0.0}
+    def test_normalized_P5_directed(self):
+        """Betweenness Centrality Subset: Normalized Directed P5"""
+        G = nx.DiGraph()
+        nx.add_path(G, range(5))
+        b_answer = {0: 0, 1: 1.0 / 12.0, 2: 1.0 / 12.0, 3: 0, 4: 0, 5: 0}
+        b = nx.betweenness_centrality_subset(
+            G, sources=[0], targets=[3], normalized=True, weight=None
+        )
         for n in sorted(G):
             assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
 
-    def test_P3(self):
-        """Betweenness Centrality Sources: P3"""
-        G = nx.path_graph(3)
-        b_answer = {0: 0.0, 1: 1.0, 2: 0.0}
-        b = nx.betweenness_centrality_source(G, weight=None, normalized=True)
+    def test_weighted_graph(self):
+        """Betweenness Centrality Subset: Weighted Graph"""
+        G = nx.DiGraph()
+        G.add_edge(0, 1, weight=3)
+        G.add_edge(0, 2, weight=2)
+        G.add_edge(0, 3, weight=6)
+        G.add_edge(0, 4, weight=4)
+        G.add_edge(1, 3, weight=5)
+        G.add_edge(1, 5, weight=5)
+        G.add_edge(2, 4, weight=1)
+        G.add_edge(3, 4, weight=2)
+        G.add_edge(3, 5, weight=1)
+        G.add_edge(4, 5, weight=4)
+        b_answer = {0: 0.0, 1: 0.0, 2: 0.5, 3: 0.5, 4: 0.5, 5: 0.0}
+        b = nx.betweenness_centrality_subset(
+            G, sources=[0], targets=[5], normalized=False, weight="weight"
+        )
         for n in sorted(G):
             assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
 
@@ -225,3 +252,89 @@ class TestEdgeSubsetBetweennessCentrality:
         )
         for n in sorted(G.edges()):
             assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
+
+    def test_diamond_multi_path(self):
+        """Edge betweenness subset centrality: Diamond Multi Path"""
+        G = nx.Graph()
+        G.add_edges_from(
+            [
+                (1, 2),
+                (1, 3),
+                (1, 4),
+                (1, 5),
+                (1, 10),
+                (10, 11),
+                (11, 12),
+                (12, 9),
+                (2, 6),
+                (3, 6),
+                (4, 6),
+                (5, 7),
+                (7, 8),
+                (6, 8),
+                (8, 9),
+            ]
+        )
+        b_answer = dict.fromkeys(G.edges(), 0)
+        b_answer[(8, 9)] = 0.4
+        b_answer[(6, 8)] = b_answer[(7, 8)] = 0.2
+        b_answer[(2, 6)] = b_answer[(3, 6)] = b_answer[(4, 6)] = 0.2 / 3.0
+        b_answer[(1, 2)] = b_answer[(1, 3)] = b_answer[(1, 4)] = 0.2 / 3.0
+        b_answer[(5, 7)] = 0.2
+        b_answer[(1, 5)] = 0.2
+        b_answer[(9, 12)] = 0.1
+        b_answer[(11, 12)] = b_answer[(10, 11)] = b_answer[(1, 10)] = 0.1
+        b = nx.edge_betweenness_centrality_subset(
+            G, sources=[1], targets=[9], weight=None
+        )
+        for n in G.edges():
+            sort_n = tuple(sorted(n))
+            assert b[n] == pytest.approx(b_answer[sort_n], abs=1e-7)
+
+    def test_normalized_p1(self):
+        """
+        Edge betweenness subset centrality: P1
+        if n <= 1: no normalization, b=0 for all nodes
+        """
+        G = nx.Graph()
+        nx.add_path(G, range(1))
+        b_answer = dict.fromkeys(G.edges(), 0)
+        b = nx.edge_betweenness_centrality_subset(
+            G, sources=[0], targets=[0], normalized=True, weight=None
+        )
+        for n in G.edges():
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
+
+    def test_normalized_P5_directed(self):
+        """Edge betweenness subset centrality: Normalized Directed P5"""
+        G = nx.DiGraph()
+        nx.add_path(G, range(5))
+        b_answer = dict.fromkeys(G.edges(), 0)
+        b_answer[(0, 1)] = b_answer[(1, 2)] = b_answer[(2, 3)] = 0.05
+        b = nx.edge_betweenness_centrality_subset(
+            G, sources=[0], targets=[3], normalized=True, weight=None
+        )
+        for n in G.edges():
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
+
+    def test_weighted_graph(self):
+        """Edge betweenness subset centrality: Weighted Graph"""
+        G = nx.DiGraph()
+        G.add_edge(0, 1, weight=3)
+        G.add_edge(0, 2, weight=2)
+        G.add_edge(0, 3, weight=6)
+        G.add_edge(0, 4, weight=4)
+        G.add_edge(1, 3, weight=5)
+        G.add_edge(1, 5, weight=5)
+        G.add_edge(2, 4, weight=1)
+        G.add_edge(3, 4, weight=2)
+        G.add_edge(3, 5, weight=1)
+        G.add_edge(4, 5, weight=4)
+        b_answer = dict.fromkeys(G.edges(), 0)
+        b_answer[(0, 2)] = b_answer[(2, 4)] = b_answer[(4, 5)] = 0.5
+        b_answer[(0, 3)] = b_answer[(3, 5)] = 0.5
+        b = nx.edge_betweenness_centrality_subset(
+            G, sources=[0], targets=[5], normalized=False, weight="weight"
+        )
+        for n in G.edges():
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
diff --git a/networkx/algorithms/centrality/tests/test_current_flow_betweenness_centrality.py b/networkx/algorithms/centrality/tests/test_current_flow_betweenness_centrality.py
index e9f5179..4e3d438 100644
--- a/networkx/algorithms/centrality/tests/test_current_flow_betweenness_centrality.py
+++ b/networkx/algorithms/centrality/tests/test_current_flow_betweenness_centrality.py
@@ -134,6 +134,11 @@ class TestApproximateFlowBetweennessCentrality:
             for n in sorted(G):
                 np.testing.assert_allclose(b[n], b_answer[n], atol=epsilon)
 
+    def test_lower_kmax(self):
+        G = nx.complete_graph(4)
+        with pytest.raises(nx.NetworkXError, match="Increase kmax or epsilon"):
+            nx.approximate_current_flow_betweenness_centrality(G, kmax=4)
+
 
 class TestWeightedFlowBetweennessCentrality:
     pass
@@ -175,3 +180,18 @@ class TestEdgeFlowBetweennessCentrality:
         for (s, t), v1 in b_answer.items():
             v2 = b.get((s, t), b.get((t, s)))
             assert v1 == pytest.approx(v2, abs=1e-7)
+
+
+@pytest.mark.parametrize(
+    "centrality_func",
+    (
+        nx.current_flow_betweenness_centrality,
+        nx.edge_current_flow_betweenness_centrality,
+        nx.approximate_current_flow_betweenness_centrality,
+    ),
+)
+def test_unconnected_graphs_betweenness_centrality(centrality_func):
+    G = nx.Graph([(1, 2), (3, 4)])
+    G.add_node(5)
+    with pytest.raises(nx.NetworkXError, match="Graph not connected"):
+        centrality_func(G)
diff --git a/networkx/algorithms/centrality/tests/test_degree_centrality.py b/networkx/algorithms/centrality/tests/test_degree_centrality.py
index 591df6a..f3f6c39 100644
--- a/networkx/algorithms/centrality/tests/test_degree_centrality.py
+++ b/networkx/algorithms/centrality/tests/test_degree_centrality.py
@@ -9,7 +9,6 @@ import networkx as nx
 
 class TestDegreeCentrality:
     def setup_method(self):
-
         self.K = nx.krackhardt_kite_graph()
         self.P3 = nx.path_graph(3)
         self.K5 = nx.complete_graph(5)
diff --git a/networkx/algorithms/centrality/tests/test_eigenvector_centrality.py b/networkx/algorithms/centrality/tests/test_eigenvector_centrality.py
index 7a44aff..b862005 100644
--- a/networkx/algorithms/centrality/tests/test_eigenvector_centrality.py
+++ b/networkx/algorithms/centrality/tests/test_eigenvector_centrality.py
@@ -128,25 +128,25 @@ class TestEigenvectorCentralityDirected:
     def test_eigenvector_centrality_weighted(self):
         G = self.G
         p = nx.eigenvector_centrality(G)
-        for (a, b) in zip(list(p.values()), self.G.evc):
+        for a, b in zip(list(p.values()), self.G.evc):
             assert a == pytest.approx(b, abs=1e-4)
 
     def test_eigenvector_centrality_weighted_numpy(self):
         G = self.G
         p = nx.eigenvector_centrality_numpy(G)
-        for (a, b) in zip(list(p.values()), self.G.evc):
+        for a, b in zip(list(p.values()), self.G.evc):
             assert a == pytest.approx(b, abs=1e-7)
 
     def test_eigenvector_centrality_unweighted(self):
         G = self.H
         p = nx.eigenvector_centrality(G)
-        for (a, b) in zip(list(p.values()), self.G.evc):
+        for a, b in zip(list(p.values()), self.G.evc):
             assert a == pytest.approx(b, abs=1e-4)
 
     def test_eigenvector_centrality_unweighted_numpy(self):
         G = self.H
         p = nx.eigenvector_centrality_numpy(G)
-        for (a, b) in zip(list(p.values()), self.G.evc):
+        for a, b in zip(list(p.values()), self.G.evc):
             assert a == pytest.approx(b, abs=1e-7)
 
 
@@ -166,3 +166,10 @@ class TestEigenvectorCentralityExceptions:
     def test_empty_numpy(self):
         with pytest.raises(nx.NetworkXException):
             nx.eigenvector_centrality_numpy(nx.Graph())
+
+    def test_zero_nstart(self):
+        G = nx.Graph([(1, 2), (1, 3), (2, 3)])
+        with pytest.raises(
+            nx.NetworkXException, match="initial vector cannot have all zero values"
+        ):
+            nx.eigenvector_centrality(G, nstart={v: 0 for v in G})
diff --git a/networkx/algorithms/centrality/tests/test_katz_centrality.py b/networkx/algorithms/centrality/tests/test_katz_centrality.py
index 2511453..a070e5a 100644
--- a/networkx/algorithms/centrality/tests/test_katz_centrality.py
+++ b/networkx/algorithms/centrality/tests/test_katz_centrality.py
@@ -295,14 +295,14 @@ class TestKatzCentralityDirected:
         G = self.G
         alpha = self.G.alpha
         p = nx.katz_centrality(G, alpha, weight="weight")
-        for (a, b) in zip(list(p.values()), self.G.evc):
+        for a, b in zip(list(p.values()), self.G.evc):
             assert a == pytest.approx(b, abs=1e-7)
 
     def test_katz_centrality_unweighted(self):
         H = self.H
         alpha = self.H.alpha
         p = nx.katz_centrality(H, alpha, weight="weight")
-        for (a, b) in zip(list(p.values()), self.H.evc):
+        for a, b in zip(list(p.values()), self.H.evc):
             assert a == pytest.approx(b, abs=1e-7)
 
 
@@ -318,14 +318,14 @@ class TestKatzCentralityDirectedNumpy(TestKatzCentralityDirected):
         G = self.G
         alpha = self.G.alpha
         p = nx.katz_centrality_numpy(G, alpha, weight="weight")
-        for (a, b) in zip(list(p.values()), self.G.evc):
+        for a, b in zip(list(p.values()), self.G.evc):
             assert a == pytest.approx(b, abs=1e-7)
 
     def test_katz_centrality_unweighted(self):
         H = self.H
         alpha = self.H.alpha
         p = nx.katz_centrality_numpy(H, alpha, weight="weight")
-        for (a, b) in zip(list(p.values()), self.H.evc):
+        for a, b in zip(list(p.values()), self.H.evc):
             assert a == pytest.approx(b, abs=1e-7)
 
 
diff --git a/networkx/algorithms/centrality/tests/test_laplacian_centrality.py b/networkx/algorithms/centrality/tests/test_laplacian_centrality.py
new file mode 100644
index 0000000..0cc59c6
--- /dev/null
+++ b/networkx/algorithms/centrality/tests/test_laplacian_centrality.py
@@ -0,0 +1,189 @@
+import pytest
+
+import networkx as nx
+
+np = pytest.importorskip("numpy")
+sp = pytest.importorskip("scipy")
+
+
+def test_laplacian_centrality_E():
+    E = nx.Graph()
+    E.add_weighted_edges_from(
+        [(0, 1, 4), (4, 5, 1), (0, 2, 2), (2, 1, 1), (1, 3, 2), (1, 4, 2)]
+    )
+    d = nx.laplacian_centrality(E)
+    exact = {
+        0: 0.700000,
+        1: 0.900000,
+        2: 0.280000,
+        3: 0.220000,
+        4: 0.260000,
+        5: 0.040000,
+    }
+
+    for n, dc in d.items():
+        assert exact[n] == pytest.approx(dc, abs=1e-7)
+
+    # Check not normalized
+    full_energy = 200
+    dnn = nx.laplacian_centrality(E, normalized=False)
+    for n, dc in dnn.items():
+        assert exact[n] * full_energy == pytest.approx(dc, abs=1e-7)
+
+    # Check unweighted not-normalized version
+    duw_nn = nx.laplacian_centrality(E, normalized=False, weight=None)
+    print(duw_nn)
+    exact_uw_nn = {
+        0: 18,
+        1: 34,
+        2: 18,
+        3: 10,
+        4: 16,
+        5: 6,
+    }
+    for n, dc in duw_nn.items():
+        assert exact_uw_nn[n] == pytest.approx(dc, abs=1e-7)
+
+    # Check unweighted version
+    duw = nx.laplacian_centrality(E, weight=None)
+    full_energy = 42
+    for n, dc in duw.items():
+        assert exact_uw_nn[n] / full_energy == pytest.approx(dc, abs=1e-7)
+
+
+def test_laplacian_centrality_KC():
+    KC = nx.karate_club_graph()
+    d = nx.laplacian_centrality(KC)
+    exact = {
+        0: 0.2543593,
+        1: 0.1724524,
+        2: 0.2166053,
+        3: 0.0964646,
+        4: 0.0350344,
+        5: 0.0571109,
+        6: 0.0540713,
+        7: 0.0788674,
+        8: 0.1222204,
+        9: 0.0217565,
+        10: 0.0308751,
+        11: 0.0215965,
+        12: 0.0174372,
+        13: 0.118861,
+        14: 0.0366341,
+        15: 0.0548712,
+        16: 0.0172772,
+        17: 0.0191969,
+        18: 0.0225564,
+        19: 0.0331147,
+        20: 0.0279955,
+        21: 0.0246361,
+        22: 0.0382339,
+        23: 0.1294193,
+        24: 0.0227164,
+        25: 0.0644697,
+        26: 0.0281555,
+        27: 0.075188,
+        28: 0.0364742,
+        29: 0.0707087,
+        30: 0.0708687,
+        31: 0.131019,
+        32: 0.2370821,
+        33: 0.3066709,
+    }
+    for n, dc in d.items():
+        assert exact[n] == pytest.approx(dc, abs=1e-7)
+
+    # Check not normalized
+    full_energy = 12502
+    dnn = nx.laplacian_centrality(KC, normalized=False)
+    for n, dc in dnn.items():
+        assert exact[n] * full_energy == pytest.approx(dc, abs=1e-3)
+
+
+def test_laplacian_centrality_K():
+    K = nx.krackhardt_kite_graph()
+    d = nx.laplacian_centrality(K)
+    exact = {
+        0: 0.3010753,
+        1: 0.3010753,
+        2: 0.2258065,
+        3: 0.483871,
+        4: 0.2258065,
+        5: 0.3870968,
+        6: 0.3870968,
+        7: 0.1935484,
+        8: 0.0752688,
+        9: 0.0322581,
+    }
+    for n, dc in d.items():
+        assert exact[n] == pytest.approx(dc, abs=1e-7)
+
+    # Check not normalized
+    full_energy = 186
+    dnn = nx.laplacian_centrality(K, normalized=False)
+    for n, dc in dnn.items():
+        assert exact[n] * full_energy == pytest.approx(dc, abs=1e-3)
+
+
+def test_laplacian_centrality_P3():
+    P3 = nx.path_graph(3)
+    d = nx.laplacian_centrality(P3)
+    exact = {0: 0.6, 1: 1.0, 2: 0.6}
+    for n, dc in d.items():
+        assert exact[n] == pytest.approx(dc, abs=1e-7)
+
+
+def test_laplacian_centrality_K5():
+    K5 = nx.complete_graph(5)
+    d = nx.laplacian_centrality(K5)
+    exact = {0: 0.52, 1: 0.52, 2: 0.52, 3: 0.52, 4: 0.52}
+    for n, dc in d.items():
+        assert exact[n] == pytest.approx(dc, abs=1e-7)
+
+
+def test_laplacian_centrality_FF():
+    FF = nx.florentine_families_graph()
+    d = nx.laplacian_centrality(FF)
+    exact = {
+        "Acciaiuoli": 0.0804598,
+        "Medici": 0.4022989,
+        "Castellani": 0.1724138,
+        "Peruzzi": 0.183908,
+        "Strozzi": 0.2528736,
+        "Barbadori": 0.137931,
+        "Ridolfi": 0.2183908,
+        "Tornabuoni": 0.2183908,
+        "Albizzi": 0.1954023,
+        "Salviati": 0.1149425,
+        "Pazzi": 0.0344828,
+        "Bischeri": 0.1954023,
+        "Guadagni": 0.2298851,
+        "Ginori": 0.045977,
+        "Lamberteschi": 0.0574713,
+    }
+    for n, dc in d.items():
+        assert exact[n] == pytest.approx(dc, abs=1e-7)
+
+
+def test_laplacian_centrality_DG():
+    DG = nx.DiGraph([(0, 5), (1, 5), (2, 5), (3, 5), (4, 5), (5, 6), (5, 7), (5, 8)])
+    d = nx.laplacian_centrality(DG)
+    exact = {
+        0: 0.2123352,
+        5: 0.515391,
+        1: 0.2123352,
+        2: 0.2123352,
+        3: 0.2123352,
+        4: 0.2123352,
+        6: 0.2952031,
+        7: 0.2952031,
+        8: 0.2952031,
+    }
+    for n, dc in d.items():
+        assert exact[n] == pytest.approx(dc, abs=1e-7)
+
+    # Check not normalized
+    full_energy = 9.50704
+    dnn = nx.laplacian_centrality(DG, normalized=False)
+    for n, dc in dnn.items():
+        assert exact[n] * full_energy == pytest.approx(dc, abs=1e-4)
diff --git a/networkx/algorithms/centrality/tests/test_load_centrality.py b/networkx/algorithms/centrality/tests/test_load_centrality.py
index d994394..bb7da65 100644
--- a/networkx/algorithms/centrality/tests/test_load_centrality.py
+++ b/networkx/algorithms/centrality/tests/test_load_centrality.py
@@ -6,7 +6,6 @@ import networkx as nx
 class TestLoadCentrality:
     @classmethod
     def setup_class(cls):
-
         G = nx.Graph()
         G.add_edge(0, 1, weight=3)
         G.add_edge(0, 2, weight=2)
diff --git a/networkx/algorithms/centrality/tests/test_voterank.py b/networkx/algorithms/centrality/tests/test_voterank.py
index aa653ae..1212681 100644
--- a/networkx/algorithms/centrality/tests/test_voterank.py
+++ b/networkx/algorithms/centrality/tests/test_voterank.py
@@ -28,6 +28,10 @@ class TestVoteRankCentrality:
         )
         assert [0, 7, 6] == nx.voterank(G)
 
+    def test_voterank_emptygraph(self):
+        G = nx.Graph()
+        assert [] == nx.voterank(G)
+
     # Graph unit test
     def test_voterank_centrality_2(self):
         G = nx.florentine_families_graph()
diff --git a/networkx/algorithms/chordal.py b/networkx/algorithms/chordal.py
index ad17ef7..6ff8b04 100644
--- a/networkx/algorithms/chordal.py
+++ b/networkx/algorithms/chordal.py
@@ -6,7 +6,6 @@ A graph is chordal if every cycle of length at least 4 has a chord
 https://en.wikipedia.org/wiki/Chordal_graph
 """
 import sys
-import warnings
 
 import networkx as nx
 from networkx.algorithms.components import connected_components
@@ -162,7 +161,7 @@ def find_induced_nodes(G, s, t, treewidth_bound=sys.maxsize):
 
 
 def chordal_graph_cliques(G):
-    """Returns the set of maximal cliques of a chordal graph.
+    """Returns all maximal cliques of a chordal graph.
 
     The algorithm breaks the graph in connected components and performs a
     maximum cardinality search in each component to get the cliques.
@@ -172,9 +171,11 @@ def chordal_graph_cliques(G):
     G : graph
       A NetworkX graph
 
-    Returns
-    -------
-    cliques : A set containing the maximal cliques in G.
+    Yields
+    ------
+    frozenset of nodes
+        Maximal cliques, each of which is a frozenset of
+        nodes in `G`. The order of cliques is arbitrary.
 
     Raises
     ------
@@ -200,11 +201,35 @@ def chordal_graph_cliques(G):
     ... ]
     >>> G = nx.Graph(e)
     >>> G.add_node(9)
-    >>> setlist = nx.chordal_graph_cliques(G)
+    >>> cliques = [c for c in nx.chordal_graph_cliques(G)]
+    >>> cliques[0]
+    frozenset({1, 2, 3})
     """
-    msg = "This will return a generator in 3.0."
-    warnings.warn(msg, DeprecationWarning)
-    return {c for c in _chordal_graph_cliques(G)}
+    for C in (G.subgraph(c).copy() for c in connected_components(G)):
+        if C.number_of_nodes() == 1:
+            if nx.number_of_selfloops(C) > 0:
+                raise nx.NetworkXError("Input graph is not chordal.")
+            yield frozenset(C.nodes())
+        else:
+            unnumbered = set(C.nodes())
+            v = arbitrary_element(C)
+            unnumbered.remove(v)
+            numbered = {v}
+            clique_wanna_be = {v}
+            while unnumbered:
+                v = _max_cardinality_node(C, unnumbered, numbered)
+                unnumbered.remove(v)
+                numbered.add(v)
+                new_clique_wanna_be = set(C.neighbors(v)) & numbered
+                sg = C.subgraph(clique_wanna_be)
+                if _is_complete_graph(sg):
+                    new_clique_wanna_be.add(v)
+                    if not new_clique_wanna_be >= clique_wanna_be:
+                        yield frozenset(clique_wanna_be)
+                    clique_wanna_be = new_clique_wanna_be
+                else:
+                    raise nx.NetworkXError("Input graph is not chordal.")
+            yield frozenset(clique_wanna_be)
 
 
 def chordal_graph_treewidth(G):
@@ -331,78 +356,6 @@ def _find_chordality_breaker(G, s=None, treewidth_bound=sys.maxsize):
     return ()
 
 
-def _chordal_graph_cliques(G):
-    """Returns all maximal cliques of a chordal graph.
-
-    The algorithm breaks the graph in connected components and performs a
-    maximum cardinality search in each component to get the cliques.
-
-    Parameters
-    ----------
-    G : graph
-      A NetworkX graph
-
-    Returns
-    -------
-    iterator
-        An iterator over maximal cliques, each of which is a frozenset of
-        nodes in `G`. The order of cliques is arbitrary.
-
-    Raises
-    ------
-    NetworkXError
-        The algorithm does not support DiGraph, MultiGraph and MultiDiGraph.
-        The algorithm can only be applied to chordal graphs. If the input
-        graph is found to be non-chordal, a :exc:`NetworkXError` is raised.
-
-    Examples
-    --------
-    >>> e = [
-    ...     (1, 2),
-    ...     (1, 3),
-    ...     (2, 3),
-    ...     (2, 4),
-    ...     (3, 4),
-    ...     (3, 5),
-    ...     (3, 6),
-    ...     (4, 5),
-    ...     (4, 6),
-    ...     (5, 6),
-    ...     (7, 8),
-    ... ]
-    >>> G = nx.Graph(e)
-    >>> G.add_node(9)
-    >>> cliques = [c for c in _chordal_graph_cliques(G)]
-    >>> cliques[0]
-    frozenset({1, 2, 3})
-    """
-    for C in (G.subgraph(c).copy() for c in connected_components(G)):
-        if C.number_of_nodes() == 1:
-            if nx.number_of_selfloops(C) > 0:
-                raise nx.NetworkXError("Input graph is not chordal.")
-            yield frozenset(C.nodes())
-        else:
-            unnumbered = set(C.nodes())
-            v = arbitrary_element(C)
-            unnumbered.remove(v)
-            numbered = {v}
-            clique_wanna_be = {v}
-            while unnumbered:
-                v = _max_cardinality_node(C, unnumbered, numbered)
-                unnumbered.remove(v)
-                numbered.add(v)
-                new_clique_wanna_be = set(C.neighbors(v)) & numbered
-                sg = C.subgraph(clique_wanna_be)
-                if _is_complete_graph(sg):
-                    new_clique_wanna_be.add(v)
-                    if not new_clique_wanna_be >= clique_wanna_be:
-                        yield frozenset(clique_wanna_be)
-                    clique_wanna_be = new_clique_wanna_be
-                else:
-                    raise nx.NetworkXError("Input graph is not chordal.")
-            yield frozenset(clique_wanna_be)
-
-
 @not_implemented_for("directed")
 def complete_to_chordal_graph(G):
     """Return a copy of G completed to a chordal graph
diff --git a/networkx/algorithms/clique.py b/networkx/algorithms/clique.py
index afdaa47..c563e2d 100644
--- a/networkx/algorithms/clique.py
+++ b/networkx/algorithms/clique.py
@@ -137,6 +137,67 @@ def find_cliques(G, nodes=None):
     ValueError
         If `nodes` is not a clique.
 
+    Examples
+    --------
+    >>> from pprint import pprint  # For nice dict formatting
+    >>> G = nx.karate_club_graph()
+    >>> sum(1 for c in nx.find_cliques(G))  # The number of maximal cliques in G
+    36
+    >>> max(nx.find_cliques(G), key=len)  # The largest maximal clique in G
+    [0, 1, 2, 3, 13]
+
+    The size of the largest maximal clique is known as the *clique number* of
+    the graph, which can be found directly with:
+
+    >>> max(len(c) for c in nx.find_cliques(G))
+    5
+
+    One can also compute the number of maximal cliques in `G` that contain a given
+    node. The following produces a dictionary keyed by node whose
+    values are the number of maximal cliques in `G` that contain the node:
+
+    >>> pprint({n: sum(1 for c in nx.find_cliques(G) if n in c) for n in G})
+    {0: 13,
+     1: 6,
+     2: 7,
+     3: 3,
+     4: 2,
+     5: 3,
+     6: 3,
+     7: 1,
+     8: 3,
+     9: 2,
+     10: 2,
+     11: 1,
+     12: 1,
+     13: 2,
+     14: 1,
+     15: 1,
+     16: 1,
+     17: 1,
+     18: 1,
+     19: 2,
+     20: 1,
+     21: 1,
+     22: 1,
+     23: 3,
+     24: 2,
+     25: 2,
+     26: 1,
+     27: 3,
+     28: 2,
+     29: 2,
+     30: 2,
+     31: 4,
+     32: 9,
+     33: 14}
+
+    Or, similarly, the maximal cliques in `G` that contain a given node.
+    For example, the 4 maximal cliques that contain node 31:
+
+    >>> [c for c in nx.find_cliques(G) if 31 in c]
+    [[0, 31], [33, 32, 31], [33, 28, 31], [24, 25, 31]]
+
     See Also
     --------
     find_cliques_recursive
@@ -274,7 +335,7 @@ def find_cliques_recursive(G, nodes=None):
     See Also
     --------
     find_cliques
-        An iterative version of the same algorithm.
+        An iterative version of the same algorithm. See docstring for examples.
 
     Notes
     -----
@@ -451,6 +512,14 @@ def graph_clique_number(G, cliques=None):
     The *clique number* of a graph is the size of the largest clique in
     the graph.
 
+    .. deprecated:: 3.0
+
+       graph_clique_number is deprecated in NetworkX 3.0 and will be removed
+       in v3.2. The graph clique number can be computed directly with::
+
+           max(len(c) for c in nx.find_cliques(G))
+
+
     Parameters
     ----------
     G : NetworkX graph
@@ -473,6 +542,16 @@ def graph_clique_number(G, cliques=None):
     maximal cliques.
 
     """
+    import warnings
+
+    warnings.warn(
+        (
+            "\n\ngraph_clique_number is deprecated and will be removed.\n"
+            "Use: ``max(len(c) for c in nx.find_cliques(G))`` instead."
+        ),
+        DeprecationWarning,
+        stacklevel=2,
+    )
     if len(G.nodes) < 1:
         return 0
     if cliques is None:
@@ -483,6 +562,13 @@ def graph_clique_number(G, cliques=None):
 def graph_number_of_cliques(G, cliques=None):
     """Returns the number of maximal cliques in the graph.
 
+    .. deprecated:: 3.0
+
+       graph_number_of_cliques is deprecated and will be removed in v3.2.
+       The number of maximal cliques can be computed directly with::
+
+           sum(1 for _ in nx.find_cliques(G))
+
     Parameters
     ----------
     G : NetworkX graph
@@ -505,6 +591,16 @@ def graph_number_of_cliques(G, cliques=None):
     maximal cliques.
 
     """
+    import warnings
+
+    warnings.warn(
+        (
+            "\n\ngraph_number_of_cliques is deprecated and will be removed.\n"
+            "Use: ``sum(1 for _ in nx.find_cliques(G))`` instead."
+        ),
+        DeprecationWarning,
+        stacklevel=2,
+    )
     if cliques is None:
         cliques = list(find_cliques(G))
     return len(cliques)
@@ -576,9 +672,29 @@ def node_clique_number(G, nodes=None, cliques=None, separate_nodes=False):
 def number_of_cliques(G, nodes=None, cliques=None):
     """Returns the number of maximal cliques for each node.
 
+    .. deprecated:: 3.0
+
+       number_of_cliques is deprecated and will be removed in v3.2.
+       Use the result of `find_cliques` directly to compute the number of
+       cliques containing each node::
+
+           {n: sum(1 for c in nx.find_cliques(G) if n in c) for n in G}
+
     Returns a single or list depending on input nodes.
     Optional list of cliques can be input if already computed.
     """
+    import warnings
+
+    warnings.warn(
+        (
+            "\n\nnumber_of_cliques is deprecated and will be removed.\n"
+            "Use the result of find_cliques directly to compute the number\n"
+            "of cliques containing each node:\n\n"
+            "    {n: sum(1 for c in nx.find_cliques(G) if n in c) for n in G}\n\n"
+        ),
+        DeprecationWarning,
+        stacklevel=2,
+    )
     if cliques is None:
         cliques = list(find_cliques(G))
 
@@ -599,9 +715,29 @@ def number_of_cliques(G, nodes=None, cliques=None):
 def cliques_containing_node(G, nodes=None, cliques=None):
     """Returns a list of cliques containing the given node.
 
+    .. deprecated:: 3.0
+
+       cliques_containing_node is deprecated and will be removed in v3.2.
+       Use the result of `find_cliques` directly to compute the cliques that
+       contain each node::
+
+           {n: [c for c in nx.find_cliques(G) if n in c] for n in G}
+
     Returns a single list or list of lists depending on input nodes.
     Optional list of cliques can be input if already computed.
     """
+    import warnings
+
+    warnings.warn(
+        (
+            "\n\ncliques_containing_node is deprecated and will be removed.\n"
+            "Use the result of find_cliques directly to compute maximal cliques\n"
+            "containing each node:\n\n"
+            "    {n: [c for c in nx.find_cliques(G) if n in c] for n in G}\n\n"
+        ),
+        DeprecationWarning,
+        stacklevel=2,
+    )
     if cliques is None:
         cliques = list(find_cliques(G))
 
diff --git a/networkx/algorithms/cluster.py b/networkx/algorithms/cluster.py
index 1421fef..ec65e97 100644
--- a/networkx/algorithms/cluster.py
+++ b/networkx/algorithms/cluster.py
@@ -3,6 +3,7 @@
 from collections import Counter
 from itertools import chain, combinations
 
+import networkx as nx
 from networkx.utils import not_implemented_for
 
 __all__ = [
@@ -15,6 +16,7 @@ __all__ = [
 ]
 
 
+@nx._dispatch("triangles")
 @not_implemented_for("directed")
 def triangles(G, nodes=None):
     """Compute the number of triangles.
@@ -218,6 +220,7 @@ def _directed_weighted_triangles_and_degree_iter(G, nodes=None, weight="weight")
         yield (i, dtotal, dbidirectional, directed_triangles)
 
 
+@nx._dispatch(name="average_clustering")
 def average_clustering(G, nodes=None, weight=None, count_zeros=True):
     r"""Compute the average clustering coefficient for the graph G.
 
@@ -277,6 +280,7 @@ def average_clustering(G, nodes=None, weight=None, count_zeros=True):
     return sum(c) / len(c)
 
 
+@nx._dispatch(name="clustering")
 def clustering(G, nodes=None, weight=None):
     r"""Compute the clustering coefficient for nodes.
 
@@ -312,8 +316,7 @@ def clustering(G, nodes=None, weight=None):
 
     .. math::
 
-       c_u = \frac{2}{deg^{tot}(u)(deg^{tot}(u)-1) - 2deg^{\leftrightarrow}(u)}
-             T(u),
+       c_u = \frac{T(u)}{2(deg^{tot}(u)(deg^{tot}(u)-1) - 2deg^{\leftrightarrow}(u))},
 
     where :math:`T(u)` is the number of directed triangles through node
     :math:`u`, :math:`deg^{tot}(u)` is the sum of in degree and out degree of
@@ -325,8 +328,10 @@ def clustering(G, nodes=None, weight=None):
     ----------
     G : graph
 
-    nodes : container of nodes, optional (default=all nodes in G)
-       Compute clustering for nodes in this container.
+    nodes : node, iterable of nodes, or None (default=None)
+        If a singleton node, return the clustering coefficient for that node.
+        If an iterable, compute the clustering coefficient for each of those nodes.
+        If `None` (the default) compute the clustering coefficient for all nodes in `G`.
 
     weight : string or None, optional (default=None)
        The edge attribute that holds the numerical value used as a weight.
@@ -390,6 +395,7 @@ def clustering(G, nodes=None, weight=None):
     return clusterc
 
 
+@nx._dispatch("transitivity")
 def transitivity(G):
     r"""Compute graph transitivity, the fraction of all possible triangles
     present in G.
@@ -428,6 +434,7 @@ def transitivity(G):
     return 0 if triangles == 0 else triangles / contri
 
 
+@nx._dispatch(name="square_clustering")
 def square_clustering(G, nodes=None):
     r"""Compute the squares clustering coefficient for nodes.
 
@@ -505,6 +512,7 @@ def square_clustering(G, nodes=None):
     return clustering
 
 
+@nx._dispatch("generalized_degree")
 @not_implemented_for("directed")
 def generalized_degree(G, nodes=None):
     r"""Compute the generalized degree for nodes.
@@ -546,7 +554,7 @@ def generalized_degree(G, nodes=None):
 
     Notes
     -----
-    In a network of N nodes, the highest triangle multiplicty an edge can have
+    In a network of N nodes, the highest triangle multiplicity an edge can have
     is N-2.
 
     The return value does not include a `zero` entry if no edges of a
diff --git a/networkx/algorithms/coloring/equitable_coloring.py b/networkx/algorithms/coloring/equitable_coloring.py
index 3072a0a..8c08609 100644
--- a/networkx/algorithms/coloring/equitable_coloring.py
+++ b/networkx/algorithms/coloring/equitable_coloring.py
@@ -12,10 +12,7 @@ __all__ = ["equitable_color"]
 def is_coloring(G, coloring):
     """Determine if the coloring is a valid coloring for the graph G."""
     # Verify that the coloring is valid.
-    for (s, d) in G.edges:
-        if coloring[s] == coloring[d]:
-            return False
-    return True
+    return all(coloring[s] != coloring[d] for s, d in G.edges)
 
 
 def is_equitable(G, coloring, num_colors=None):
@@ -49,7 +46,7 @@ def is_equitable(G, coloring, num_colors=None):
 
 
 def make_C_from_F(F):
-    C = defaultdict(lambda: [])
+    C = defaultdict(list)
     for node, color in F.items():
         C[color].append(node)
 
@@ -68,9 +65,7 @@ def make_N_from_L_C(L, C):
 
 def make_H_from_C_N(C, N):
     return {
-        (c1, c2): sum(1 for node in C[c1] if N[(node, c2)] == 0)
-        for c1 in C.keys()
-        for c2 in C.keys()
+        (c1, c2): sum(1 for node in C[c1] if N[(node, c2)] == 0) for c1 in C for c2 in C
     }
 
 
@@ -81,7 +76,7 @@ def change_color(u, X, Y, N, H, F, C, L):
     # Change the class of 'u' from X to Y
     F[u] = Y
 
-    for k in C.keys():
+    for k in C:
         # 'u' witnesses an edge from k -> Y instead of from k -> X now.
         if N[u, k] == 0:
             H[(X, k)] -= 1
@@ -166,7 +161,7 @@ def procedure_P(V_minus, V_plus, N, H, F, C, L, excluded_colors=None):
         # using a look-up table instead of testing for membership in a set by a
         # logarithmic factor.
         next_layer = []
-        for k in C.keys():
+        for k in C:
             if (
                 H[(k, pop)] > 0
                 and k not in A_cal
@@ -201,11 +196,10 @@ def procedure_P(V_minus, V_plus, N, H, F, C, L, excluded_colors=None):
         made_equitable = False
 
         for W_1 in R_cal[::-1]:
-
             for v in C[W_1]:
                 X = None
 
-                for U in C.keys():
+                for U in C:
                     if N[(v, U)] == 0 and U in A_cal and U != W_1:
                         X = U
 
@@ -213,7 +207,7 @@ def procedure_P(V_minus, V_plus, N, H, F, C, L, excluded_colors=None):
                 if X is None:
                     continue
 
-                for U in C.keys():
+                for U in C:
                     # Note: Departing from the paper here.
                     if N[(v, U)] >= 1 and U not in A_cal:
                         X_prime = U
@@ -294,7 +288,7 @@ def procedure_P(V_minus, V_plus, N, H, F, C, L, excluded_colors=None):
                     # they only exclude colors from A_cal
                     next_layer = [
                         k
-                        for k in C.keys()
+                        for k in C
                         if H[(pop, k)] > 0 and k not in B_cal_prime and k not in marked
                     ]
 
@@ -318,7 +312,7 @@ def procedure_P(V_minus, V_plus, N, H, F, C, L, excluded_colors=None):
 
                     I_set.add(z)
                     I_covered.add(z)
-                    I_covered.update([nbr for nbr in L[z]])
+                    I_covered.update(list(L[z]))
 
                     for w in L[z]:
                         if F[w] in A_cal_0 and N[(z, F[w])] == 1:
@@ -355,19 +349,13 @@ def procedure_P(V_minus, V_plus, N, H, F, C, L, excluded_colors=None):
 
                                 # change color of w to some color in B_cal
                                 W_plus = next(
-                                    k
-                                    for k in C.keys()
-                                    if N[(w, k)] == 0 and k not in A_cal
+                                    k for k in C if N[(w, k)] == 0 and k not in A_cal
                                 )
                                 change_color(w, W, W_plus, N=N, H=H, F=F, C=C, L=L)
 
                                 # recurse with G[B \cup W*]
                                 excluded_colors.update(
-                                    [
-                                        k
-                                        for k in C.keys()
-                                        if k != W and k not in B_cal_prime
-                                    ]
+                                    [k for k in C if k != W and k not in B_cal_prime]
                                 )
                                 procedure_P(
                                     V_minus=W,
@@ -482,7 +470,6 @@ def equitable_color(G, num_colors):
 
     for u in sorted(G.nodes):
         for v in sorted(G.neighbors(u)):
-
             # Do not double count edges if (v, u) has already been seen.
             if (v, u) in edges_seen:
                 continue
@@ -505,7 +492,7 @@ def equitable_color(G, num_colors):
 
         if N[(u, F[u])] != 0:
             # Find the first color where 'u' does not have any neighbors.
-            Y = next(k for k in C.keys() if N[(u, k)] == 0)
+            Y = next(k for k in C if N[(u, k)] == 0)
             X = F[u]
             change_color(u, X, Y, N=N, H=H, F=F, C=C, L=L_)
 
diff --git a/networkx/algorithms/coloring/greedy_coloring.py b/networkx/algorithms/coloring/greedy_coloring.py
index 329746c..b078cb2 100644
--- a/networkx/algorithms/coloring/greedy_coloring.py
+++ b/networkx/algorithms/coloring/greedy_coloring.py
@@ -196,7 +196,7 @@ def strategy_connected_sequential(G, colors, traversal="bfs"):
         # Yield the source node, then all the nodes in the specified
         # traversal order.
         yield source
-        for (_, end) in traverse(G.subgraph(component), source):
+        for _, end in traverse(G.subgraph(component), source):
             yield end
 
 
@@ -209,29 +209,42 @@ def strategy_saturation_largest_first(G, colors):
 
     """
     distinct_colors = {v: set() for v in G}
-    for i in range(len(G)):
-        # On the first time through, simply choose the node of highest degree.
-        if i == 0:
-            node = max(G, key=G.degree)
-            yield node
-            # Add the color 0 to the distinct colors set for each
-            # neighbors of that node.
-            for v in G[node]:
-                distinct_colors[v].add(0)
-        else:
-            # Compute the maximum saturation and the set of nodes that
-            # achieve that saturation.
-            saturation = {
-                v: len(c) for v, c in distinct_colors.items() if v not in colors
-            }
-            # Yield the node with the highest saturation, and break ties by
-            # degree.
-            node = max(saturation, key=lambda v: (saturation[v], G.degree(v)))
-            yield node
-            # Update the distinct color sets for the neighbors.
-            color = colors[node]
-            for v in G[node]:
-                distinct_colors[v].add(color)
+
+    # Add the node color assignments given in colors to the
+    # distinct colors set for each neighbor of that node
+    for node, color in colors.items():
+        for neighbor in G[node]:
+            distinct_colors[neighbor].add(color)
+
+    # Check that the color assignments in colors are valid
+    # i.e. no neighboring nodes have the same color
+    if len(colors) >= 2:
+        for node, color in colors.items():
+            if color in distinct_colors[node]:
+                raise nx.NetworkXError("Neighboring nodes must have different colors")
+
+    # If 0 nodes have been colored, simply choose the node of highest degree.
+    if not colors:
+        node = max(G, key=G.degree)
+        yield node
+        # Add the color 0 to the distinct colors set for each
+        # neighbor of that node.
+        for v in G[node]:
+            distinct_colors[v].add(0)
+
+    while len(G) != len(colors):
+        # Update the distinct color sets for the neighbors.
+        for node, color in colors.items():
+            for neighbor in G[node]:
+                distinct_colors[neighbor].add(color)
+
+        # Compute the maximum saturation and the set of nodes that
+        # achieve that saturation.
+        saturation = {v: len(c) for v, c in distinct_colors.items() if v not in colors}
+        # Yield the node with the highest saturation, and break ties by
+        # degree.
+        node = max(saturation, key=lambda v: (saturation[v], G.degree(v)))
+        yield node
 
 
 #: Dictionary mapping name of a strategy as a string to the strategy function.
@@ -425,7 +438,7 @@ class _AdjEntry:
 
 
 def _greedy_coloring_with_interchange(G, nodes):
-    """Return a coloring for `orginal_graph` using interchange approach
+    """Return a coloring for `original_graph` using interchange approach
 
     This procedure is an adaption of the algorithm described by [1]_,
     and is an implementation of coloring with interchange. Please be
@@ -457,7 +470,7 @@ def _greedy_coloring_with_interchange(G, nodes):
 
     graph = {node: _Node(node, n) for node in G}
 
-    for (node1, node2) in G.edges():
+    for node1, node2 in G.edges():
         adj_entry1 = _AdjEntry(node2)
         adj_entry2 = _AdjEntry(node1)
         adj_entry1.mate = adj_entry2
@@ -486,7 +499,7 @@ def _greedy_coloring_with_interchange(G, nodes):
             while connected and col1 < k:
                 col1 += 1
                 neighbor_cols = graph[node].iter_neighbors_color(col1)
-                col1_adj = [it for it in neighbor_cols]
+                col1_adj = list(neighbor_cols)
 
                 col2 = col1
                 while connected and col2 < k:
diff --git a/networkx/algorithms/coloring/tests/test_coloring.py b/networkx/algorithms/coloring/tests/test_coloring.py
index cc422e3..a2a4e39 100644
--- a/networkx/algorithms/coloring/tests/test_coloring.py
+++ b/networkx/algorithms/coloring/tests/test_coloring.py
@@ -2,6 +2,8 @@
 
 """
 
+import itertools
+
 import pytest
 
 import networkx as nx
@@ -429,6 +431,76 @@ class TestColoring:
         )
         check_state(**params)
 
+    def test_strategy_saturation_largest_first(self):
+        def color_remaining_nodes(
+            G,
+            colored_nodes,
+            full_color_assignment=None,
+            nodes_to_add_between_calls=1,
+        ):
+            color_assignments = []
+            aux_colored_nodes = colored_nodes.copy()
+
+            node_iterator = nx.algorithms.coloring.greedy_coloring.strategy_saturation_largest_first(
+                G, aux_colored_nodes
+            )
+
+            for u in node_iterator:
+                # Set to keep track of colors of neighbours
+                neighbour_colors = {
+                    aux_colored_nodes[v] for v in G[u] if v in aux_colored_nodes
+                }
+                # Find the first unused color.
+                for color in itertools.count():
+                    if color not in neighbour_colors:
+                        break
+                aux_colored_nodes[u] = color
+                color_assignments.append((u, color))
+
+                # Color nodes between iterations
+                for i in range(nodes_to_add_between_calls - 1):
+                    if not len(color_assignments) + len(colored_nodes) >= len(
+                        full_color_assignment
+                    ):
+                        full_color_assignment_node, color = full_color_assignment[
+                            len(color_assignments) + len(colored_nodes)
+                        ]
+
+                        # Assign the new color to the current node.
+                        aux_colored_nodes[full_color_assignment_node] = color
+                        color_assignments.append((full_color_assignment_node, color))
+
+            return color_assignments, aux_colored_nodes
+
+        for G, _, _ in SPECIAL_TEST_CASES["saturation_largest_first"]:
+            G = G()
+
+            # Check that function still works when nodes are colored between iterations
+            for nodes_to_add_between_calls in range(1, 5):
+                # Get a full color assignment, (including the order in which nodes were colored)
+                colored_nodes = {}
+                full_color_assignment, full_colored_nodes = color_remaining_nodes(
+                    G, colored_nodes
+                )
+
+                # For each node in the color assignment, add it to colored_nodes and re-run the function
+                for ind, (node, color) in enumerate(full_color_assignment):
+                    colored_nodes[node] = color
+
+                    (
+                        partial_color_assignment,
+                        partial_colored_nodes,
+                    ) = color_remaining_nodes(
+                        G,
+                        colored_nodes,
+                        full_color_assignment=full_color_assignment,
+                        nodes_to_add_between_calls=nodes_to_add_between_calls,
+                    )
+
+                    # Check that the color assignment and order of remaining nodes are the same
+                    assert full_color_assignment[ind + 1 :] == partial_color_assignment
+                    assert full_colored_nodes == partial_colored_nodes
+
 
 #  ############################  Utility functions ############################
 def verify_coloring(graph, coloring):
@@ -456,7 +528,7 @@ def dict_to_sets(colors):
     k = max(colors.values()) + 1
     sets = [set() for _ in range(k)]
 
-    for (node, color) in colors.items():
+    for node, color in colors.items():
         sets[color].add(node)
 
     return sets
@@ -765,12 +837,12 @@ def check_state(L, N, H, F, C):
     s = len(C[0])
     num_colors = len(C.keys())
 
-    assert all(u in L[v] for u in L.keys() for v in L[u])
-    assert all(F[u] != F[v] for u in L.keys() for v in L[u])
-    assert all(len(L[u]) < num_colors for u in L.keys())
+    assert all(u in L[v] for u in L for v in L[u])
+    assert all(F[u] != F[v] for u in L for v in L[u])
+    assert all(len(L[u]) < num_colors for u in L)
     assert all(len(C[x]) == s for x in C)
-    assert all(H[(c1, c2)] >= 0 for c1 in C.keys() for c2 in C.keys())
-    assert all(N[(u, F[u])] == 0 for u in F.keys())
+    assert all(H[(c1, c2)] >= 0 for c1 in C for c2 in C)
+    assert all(N[(u, F[u])] == 0 for u in F)
 
 
 def max_degree(G):
@@ -782,7 +854,7 @@ def make_params_from_graph(G, F):
     """Returns {N, L, H, C} from the given graph."""
     num_nodes = len(G)
     L = {u: [] for u in range(num_nodes)}
-    for (u, v) in G.edges:
+    for u, v in G.edges:
         L[u].append(v)
         L[v].append(u)
 
diff --git a/networkx/algorithms/communicability_alg.py b/networkx/algorithms/communicability_alg.py
index ba4b4ab..1d2161d 100644
--- a/networkx/algorithms/communicability_alg.py
+++ b/networkx/algorithms/communicability_alg.py
@@ -36,7 +36,7 @@ def communicability(G):
        Communicability between all pairs of nodes in G  using spectral
        decomposition.
     communicability_betweenness_centrality:
-       Communicability betweeness centrality for each node in G.
+       Communicability betweenness centrality for each node in G.
 
     Notes
     -----
@@ -116,7 +116,7 @@ def communicability_exp(G):
     communicability:
        Communicability between pairs of nodes in G.
     communicability_betweenness_centrality:
-       Communicability betweeness centrality for each node in G.
+       Communicability betweenness centrality for each node in G.
 
     Notes
     -----
diff --git a/networkx/algorithms/community/__init__.py b/networkx/algorithms/community/__init__.py
index 9aea405..fa78220 100644
--- a/networkx/algorithms/community/__init__.py
+++ b/networkx/algorithms/community/__init__.py
@@ -1,13 +1,11 @@
 """Functions for computing and measuring community structure.
 
-The functions in this class are not imported into the top-level
-:mod:`networkx` namespace. You can access these functions by importing
-the :mod:`networkx.algorithms.community` module, then accessing the
+The ``community`` subpackage can be accessed by using :mod:`networkx.community`, then accessing the
 functions as attributes of ``community``. For example::
 
-    >>> from networkx.algorithms import community
+    >>> import networkx as nx
     >>> G = nx.barbell_graph(5, 1)
-    >>> communities_generator = community.girvan_newman(G)
+    >>> communities_generator = nx.community.girvan_newman(G)
     >>> top_level_communities = next(communities_generator)
     >>> next_level_communities = next(communities_generator)
     >>> sorted(map(sorted, next_level_communities))
diff --git a/networkx/algorithms/community/asyn_fluid.py b/networkx/algorithms/community/asyn_fluid.py
index 3d723be..0ec99fb 100644
--- a/networkx/algorithms/community/asyn_fluid.py
+++ b/networkx/algorithms/community/asyn_fluid.py
@@ -79,7 +79,7 @@ def asyn_fluidc(G, k, max_iter=100, seed=None):
     communities = {n: i for i, n in enumerate(vertices[:k])}
     density = {}
     com_to_numvertices = {}
-    for vertex in communities.keys():
+    for vertex in communities:
         com_to_numvertices[communities[vertex]] = 1
         density[communities[vertex]] = max_density
     # Set up control variables and start iterating
diff --git a/networkx/algorithms/community/kclique.py b/networkx/algorithms/community/kclique.py
index 804b1c9..21404c7 100644
--- a/networkx/algorithms/community/kclique.py
+++ b/networkx/algorithms/community/kclique.py
@@ -27,14 +27,13 @@ def k_clique_communities(G, k, cliques=None):
 
     Examples
     --------
-    >>> from networkx.algorithms.community import k_clique_communities
     >>> G = nx.complete_graph(5)
     >>> K5 = nx.convert_node_labels_to_integers(G, first_label=2)
     >>> G.add_edges_from(K5.edges())
-    >>> c = list(k_clique_communities(G, 4))
+    >>> c = list(nx.community.k_clique_communities(G, 4))
     >>> sorted(list(c[0]))
     [0, 1, 2, 3, 4, 5, 6]
-    >>> list(k_clique_communities(G, 6))
+    >>> list(nx.community.k_clique_communities(G, 6))
     []
 
     References
diff --git a/networkx/algorithms/community/label_propagation.py b/networkx/algorithms/community/label_propagation.py
index 09c07c2..8494b79 100644
--- a/networkx/algorithms/community/label_propagation.py
+++ b/networkx/algorithms/community/label_propagation.py
@@ -69,7 +69,6 @@ def asyn_lpa_communities(G, weight=None, seed=None):
         seed.shuffle(nodes)
 
         for node in nodes:
-
             if not G[node]:
                 continue
 
@@ -155,7 +154,7 @@ def _color_network(G):
 
     Returns a dict keyed by color to a set of nodes with that color.
     """
-    coloring = dict()  # color => set(node)
+    coloring = {}  # color => set(node)
     colors = nx.coloring.greedy_color(G)
     for node, color in colors.items():
         if color in coloring:
diff --git a/networkx/algorithms/community/louvain.py b/networkx/algorithms/community/louvain.py
index 9471130..ca71c0c 100644
--- a/networkx/algorithms/community/louvain.py
+++ b/networkx/algorithms/community/louvain.py
@@ -83,9 +83,8 @@ def louvain_communities(
     Examples
     --------
     >>> import networkx as nx
-    >>> import networkx.algorithms.community as nx_comm
     >>> G = nx.petersen_graph()
-    >>> nx_comm.louvain_communities(G, seed=123)
+    >>> nx.community.louvain_communities(G, seed=123)
     [{0, 4, 5, 7, 9}, {1, 2, 3, 6, 8}]
 
     Notes
@@ -125,7 +124,7 @@ def louvain_partitions(
     A dendrogram is a diagram representing a tree and each level represents
     a partition of the G graph. The top level contains the smallest communities
     and as you traverse to the bottom of the tree the communities get bigger
-    and the overal modularity increases making the partition better.
+    and the overall modularity increases making the partition better.
 
     Each level is generated by executing the two phases of the Louvain Community
     Detection Algorithm.
@@ -218,8 +217,8 @@ def _one_level(G, m, partition, resolution=1, is_directed=False, seed=None):
     if is_directed:
         in_degrees = dict(G.in_degree(weight="weight"))
         out_degrees = dict(G.out_degree(weight="weight"))
-        Stot_in = [deg for deg in in_degrees.values()]
-        Stot_out = [deg for deg in out_degrees.values()]
+        Stot_in = list(in_degrees.values())
+        Stot_out = list(out_degrees.values())
         # Calculate weights for both in and out neighbours
         nbrs = {}
         for u in G:
@@ -230,7 +229,7 @@ def _one_level(G, m, partition, resolution=1, is_directed=False, seed=None):
                 nbrs[u][n] += wt
     else:
         degrees = dict(G.degree(weight="weight"))
-        Stot = [deg for deg in degrees.values()]
+        Stot = list(degrees.values())
         nbrs = {u: {v: data["weight"] for v, data in G[u].items() if v != u} for u in G}
     rand_nodes = list(G.nodes)
     seed.shuffle(rand_nodes)
@@ -332,7 +331,7 @@ def _gen_graph(G, partition):
         com1 = node2com[node1]
         com2 = node2com[node2]
         temp = H.get_edge_data(com1, com2, {"weight": 0})["weight"]
-        H.add_edge(com1, com2, **{"weight": wt + temp})
+        H.add_edge(com1, com2, weight=wt + temp)
     return H
 
 
diff --git a/networkx/algorithms/community/lukes.py b/networkx/algorithms/community/lukes.py
index b34077a..632dbd9 100644
--- a/networkx/algorithms/community/lukes.py
+++ b/networkx/algorithms/community/lukes.py
@@ -26,7 +26,6 @@ def _split_n_from(n, min_size_of_first_part):
 
 
 def lukes_partitioning(G, max_size, node_weight=None, edge_weight=None):
-
     """Optimal partitioning of a weighted tree using the Lukes algorithm.
 
     This algorithm partitions a connected, acyclic graph featuring integer
@@ -126,7 +125,7 @@ def lukes_partitioning(G, max_size, node_weight=None, edge_weight=None):
     def _a_parent_of_leaves_only(gr):
         tleaves = set(_leaves(gr))
         for n in set(gr.nodes) - tleaves:
-            if all([x in tleaves for x in nx.descendants(gr, n)]):
+            if all(x in tleaves for x in nx.descendants(gr, n)):
                 return n
 
     @lru_cache(CLUSTER_EVAL_CACHE_SIZE)
@@ -146,15 +145,14 @@ def lukes_partitioning(G, max_size, node_weight=None, edge_weight=None):
         assert len(ccx) == 1
         return ccx[0]
 
-    def _concatenate_or_merge(partition_1, partition_2, x, i, ref_weigth):
-
+    def _concatenate_or_merge(partition_1, partition_2, x, i, ref_weight):
         ccx = _pivot(partition_1, x)
         cci = _pivot(partition_2, i)
         merged_xi = ccx.union(cci)
 
         # We first check if we can do the merge.
         # If so, we do the actual calculations, otherwise we concatenate
-        if _weight_of_cluster(frozenset(merged_xi)) <= ref_weigth:
+        if _weight_of_cluster(frozenset(merged_xi)) <= ref_weight:
             cp1 = list(filter(lambda x: x != ccx, partition_1))
             cp2 = list(filter(lambda x: x != cci, partition_2))
 
@@ -167,13 +165,13 @@ def lukes_partitioning(G, max_size, node_weight=None, edge_weight=None):
     # INITIALIZATION -----------------------
     leaves = set(_leaves(t_G))
     for lv in leaves:
-        t_G.nodes[lv][PKEY] = dict()
+        t_G.nodes[lv][PKEY] = {}
         slot = safe_G.nodes[lv][node_weight]
         t_G.nodes[lv][PKEY][slot] = [{lv}]
         t_G.nodes[lv][PKEY][0] = [{lv}]
 
     for inner in [x for x in t_G.nodes if x not in leaves]:
-        t_G.nodes[inner][PKEY] = dict()
+        t_G.nodes[inner][PKEY] = {}
         slot = safe_G.nodes[inner][node_weight]
         t_G.nodes[inner][PKEY][slot] = [{inner}]
 
@@ -183,7 +181,7 @@ def lukes_partitioning(G, max_size, node_weight=None, edge_weight=None):
         weight_of_x = safe_G.nodes[x_node][node_weight]
         best_value = 0
         best_partition = None
-        bp_buffer = dict()
+        bp_buffer = {}
         x_descendants = nx.descendants(t_G, x_node)
         for i_node in x_descendants:
             for j in range(weight_of_x, max_size + 1):
diff --git a/networkx/algorithms/community/modularity_max.py b/networkx/algorithms/community/modularity_max.py
index 67a4961..277a0c3 100644
--- a/networkx/algorithms/community/modularity_max.py
+++ b/networkx/algorithms/community/modularity_max.py
@@ -10,7 +10,6 @@ from networkx.utils.mapped_queue import MappedQueue
 __all__ = [
     "greedy_modularity_communities",
     "naive_greedy_modularity_communities",
-    "_naive_greedy_modularity_communities",
 ]
 
 
@@ -225,7 +224,11 @@ def _greedy_modularity_communities_generator(G, weight=None, resolution=1):
 
 
 def greedy_modularity_communities(
-    G, weight=None, resolution=1, cutoff=1, best_n=None, n_communities=None
+    G,
+    weight=None,
+    resolution=1,
+    cutoff=1,
+    best_n=None,
 ):
     r"""Find communities in G using greedy modularity maximization.
 
@@ -234,7 +237,7 @@ def greedy_modularity_communities(
 
     Greedy modularity maximization begins with each node in its own community
     and repeatedly joins the pair of communities that lead to the largest
-    modularity until no futher increase in modularity is possible (a maximum).
+    modularity until no further increase in modularity is possible (a maximum).
     Two keyword arguments adjust the stopping condition. `cutoff` is a lower
     limit on the number of communities so you can stop the process before
     reaching a maximum (used to save computation time). `best_n` is an upper
@@ -271,23 +274,10 @@ def greedy_modularity_communities(
         starts to decrease until `best_n` communities remain.
         If ``None``, don't force it to continue beyond a maximum.
 
-    n_communities : int or None, optional (default=None)
-
-        .. deprecated:: 3.0
-           The `n_communities` parameter is deprecated - use `cutoff` and/or
-           `best_n` to set bounds on the desired number of communities instead.
-
-        A minimum number of communities below which the merging process stops.
-        The process stops at this number of communities even if modularity
-        is not maximized. The goal is to let the user stop the process early.
-        The process stops before the cutoff if it finds a maximum of modularity.
-
     Raises
     ------
     ValueError : If the `cutoff` or `best_n`  value is not in the range
         ``[1, G.number_of_nodes()]``, or if `best_n` < `cutoff`.
-        Also raised if `cutoff` is used with the deprecated `n_communities`
-        parameter.
 
     Returns
     -------
@@ -297,9 +287,8 @@ def greedy_modularity_communities(
 
     Examples
     --------
-    >>> from networkx.algorithms.community import greedy_modularity_communities
     >>> G = nx.karate_club_graph()
-    >>> c = greedy_modularity_communities(G)
+    >>> c = nx.community.greedy_modularity_communities(G)
     >>> sorted(c[0])
     [8, 14, 15, 18, 20, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33]
 
@@ -330,18 +319,6 @@ def greedy_modularity_communities(
             return [set(G)]
     else:
         best_n = G.number_of_nodes()
-    if n_communities is not None:
-        import warnings
-
-        warnings.warn(
-            "kwarg ``n_communities`` in greedy_modularity_communities is deprecated"
-            "and will be removed in version 3.0.   Use ``cutoff`` instead.",
-            DeprecationWarning,
-        )
-        if cutoff == 1:
-            cutoff = n_communities
-        else:
-            raise ValueError(f"Can not set both n_communities and cutoff.")
 
     # retrieve generator object to construct output
     community_gen = _greedy_modularity_communities_generator(
@@ -410,10 +387,8 @@ def naive_greedy_modularity_communities(G, resolution=1, weight=None):
 
     Examples
     --------
-    >>> from networkx.algorithms.community import \
-    ... naive_greedy_modularity_communities
     >>> G = nx.karate_club_graph()
-    >>> c = naive_greedy_modularity_communities(G)
+    >>> c = nx.community.naive_greedy_modularity_communities(G)
     >>> sorted(c[0])
     [8, 14, 15, 18, 20, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33]
 
@@ -423,7 +398,7 @@ def naive_greedy_modularity_communities(G, resolution=1, weight=None):
     modularity
     """
     # First create one community for each node
-    communities = list(frozenset([u]) for u in G.nodes())
+    communities = [frozenset([u]) for u in G.nodes()]
     # Track merges
     merges = []
     # Greedily merge communities until no improvement is possible
@@ -468,7 +443,3 @@ def naive_greedy_modularity_communities(G, resolution=1, weight=None):
             communities[i] = frozenset([])
     # Remove empty communities and sort
     return sorted((c for c in communities if len(c) > 0), key=len, reverse=True)
-
-
-# old name
-_naive_greedy_modularity_communities = naive_greedy_modularity_communities
diff --git a/networkx/algorithms/community/quality.py b/networkx/algorithms/community/quality.py
index 7de8059..6353bd4 100644
--- a/networkx/algorithms/community/quality.py
+++ b/networkx/algorithms/community/quality.py
@@ -11,14 +11,14 @@ from networkx.algorithms.community.community_utils import is_partition
 from networkx.utils import not_implemented_for
 from networkx.utils.decorators import argmap
 
-__all__ = ["coverage", "modularity", "performance", "partition_quality"]
+__all__ = ["modularity", "partition_quality"]
 
 
 class NotAPartition(NetworkXError):
     """Raised if a given collection is not a partition."""
 
     def __init__(self, G, collection):
-        msg = f"{G} is not a valid partition of the graph {collection}"
+        msg = f"{collection} is not a valid partition of the graph {G}"
         super().__init__(msg)
 
 
@@ -59,6 +59,7 @@ def _require_partition(G, partition):
 require_partition = argmap(_require_partition, (0, 1))
 
 
+@nx._dispatch
 def intra_community_edges(G, partition):
     """Returns the number of intra-community edges for a partition of `G`.
 
@@ -76,6 +77,7 @@ def intra_community_edges(G, partition):
     return sum(G.subgraph(block).size() for block in partition)
 
 
+@nx._dispatch
 def inter_community_edges(G, partition):
     """Returns the number of inter-community edges for a partition of `G`.
     according to the given
@@ -139,109 +141,6 @@ def inter_community_non_edges(G, partition):
     return inter_community_edges(nx.complement(G), partition)
 
 
-@not_implemented_for("multigraph")
-@require_partition
-def performance(G, partition):
-    """Returns the performance of a partition.
-
-    .. deprecated:: 2.6
-       Use `partition_quality` instead.
-
-    The *performance* of a partition is the number of
-    intra-community edges plus inter-community non-edges divided by the total
-    number of potential edges.
-
-    Parameters
-    ----------
-    G : NetworkX graph
-        A simple graph (directed or undirected).
-
-    partition : sequence
-        Partition of the nodes of `G`, represented as a sequence of
-        sets of nodes. Each block of the partition represents a
-        community.
-
-    Returns
-    -------
-    float
-        The performance of the partition, as defined above.
-
-    Raises
-    ------
-    NetworkXError
-        If `partition` is not a valid partition of the nodes of `G`.
-
-    References
-    ----------
-    .. [1] Santo Fortunato.
-           "Community Detection in Graphs".
-           *Physical Reports*, Volume 486, Issue 3--5 pp. 75--174
-           <https://arxiv.org/abs/0906.0612>
-
-    """
-    # Compute the number of intra-community edges and inter-community
-    # edges.
-    intra_edges = intra_community_edges(G, partition)
-    inter_edges = inter_community_non_edges(G, partition)
-    # Compute the number of edges in the complete graph (directed or
-    # undirected, as it depends on `G`) on `n` nodes.
-    #
-    # (If `G` is an undirected graph, we divide by two since we have
-    # double-counted each potential edge. We use integer division since
-    # `total_pairs` is guaranteed to be even.)
-    n = len(G)
-    total_pairs = n * (n - 1)
-    if not G.is_directed():
-        total_pairs //= 2
-    return (intra_edges + inter_edges) / total_pairs
-
-
-@require_partition
-def coverage(G, partition):
-    """Returns the coverage of a partition.
-
-    .. deprecated:: 2.6
-       Use `partition_quality` instead.
-
-    The *coverage* of a partition is the ratio of the number of
-    intra-community edges to the total number of edges in the graph.
-
-    Parameters
-    ----------
-    G : NetworkX graph
-
-    partition : sequence
-        Partition of the nodes of `G`, represented as a sequence of
-        sets of nodes. Each block of the partition represents a
-        community.
-
-    Returns
-    -------
-    float
-        The coverage of the partition, as defined above.
-
-    Raises
-    ------
-    NetworkXError
-        If `partition` is not a valid partition of the nodes of `G`.
-
-    Notes
-    -----
-    If `G` is a multigraph, the multiplicity of edges is counted.
-
-    References
-    ----------
-    .. [1] Santo Fortunato.
-           "Community Detection in Graphs".
-           *Physical Reports*, Volume 486, Issue 3--5 pp. 75--174
-           <https://arxiv.org/abs/0906.0612>
-
-    """
-    intra_edges = intra_community_edges(G, partition)
-    total_edges = G.number_of_edges()
-    return intra_edges / total_edges
-
-
 def modularity(G, communities, weight="weight", resolution=1):
     r"""Returns the modularity of the given partition of the graph.
 
@@ -294,7 +193,7 @@ def modularity(G, communities, weight="weight", resolution=1):
     Returns
     -------
     Q : float
-        The modularity of the paritition.
+        The modularity of the partition.
 
     Raises
     ------
@@ -303,11 +202,10 @@ def modularity(G, communities, weight="weight", resolution=1):
 
     Examples
     --------
-    >>> import networkx.algorithms.community as nx_comm
     >>> G = nx.barbell_graph(3, 0)
-    >>> nx_comm.modularity(G, [{0, 1, 2}, {3, 4, 5}])
+    >>> nx.community.modularity(G, [{0, 1, 2}, {3, 4, 5}])
     0.35714285714285715
-    >>> nx_comm.modularity(G, nx_comm.label_propagation_communities(G))
+    >>> nx.community.modularity(G, nx.community.label_propagation_communities(G))
     0.35714285714285715
 
     References
diff --git a/networkx/algorithms/community/tests/test_asyn_fluid.py b/networkx/algorithms/community/tests/test_asyn_fluid.py
index f87e367..cd108dd 100644
--- a/networkx/algorithms/community/tests/test_asyn_fluid.py
+++ b/networkx/algorithms/community/tests/test_asyn_fluid.py
@@ -1,7 +1,8 @@
 import pytest
 
+import networkx as nx
 from networkx import Graph, NetworkXError
-from networkx.algorithms.community.asyn_fluid import asyn_fluidc
+from networkx.algorithms.community import asyn_fluidc
 
 
 def test_exceptions():
diff --git a/networkx/algorithms/community/tests/test_centrality.py b/networkx/algorithms/community/tests/test_centrality.py
index 43b6d2b..a31d9a8 100644
--- a/networkx/algorithms/community/tests/test_centrality.py
+++ b/networkx/algorithms/community/tests/test_centrality.py
@@ -5,7 +5,6 @@ module.
 from operator import itemgetter
 
 import networkx as nx
-from networkx.algorithms.community import girvan_newman
 
 
 def set_of_sets(iterable):
@@ -29,14 +28,14 @@ class TestGirvanNewman:
 
     def test_no_edges(self):
         G = nx.empty_graph(3)
-        communities = list(girvan_newman(G))
+        communities = list(nx.community.girvan_newman(G))
         assert len(communities) == 1
         validate_communities(communities[0], [{0}, {1}, {2}])
 
     def test_undirected(self):
         # Start with the graph .-.-.-.
         G = nx.path_graph(4)
-        communities = list(girvan_newman(G))
+        communities = list(nx.community.girvan_newman(G))
         assert len(communities) == 3
         # After one removal, we get the graph .-. .-.
         validate_communities(communities[0], [{0, 1}, {2, 3}])
@@ -50,7 +49,7 @@ class TestGirvanNewman:
 
     def test_directed(self):
         G = nx.DiGraph(nx.path_graph(4))
-        communities = list(girvan_newman(G))
+        communities = list(nx.community.girvan_newman(G))
         assert len(communities) == 3
         validate_communities(communities[0], [{0, 1}, {2, 3}])
         validate_possible_communities(
@@ -62,7 +61,7 @@ class TestGirvanNewman:
         G = nx.path_graph(4)
         G.add_edge(0, 0)
         G.add_edge(2, 2)
-        communities = list(girvan_newman(G))
+        communities = list(nx.community.girvan_newman(G))
         assert len(communities) == 3
         validate_communities(communities[0], [{0, 1}, {2, 3}])
         validate_possible_communities(
@@ -78,7 +77,7 @@ class TestGirvanNewman:
         def heaviest(G):
             return max(G.edges(data="weight"), key=itemgetter(2))[:2]
 
-        communities = list(girvan_newman(G, heaviest))
+        communities = list(nx.community.girvan_newman(G, heaviest))
         assert len(communities) == 3
         validate_communities(communities[0], [{0}, {1, 2, 3}])
         validate_communities(communities[1], [{0}, {1}, {2, 3}])
diff --git a/networkx/algorithms/community/tests/test_kclique.py b/networkx/algorithms/community/tests/test_kclique.py
index 12c8e60..044b3e2 100644
--- a/networkx/algorithms/community/tests/test_kclique.py
+++ b/networkx/algorithms/community/tests/test_kclique.py
@@ -3,16 +3,15 @@ from itertools import combinations
 import pytest
 
 import networkx as nx
-from networkx.algorithms.community import k_clique_communities
 
 
 def test_overlapping_K5():
     G = nx.Graph()
     G.add_edges_from(combinations(range(5), 2))  # Add a five clique
     G.add_edges_from(combinations(range(2, 7), 2))  # Add another five clique
-    c = list(k_clique_communities(G, 4))
+    c = list(nx.community.k_clique_communities(G, 4))
     assert c == [frozenset(range(7))]
-    c = set(k_clique_communities(G, 5))
+    c = set(nx.community.k_clique_communities(G, 5))
     assert c == {frozenset(range(5)), frozenset(range(2, 7))}
 
 
@@ -20,7 +19,7 @@ def test_isolated_K5():
     G = nx.Graph()
     G.add_edges_from(combinations(range(0, 5), 2))  # Add a five clique
     G.add_edges_from(combinations(range(5, 10), 2))  # Add another five clique
-    c = set(k_clique_communities(G, 5))
+    c = set(nx.community.k_clique_communities(G, 5))
     assert c == {frozenset(range(5)), frozenset(range(5, 10))}
 
 
@@ -29,7 +28,7 @@ class TestZacharyKarateClub:
         self.G = nx.karate_club_graph()
 
     def _check_communities(self, k, expected):
-        communities = set(k_clique_communities(self.G, k))
+        communities = set(nx.community.k_clique_communities(self.G, k))
         assert communities == expected
 
     def test_k2(self):
@@ -89,4 +88,4 @@ class TestZacharyKarateClub:
 
 def test_bad_k():
     with pytest.raises(nx.NetworkXError):
-        list(k_clique_communities(nx.Graph(), 1))
+        list(nx.community.k_clique_communities(nx.Graph(), 1))
diff --git a/networkx/algorithms/community/tests/test_label_propagation.py b/networkx/algorithms/community/tests/test_label_propagation.py
index 44e4489..8271ca5 100644
--- a/networkx/algorithms/community/tests/test_label_propagation.py
+++ b/networkx/algorithms/community/tests/test_label_propagation.py
@@ -3,10 +3,6 @@ from itertools import chain, combinations
 import pytest
 
 import networkx as nx
-from networkx.algorithms.community import (
-    asyn_lpa_communities,
-    label_propagation_communities,
-)
 
 
 def test_directed_not_supported():
@@ -16,15 +12,15 @@ def test_directed_not_supported():
         test.add_edge("a", "b")
         test.add_edge("a", "c")
         test.add_edge("b", "d")
-        result = label_propagation_communities(test)
+        result = nx.community.label_propagation_communities(test)
 
 
 def test_iterator_vs_iterable():
     G = nx.empty_graph("a")
-    assert list(label_propagation_communities(G)) == [{"a"}]
-    for community in label_propagation_communities(G):
+    assert list(nx.community.label_propagation_communities(G)) == [{"a"}]
+    for community in nx.community.label_propagation_communities(G):
         assert community == {"a"}
-    pytest.raises(TypeError, next, label_propagation_communities(G))
+    pytest.raises(TypeError, next, nx.community.label_propagation_communities(G))
 
 
 def test_one_node():
@@ -34,7 +30,7 @@ def test_one_node():
     # The expected communities are:
     ground_truth = {frozenset(["a"])}
 
-    communities = label_propagation_communities(test)
+    communities = nx.community.label_propagation_communities(test)
     result = {frozenset(c) for c in communities}
     assert result == ground_truth
 
@@ -53,7 +49,7 @@ def test_unconnected_communities():
     # The expected communities are:
     ground_truth = {frozenset(["a", "c", "d"]), frozenset(["b", "e", "f"])}
 
-    communities = label_propagation_communities(test)
+    communities = nx.community.label_propagation_communities(test)
     result = {frozenset(c) for c in communities}
     assert result == ground_truth
 
@@ -103,7 +99,7 @@ def test_connected_communities():
     }
     ground_truth = (ground_truth1, ground_truth2)
 
-    communities = label_propagation_communities(test)
+    communities = nx.community.label_propagation_communities(test)
     result = {frozenset(c) for c in communities}
     assert result in ground_truth
 
@@ -114,8 +110,8 @@ def test_termination():
     test1 = nx.karate_club_graph()
     test2 = nx.caveman_graph(2, 10)
     test2.add_edges_from([(0, 20), (20, 10)])
-    asyn_lpa_communities(test1)
-    asyn_lpa_communities(test2)
+    nx.community.asyn_lpa_communities(test1)
+    nx.community.asyn_lpa_communities(test2)
 
 
 class TestAsynLpaCommunities:
@@ -128,7 +124,7 @@ class TestAsynLpaCommunities:
         instances, each element of which is a node in the graph.
 
         """
-        communities = asyn_lpa_communities(G)
+        communities = nx.community.asyn_lpa_communities(G)
         result = {frozenset(c) for c in communities}
         assert result == expected
 
@@ -151,7 +147,7 @@ class TestAsynLpaCommunities:
     def test_seed_argument(self):
         G = nx.Graph(["ab", "ac", "bc", "de", "df", "fe"])
         ground_truth = {frozenset("abc"), frozenset("def")}
-        communities = asyn_lpa_communities(G, seed=1)
+        communities = nx.community.asyn_lpa_communities(G, seed=1)
         result = {frozenset(c) for c in communities}
         assert result == ground_truth
 
diff --git a/networkx/algorithms/community/tests/test_louvain.py b/networkx/algorithms/community/tests/test_louvain.py
index 1c9b287..ed5c2a3 100644
--- a/networkx/algorithms/community/tests/test_louvain.py
+++ b/networkx/algorithms/community/tests/test_louvain.py
@@ -1,11 +1,4 @@
 import networkx as nx
-from networkx.algorithms.community import (
-    is_partition,
-    louvain_communities,
-    louvain_partitions,
-    modularity,
-    partition_quality,
-)
 
 
 def test_modularity_increase():
@@ -13,10 +6,10 @@ def test_modularity_increase():
         250, 3, 1.5, 0.009, average_degree=5, min_community=20, seed=10
     )
     partition = [{u} for u in G.nodes()]
-    mod = modularity(G, partition)
-    partition = louvain_communities(G)
+    mod = nx.community.modularity(G, partition)
+    partition = nx.community.louvain_communities(G)
 
-    assert modularity(G, partition) > mod
+    assert nx.community.modularity(G, partition) > mod
 
 
 def test_valid_partition():
@@ -24,11 +17,11 @@ def test_valid_partition():
         250, 3, 1.5, 0.009, average_degree=5, min_community=20, seed=10
     )
     H = G.to_directed()
-    partition = louvain_communities(G)
-    partition2 = louvain_communities(H)
+    partition = nx.community.louvain_communities(G)
+    partition2 = nx.community.louvain_communities(H)
 
-    assert is_partition(G, partition)
-    assert is_partition(H, partition2)
+    assert nx.community.is_partition(G, partition)
+    assert nx.community.is_partition(H, partition2)
 
 
 def test_karate_club_partition():
@@ -39,14 +32,14 @@ def test_karate_club_partition():
         {23, 25, 27, 28, 24, 31},
         {32, 33, 8, 14, 15, 18, 20, 22, 26, 29, 30},
     ]
-    partition = louvain_communities(G, seed=2, weight=None)
+    partition = nx.community.louvain_communities(G, seed=2, weight=None)
 
     assert part == partition
 
 
 def test_partition_iterator():
     G = nx.path_graph(15)
-    parts_iter = louvain_partitions(G, seed=42)
+    parts_iter = nx.community.louvain_partitions(G, seed=42)
     first_part = next(parts_iter)
     first_copy = [s.copy() for s in first_part]
 
@@ -100,10 +93,10 @@ def test_directed_partition():
     H.add_edges_from(H_edges)
 
     G_expected_partition = [{0, 1, 2}, {3, 4}, {5}, {6}, {8, 7}, {9, 10}]
-    G_partition = louvain_communities(G, seed=123, weight=None)
+    G_partition = nx.community.louvain_communities(G, seed=123, weight=None)
 
     H_expected_partition = [{2, 3, 4, 5}, {8, 1, 6, 7}, {9, 10, 11}]
-    H_partition = louvain_communities(H, seed=123, weight=None)
+    H_partition = nx.community.louvain_communities(H, seed=123, weight=None)
 
     assert G_partition == G_expected_partition
     assert H_partition == H_expected_partition
@@ -121,9 +114,9 @@ def test_none_weight_param():
         {23, 25, 27, 28, 24, 31},
         {32, 33, 8, 14, 15, 18, 20, 22, 26, 29, 30},
     ]
-    partition1 = louvain_communities(G, weight=None, seed=2)
-    partition2 = louvain_communities(G, weight="foo", seed=2)
-    partition3 = louvain_communities(G, weight="weight", seed=2)
+    partition1 = nx.community.louvain_communities(G, weight=None, seed=2)
+    partition2 = nx.community.louvain_communities(G, weight="foo", seed=2)
+    partition3 = nx.community.louvain_communities(G, weight="weight", seed=2)
 
     assert part == partition1
     assert part != partition2
@@ -139,15 +132,15 @@ def test_quality():
     I = nx.MultiGraph(G)
     J = nx.MultiDiGraph(H)
 
-    partition = louvain_communities(G)
-    partition2 = louvain_communities(H)
-    partition3 = louvain_communities(I)
-    partition4 = louvain_communities(J)
+    partition = nx.community.louvain_communities(G)
+    partition2 = nx.community.louvain_communities(H)
+    partition3 = nx.community.louvain_communities(I)
+    partition4 = nx.community.louvain_communities(J)
 
-    quality = partition_quality(G, partition)[0]
-    quality2 = partition_quality(H, partition2)[0]
-    quality3 = partition_quality(I, partition3)[0]
-    quality4 = partition_quality(J, partition4)[0]
+    quality = nx.community.partition_quality(G, partition)[0]
+    quality2 = nx.community.partition_quality(H, partition2)[0]
+    quality3 = nx.community.partition_quality(I, partition3)[0]
+    quality4 = nx.community.partition_quality(J, partition4)[0]
 
     assert quality >= 0.65
     assert quality2 >= 0.65
@@ -163,9 +156,9 @@ def test_multigraph():
     G.add_edge(0, 9, foo=20)
     H.add_edge(0, 9, foo=20)
 
-    partition1 = louvain_communities(G, seed=1234)
-    partition2 = louvain_communities(H, seed=1234)
-    partition3 = louvain_communities(H, weight="foo", seed=1234)
+    partition1 = nx.community.louvain_communities(G, seed=1234)
+    partition2 = nx.community.louvain_communities(H, seed=1234)
+    partition3 = nx.community.louvain_communities(H, weight="foo", seed=1234)
 
     assert partition1 == partition2 != partition3
 
@@ -175,9 +168,9 @@ def test_resolution():
         250, 3, 1.5, 0.009, average_degree=5, min_community=20, seed=10
     )
 
-    partition1 = louvain_communities(G, resolution=0.5, seed=12)
-    partition2 = louvain_communities(G, seed=12)
-    partition3 = louvain_communities(G, resolution=2, seed=12)
+    partition1 = nx.community.louvain_communities(G, resolution=0.5, seed=12)
+    partition2 = nx.community.louvain_communities(G, seed=12)
+    partition3 = nx.community.louvain_communities(G, resolution=2, seed=12)
 
     assert len(partition1) <= len(partition2) <= len(partition3)
 
@@ -186,9 +179,9 @@ def test_threshold():
     G = nx.LFR_benchmark_graph(
         250, 3, 1.5, 0.009, average_degree=5, min_community=20, seed=10
     )
-    partition1 = louvain_communities(G, threshold=0.3, seed=2)
-    partition2 = louvain_communities(G, seed=2)
-    mod1 = modularity(G, partition1)
-    mod2 = modularity(G, partition2)
+    partition1 = nx.community.louvain_communities(G, threshold=0.3, seed=2)
+    partition2 = nx.community.louvain_communities(G, seed=2)
+    mod1 = nx.community.modularity(G, partition1)
+    mod2 = nx.community.modularity(G, partition2)
 
     assert mod1 < mod2
diff --git a/networkx/algorithms/community/tests/test_lukes.py b/networkx/algorithms/community/tests/test_lukes.py
index 80e2de3..cfa48f0 100644
--- a/networkx/algorithms/community/tests/test_lukes.py
+++ b/networkx/algorithms/community/tests/test_lukes.py
@@ -3,7 +3,6 @@ from itertools import product
 import pytest
 
 import networkx as nx
-from networkx.algorithms.community import lukes_partitioning
 
 EWL = "e_weight"
 NWL = "n_weight"
@@ -11,7 +10,6 @@ NWL = "n_weight"
 
 # first test from the Lukes original paper
 def paper_1_case(float_edge_wt=False, explicit_node_wt=True, directed=False):
-
     # problem-specific constants
     limit = 3
 
@@ -42,7 +40,9 @@ def paper_1_case(float_edge_wt=False, explicit_node_wt=True, directed=False):
     # partitioning
     clusters_1 = {
         frozenset(x)
-        for x in lukes_partitioning(example_1, limit, node_weight=wtu, edge_weight=EWL)
+        for x in nx.community.lukes_partitioning(
+            example_1, limit, node_weight=wtu, edge_weight=EWL
+        )
     }
 
     return clusters_1
@@ -50,7 +50,6 @@ def paper_1_case(float_edge_wt=False, explicit_node_wt=True, directed=False):
 
 # second test from the Lukes original paper
 def paper_2_case(explicit_edge_wt=True, directed=False):
-
     # problem specific constants
     byte_block_size = 32
 
@@ -94,7 +93,7 @@ def paper_2_case(explicit_edge_wt=True, directed=False):
     # partitioning
     clusters_2 = {
         frozenset(x)
-        for x in lukes_partitioning(
+        for x in nx.community.lukes_partitioning(
             example_2, byte_block_size, node_weight=NWL, edge_weight=wtu
         )
     }
@@ -128,11 +127,10 @@ def test_mandatory_tree():
     not_a_tree = nx.complete_graph(4)
 
     with pytest.raises(nx.NotATree):
-        lukes_partitioning(not_a_tree, 5)
+        nx.community.lukes_partitioning(not_a_tree, 5)
 
 
 def test_mandatory_integrality():
-
     byte_block_size = 32
 
     ex_1_broken = nx.DiGraph()
@@ -149,6 +147,6 @@ def test_mandatory_integrality():
     ex_1_broken.nodes[5][NWL] = 2
 
     with pytest.raises(TypeError):
-        lukes_partitioning(
+        nx.community.lukes_partitioning(
             ex_1_broken, byte_block_size, node_weight=NWL, edge_weight=EWL
         )
diff --git a/networkx/algorithms/community/tests/test_modularity_max.py b/networkx/algorithms/community/tests/test_modularity_max.py
index acdb19d..c1f82d0 100644
--- a/networkx/algorithms/community/tests/test_modularity_max.py
+++ b/networkx/algorithms/community/tests/test_modularity_max.py
@@ -96,7 +96,7 @@ def test_greedy_modularity_communities_directed():
 )
 def test_modularity_communities_weighted(func):
     G = nx.balanced_tree(2, 3)
-    for (a, b) in G.edges:
+    for a, b in G.edges:
         if ((a == 1) or (a == 2)) and (b != 0):
             G[a][b]["weight"] = 10.0
         else:
diff --git a/networkx/algorithms/community/tests/test_quality.py b/networkx/algorithms/community/tests/test_quality.py
index 1d6aeb8..3447c94 100644
--- a/networkx/algorithms/community/tests/test_quality.py
+++ b/networkx/algorithms/community/tests/test_quality.py
@@ -6,12 +6,7 @@ import pytest
 
 import networkx as nx
 from networkx import barbell_graph
-from networkx.algorithms.community import (
-    coverage,
-    modularity,
-    partition_quality,
-    performance,
-)
+from networkx.algorithms.community import modularity, partition_quality
 from networkx.algorithms.community.quality import inter_community_edges
 
 
@@ -22,14 +17,12 @@ class TestPerformance:
         """Tests that a poor partition has a low performance measure."""
         G = barbell_graph(3, 0)
         partition = [{0, 1, 4}, {2, 3, 5}]
-        assert 8 / 15 == pytest.approx(performance(G, partition), abs=1e-7)
         assert 8 / 15 == pytest.approx(partition_quality(G, partition)[1], abs=1e-7)
 
     def test_good_partition(self):
         """Tests that a good partition has a high performance measure."""
         G = barbell_graph(3, 0)
         partition = [{0, 1, 2}, {3, 4, 5}]
-        assert 14 / 15 == pytest.approx(performance(G, partition), abs=1e-7)
         assert 14 / 15 == pytest.approx(partition_quality(G, partition)[1], abs=1e-7)
 
 
@@ -40,14 +33,12 @@ class TestCoverage:
         """Tests that a poor partition has a low coverage measure."""
         G = barbell_graph(3, 0)
         partition = [{0, 1, 4}, {2, 3, 5}]
-        assert 3 / 7 == pytest.approx(coverage(G, partition), abs=1e-7)
         assert 3 / 7 == pytest.approx(partition_quality(G, partition)[0], abs=1e-7)
 
     def test_good_partition(self):
         """Tests that a good partition has a high coverage measure."""
         G = barbell_graph(3, 0)
         partition = [{0, 1, 2}, {3, 4, 5}]
-        assert 6 / 7 == pytest.approx(coverage(G, partition), abs=1e-7)
         assert 6 / 7 == pytest.approx(partition_quality(G, partition)[0], abs=1e-7)
 
 
diff --git a/networkx/algorithms/community/tests/test_utils.py b/networkx/algorithms/community/tests/test_utils.py
index a031782..329ff66 100644
--- a/networkx/algorithms/community/tests/test_utils.py
+++ b/networkx/algorithms/community/tests/test_utils.py
@@ -3,27 +3,26 @@
 """
 
 import networkx as nx
-from networkx.algorithms.community import is_partition
 
 
 def test_is_partition():
     G = nx.empty_graph(3)
-    assert is_partition(G, [{0, 1}, {2}])
-    assert is_partition(G, ({0, 1}, {2}))
-    assert is_partition(G, ([0, 1], [2]))
-    assert is_partition(G, [[0, 1], [2]])
+    assert nx.community.is_partition(G, [{0, 1}, {2}])
+    assert nx.community.is_partition(G, ({0, 1}, {2}))
+    assert nx.community.is_partition(G, ([0, 1], [2]))
+    assert nx.community.is_partition(G, [[0, 1], [2]])
 
 
 def test_not_covering():
     G = nx.empty_graph(3)
-    assert not is_partition(G, [{0}, {1}])
+    assert not nx.community.is_partition(G, [{0}, {1}])
 
 
 def test_not_disjoint():
     G = nx.empty_graph(3)
-    assert not is_partition(G, [{0, 1}, {1, 2}])
+    assert not nx.community.is_partition(G, [{0, 1}, {1, 2}])
 
 
 def test_not_node():
     G = nx.empty_graph(3)
-    assert not is_partition(G, [{0, 1}, {3}])
+    assert not nx.community.is_partition(G, [{0, 1}, {3}])
diff --git a/networkx/algorithms/components/biconnected.py b/networkx/algorithms/components/biconnected.py
index 1eebe8a..f638453 100644
--- a/networkx/algorithms/components/biconnected.py
+++ b/networkx/algorithms/components/biconnected.py
@@ -343,6 +343,7 @@ def _biconnected_dfs(G, components=True):
         visited.add(start)
         edge_stack = []
         stack = [(start, start, iter(G[start]))]
+        edge_index = {}
         while stack:
             grandparent, parent, children = stack[-1]
             try:
@@ -353,29 +354,34 @@ def _biconnected_dfs(G, components=True):
                     if discovery[child] <= discovery[parent]:  # back edge
                         low[parent] = min(low[parent], discovery[child])
                         if components:
+                            edge_index[parent, child] = len(edge_stack)
                             edge_stack.append((parent, child))
                 else:
                     low[child] = discovery[child] = len(discovery)
                     visited.add(child)
                     stack.append((parent, child, iter(G[child])))
                     if components:
+                        edge_index[parent, child] = len(edge_stack)
                         edge_stack.append((parent, child))
+
             except StopIteration:
                 stack.pop()
                 if len(stack) > 1:
                     if low[parent] >= discovery[grandparent]:
                         if components:
-                            ind = edge_stack.index((grandparent, parent))
+                            ind = edge_index[grandparent, parent]
                             yield edge_stack[ind:]
-                            edge_stack = edge_stack[:ind]
+                            del edge_stack[ind:]
+
                         else:
                             yield grandparent
                     low[grandparent] = min(low[parent], low[grandparent])
                 elif stack:  # length 1 so grandparent is root
                     root_children += 1
                     if components:
-                        ind = edge_stack.index((grandparent, parent))
+                        ind = edge_index[grandparent, parent]
                         yield edge_stack[ind:]
+                        del edge_stack[ind:]
         if not components:
             # root node is articulation point if it has more than 1 child
             if root_children > 1:
diff --git a/networkx/algorithms/components/connected.py b/networkx/algorithms/components/connected.py
index e6b122e..192d0b8 100644
--- a/networkx/algorithms/components/connected.py
+++ b/networkx/algorithms/components/connected.py
@@ -12,6 +12,7 @@ __all__ = [
 ]
 
 
+@nx._dispatch
 @not_implemented_for("directed")
 def connected_components(G):
     """Generate connected components.
@@ -100,6 +101,7 @@ def number_connected_components(G):
     return sum(1 for cc in connected_components(G))
 
 
+@nx._dispatch
 @not_implemented_for("directed")
 def is_connected(G):
     """Returns True if the graph is connected, False otherwise.
@@ -145,6 +147,7 @@ def is_connected(G):
     return sum(1 for node in _plain_bfs(G, arbitrary_element(G))) == len(G)
 
 
+@nx._dispatch
 @not_implemented_for("directed")
 def node_connected_component(G, n):
     """Returns the set of nodes in the component of graph containing node n.
diff --git a/networkx/algorithms/components/strongly_connected.py b/networkx/algorithms/components/strongly_connected.py
index 1967740..b4a089c 100644
--- a/networkx/algorithms/components/strongly_connected.py
+++ b/networkx/algorithms/components/strongly_connected.py
@@ -12,6 +12,7 @@ __all__ = [
 ]
 
 
+@nx._dispatch
 @not_implemented_for("undirected")
 def strongly_connected_components(G):
     """Generate nodes in strongly connected components of graph.
diff --git a/networkx/algorithms/components/tests/test_connected.py b/networkx/algorithms/components/tests/test_connected.py
index bf3954e..4c9b8d2 100644
--- a/networkx/algorithms/components/tests/test_connected.py
+++ b/networkx/algorithms/components/tests/test_connected.py
@@ -3,6 +3,7 @@ import pytest
 import networkx as nx
 from networkx import NetworkXNotImplemented
 from networkx import convert_node_labels_to_integers as cnlti
+from networkx.classes.tests import dispatch_interface
 
 
 class TestConnected:
@@ -60,9 +61,12 @@ class TestConnected:
         C = []
         cls.gc.append((G, C))
 
-    def test_connected_components(self):
+    # This additionally tests the @nx._dispatch mechanism, treating
+    # nx.connected_components as if it were a re-implementation from another package
+    @pytest.mark.parametrize("wrapper", [lambda x: x, dispatch_interface.convert])
+    def test_connected_components(self, wrapper):
         cc = nx.connected_components
-        G = self.G
+        G = wrapper(self.G)
         C = {
             frozenset([0, 1, 2, 3]),
             frozenset([4, 5, 6, 7, 8, 9]),
diff --git a/networkx/algorithms/components/weakly_connected.py b/networkx/algorithms/components/weakly_connected.py
index 822719a..31b5b03 100644
--- a/networkx/algorithms/components/weakly_connected.py
+++ b/networkx/algorithms/components/weakly_connected.py
@@ -9,6 +9,7 @@ __all__ = [
 ]
 
 
+@nx._dispatch
 @not_implemented_for("undirected")
 def weakly_connected_components(G):
     """Generate weakly connected components of G.
@@ -103,6 +104,7 @@ def number_weakly_connected_components(G):
     return sum(1 for wcc in weakly_connected_components(G))
 
 
+@nx._dispatch
 @not_implemented_for("undirected")
 def is_weakly_connected(G):
     """Test directed graph for weak connectivity.
diff --git a/networkx/algorithms/connectivity/connectivity.py b/networkx/algorithms/connectivity/connectivity.py
index b782031..82b98f8 100644
--- a/networkx/algorithms/connectivity/connectivity.py
+++ b/networkx/algorithms/connectivity/connectivity.py
@@ -75,12 +75,10 @@ def local_node_connectivity(
         Residual network to compute maximum flow. If provided it will be
         reused instead of recreated. Default value: None.
 
-    cutoff : integer, float
+    cutoff : integer, float, or None (default: None)
         If specified, the maximum flow algorithm will terminate when the
-        flow value reaches or exceeds the cutoff. This is only for the
-        algorithms that support the cutoff parameter: :meth:`edmonds_karp`
-        and :meth:`shortest_augmenting_path`. Other algorithms will ignore
-        this parameter. Default value: None.
+        flow value reaches or exceeds the cutoff. This only works for flows
+        that support the cutoff parameter (most do) and is ignored otherwise.
 
     Returns
     -------
@@ -197,7 +195,7 @@ def local_node_connectivity(
     if mapping is None:
         raise nx.NetworkXError("Invalid auxiliary digraph.")
 
-    kwargs = dict(flow_func=flow_func, residual=residual)
+    kwargs = {"flow_func": flow_func, "residual": residual}
     if flow_func is shortest_augmenting_path:
         kwargs["cutoff"] = cutoff
         kwargs["two_phase"] = True
@@ -332,7 +330,7 @@ def node_connectivity(G, s=None, t=None, flow_func=None):
     # Reuse the auxiliary digraph and the residual network
     H = build_auxiliary_node_connectivity(G)
     R = build_residual_network(H, "capacity")
-    kwargs = dict(flow_func=flow_func, auxiliary=H, residual=R)
+    kwargs = {"flow_func": flow_func, "auxiliary": H, "residual": R}
 
     # Pick a node with minimum degree
     # Node connectivity is bounded by degree.
@@ -407,7 +405,7 @@ def average_node_connectivity(G, flow_func=None):
     # Reuse the auxiliary digraph and the residual network
     H = build_auxiliary_node_connectivity(G)
     R = build_residual_network(H, "capacity")
-    kwargs = dict(flow_func=flow_func, auxiliary=H, residual=R)
+    kwargs = {"flow_func": flow_func, "auxiliary": H, "residual": R}
 
     num, den = 0, 0
     for u, v in iter_func(G, 2):
@@ -475,7 +473,7 @@ def all_pairs_node_connectivity(G, nbunch=None, flow_func=None):
     H = build_auxiliary_node_connectivity(G)
     mapping = H.graph["mapping"]
     R = build_residual_network(H, "capacity")
-    kwargs = dict(flow_func=flow_func, auxiliary=H, residual=R)
+    kwargs = {"flow_func": flow_func, "auxiliary": H, "residual": R}
 
     for u, v in iter_func(nbunch, 2):
         K = local_node_connectivity(G, u, v, **kwargs)
@@ -529,12 +527,10 @@ def local_edge_connectivity(
         Residual network to compute maximum flow. If provided it will be
         reused instead of recreated. Default value: None.
 
-    cutoff : integer, float
+    cutoff : integer, float, or None (default: None)
         If specified, the maximum flow algorithm will terminate when the
-        flow value reaches or exceeds the cutoff. This is only for the
-        algorithms that support the cutoff parameter: :meth:`edmonds_karp`
-        and :meth:`shortest_augmenting_path`. Other algorithms will ignore
-        this parameter. Default value: None.
+        flow value reaches or exceeds the cutoff. This only works for flows
+        that support the cutoff parameter (most do) and is ignored otherwise.
 
     Returns
     -------
@@ -635,7 +631,7 @@ def local_edge_connectivity(
     else:
         H = auxiliary
 
-    kwargs = dict(flow_func=flow_func, residual=residual)
+    kwargs = {"flow_func": flow_func, "residual": residual}
     if flow_func is shortest_augmenting_path:
         kwargs["cutoff"] = cutoff
         kwargs["two_phase"] = True
@@ -679,12 +675,10 @@ def edge_connectivity(G, s=None, t=None, flow_func=None, cutoff=None):
         choice of the default function may change from version
         to version and should not be relied on. Default value: None.
 
-    cutoff : integer, float
+    cutoff : integer, float, or None (default: None)
         If specified, the maximum flow algorithm will terminate when the
-        flow value reaches or exceeds the cutoff. This is only for the
-        algorithms that support the cutoff parameter: e.g., :meth:`edmonds_karp`
-        and :meth:`shortest_augmenting_path`. Other algorithms will ignore
-        this parameter. Default value: None.
+        flow value reaches or exceeds the cutoff. This only works for flows
+        that support the cutoff parameter (most do) and is ignored otherwise.
 
     Returns
     -------
@@ -766,7 +760,7 @@ def edge_connectivity(G, s=None, t=None, flow_func=None, cutoff=None):
     # reuse auxiliary digraph and residual network
     H = build_auxiliary_edge_connectivity(G)
     R = build_residual_network(H, "capacity")
-    kwargs = dict(flow_func=flow_func, auxiliary=H, residual=R)
+    kwargs = {"flow_func": flow_func, "auxiliary": H, "residual": R}
 
     if G.is_directed():
         # Algorithm 8 in [1]
diff --git a/networkx/algorithms/connectivity/cuts.py b/networkx/algorithms/connectivity/cuts.py
index 92495b1..8b8026f 100644
--- a/networkx/algorithms/connectivity/cuts.py
+++ b/networkx/algorithms/connectivity/cuts.py
@@ -140,7 +140,7 @@ def minimum_st_edge_cut(G, s, t, flow_func=None, auxiliary=None, residual=None):
     else:
         H = auxiliary
 
-    kwargs = dict(capacity="capacity", flow_func=flow_func, residual=residual)
+    kwargs = {"capacity": "capacity", "flow_func": flow_func, "residual": residual}
 
     cut_value, partition = nx.minimum_cut(H, s, t, **kwargs)
     reachable, non_reachable = partition
@@ -281,7 +281,7 @@ def minimum_st_node_cut(G, s, t, flow_func=None, auxiliary=None, residual=None):
         raise nx.NetworkXError("Invalid auxiliary digraph.")
     if G.has_edge(s, t) or G.has_edge(t, s):
         return {}
-    kwargs = dict(flow_func=flow_func, residual=residual, auxiliary=H)
+    kwargs = {"flow_func": flow_func, "residual": residual, "auxiliary": H}
 
     # The edge cut in the auxiliary digraph corresponds to the node cut in the
     # original graph.
@@ -414,7 +414,7 @@ def minimum_node_cut(G, s=None, t=None, flow_func=None):
     # Reuse the auxiliary digraph and the residual network.
     H = build_auxiliary_node_connectivity(G)
     R = build_residual_network(H, "capacity")
-    kwargs = dict(flow_func=flow_func, auxiliary=H, residual=R)
+    kwargs = {"flow_func": flow_func, "auxiliary": H, "residual": R}
 
     # Choose a node with minimum degree.
     v = min(G, key=G.degree)
@@ -537,7 +537,7 @@ def minimum_edge_cut(G, s=None, t=None, flow_func=None):
     # reuse auxiliary digraph and residual network
     H = build_auxiliary_edge_connectivity(G)
     R = build_residual_network(H, "capacity")
-    kwargs = dict(flow_func=flow_func, residual=R, auxiliary=H)
+    kwargs = {"flow_func": flow_func, "residual": R, "auxiliary": H}
 
     # Local minimum edge cut if s and t are not None
     if s is not None and t is not None:
diff --git a/networkx/algorithms/connectivity/disjoint_paths.py b/networkx/algorithms/connectivity/disjoint_paths.py
index 378a709..f3f2db1 100644
--- a/networkx/algorithms/connectivity/disjoint_paths.py
+++ b/networkx/algorithms/connectivity/disjoint_paths.py
@@ -1,7 +1,7 @@
 """Flow based node and edge disjoint paths."""
 import networkx as nx
 
-# Define the default maximum flow function to use for the undelying
+# Define the default maximum flow function to use for the underlying
 # maximum flow computations
 from networkx.algorithms.flow import (
     edmonds_karp,
@@ -48,13 +48,11 @@ def edge_disjoint_paths(
         may change from version to version and should not be relied on.
         Default value: None.
 
-    cutoff : int
-        Maximum number of paths to yield. Some of the maximum flow
-        algorithms, such as :meth:`edmonds_karp` (the default) and
-        :meth:`shortest_augmenting_path` support the cutoff parameter,
-        and will terminate when the flow value reaches or exceeds the
-        cutoff. Other algorithms will ignore this parameter.
-        Default value: None.
+    cutoff : integer or None (default: None)
+        Maximum number of paths to yield. If specified, the maximum flow
+        algorithm will terminate when the flow value reaches or exceeds the
+        cutoff. This only works for flows that support the cutoff parameter
+        (most do) and is ignored otherwise.
 
     auxiliary : NetworkX DiGraph
         Auxiliary digraph to compute flow based edge connectivity. It has
@@ -175,9 +173,12 @@ def edge_disjoint_paths(
 
     # Compute maximum flow between source and target. Flow functions in
     # NetworkX return a residual network.
-    kwargs = dict(
-        capacity="capacity", residual=residual, cutoff=cutoff, value_only=True
-    )
+    kwargs = {
+        "capacity": "capacity",
+        "residual": residual,
+        "cutoff": cutoff,
+        "value_only": True,
+    }
     if flow_func is preflow_push:
         del kwargs["cutoff"]
     if flow_func is shortest_augmenting_path:
@@ -254,13 +255,11 @@ def node_disjoint_paths(
         of the default function may change from version to version and
         should not be relied on. Default value: None.
 
-    cutoff : int
-        Maximum number of paths to yield. Some of the maximum flow
-        algorithms, such as :meth:`edmonds_karp` (the default) and
-        :meth:`shortest_augmenting_path` support the cutoff parameter,
-        and will terminate when the flow value reaches or exceeds the
-        cutoff. Other algorithms will ignore this parameter.
-        Default value: None.
+    cutoff : integer or None (default: None)
+        Maximum number of paths to yield. If specified, the maximum flow
+        algorithm will terminate when the flow value reaches or exceeds the
+        cutoff. This only works for flows that support the cutoff parameter
+        (most do) and is ignored otherwise.
 
     auxiliary : NetworkX DiGraph
         Auxiliary digraph to compute flow based node connectivity. It has
@@ -373,7 +372,12 @@ def node_disjoint_paths(
     else:
         cutoff = min(cutoff, possible)
 
-    kwargs = dict(flow_func=flow_func, residual=residual, auxiliary=H, cutoff=cutoff)
+    kwargs = {
+        "flow_func": flow_func,
+        "residual": residual,
+        "auxiliary": H,
+        "cutoff": cutoff,
+    }
 
     # The edge disjoint paths in the auxiliary digraph correspond to the node
     # disjoint paths in the original graph.
diff --git a/networkx/algorithms/connectivity/edge_augmentation.py b/networkx/algorithms/connectivity/edge_augmentation.py
index a8c5e83..3c5d6b9 100644
--- a/networkx/algorithms/connectivity/edge_augmentation.py
+++ b/networkx/algorithms/connectivity/edge_augmentation.py
@@ -10,7 +10,7 @@ k-edge-augmentation exists.
 See Also
 --------
 :mod:`edge_kcomponents` : algorithms for finding k-edge-connected components
-:mod:`connectivity` : algorithms for determening edge connectivity.
+:mod:`connectivity` : algorithms for determining edge connectivity.
 """
 import itertools as it
 import math
@@ -262,7 +262,7 @@ def k_edge_augmentation(G, k, avail=None, weight=None, partial=False):
             aug_edges = greedy_k_edge_augmentation(
                 G, k=k, avail=avail, weight=weight, seed=0
             )
-        # Do eager evaulation so we can catch any exceptions
+        # Do eager evaluation so we can catch any exceptions
         # Before executing partial code.
         yield from list(aug_edges)
     except nx.NetworkXUnfeasible:
@@ -368,13 +368,13 @@ def partial_k_edge_augmentation(G, k, avail, weight=None):
             }
             # Remove potential augmenting edges
             C.remove_edges_from(sub_avail.keys())
-            # Find a subset of these edges that makes the compoment
+            # Find a subset of these edges that makes the component
             # k-edge-connected and ignore the rest
             yield from nx.k_edge_augmentation(C, k=k, avail=sub_avail)
 
     # Generate all edges between CCs that could not be k-edge-connected
     for cc1, cc2 in it.combinations(k_edge_subgraphs, 2):
-        for (u, v) in _edges_between_disjoint(H, cc1, cc2):
+        for u, v in _edges_between_disjoint(H, cc1, cc2):
             d = H.get_edge_data(u, v)
             edge = d.get("generator", None)
             if edge is not None:
@@ -542,7 +542,7 @@ def _lightest_meta_edges(mapping, avail_uv, avail_w):
     -----
     Each node in the metagraph is a k-edge-connected component in the original
     graph.  We don't care about any edge within the same k-edge-connected
-    component, so we ignore self edges.  We also are only intereseted in the
+    component, so we ignore self edges.  We also are only interested in the
     minimum weight edge bridging each k-edge-connected component so, we group
     the edges by meta-edge and take the lightest in each group.
 
@@ -1223,7 +1223,7 @@ def greedy_k_edge_augmentation(G, k, avail=None, weight=None, seed=None):
 
     # Incrementally add edges in until we are k-connected
     H = G.copy()
-    for (u, v) in avail_uv:
+    for u, v in avail_uv:
         done = False
         if not is_locally_k_edge_connected(H, u, v, k=k):
             # Only add edges in parts that are not yet locally k-edge-connected
@@ -1241,7 +1241,7 @@ def greedy_k_edge_augmentation(G, k, avail=None, weight=None, seed=None):
 
     # Randomized attempt to reduce the size of the solution
     _compat_shuffle(seed, aug_edges)
-    for (u, v) in list(aug_edges):
+    for u, v in list(aug_edges):
         # Don't remove if we know it would break connectivity
         if H.degree(u) <= k or H.degree(v) <= k:
             continue
diff --git a/networkx/algorithms/connectivity/edge_kcomponents.py b/networkx/algorithms/connectivity/edge_kcomponents.py
index a5d6977..fc85e26 100644
--- a/networkx/algorithms/connectivity/edge_kcomponents.py
+++ b/networkx/algorithms/connectivity/edge_kcomponents.py
@@ -239,7 +239,7 @@ def bridge_components(G):
 class EdgeComponentAuxGraph:
     r"""A simple algorithm to find all k-edge-connected components in a graph.
 
-    Constructing the AuxillaryGraph (which may take some time) allows for the
+    Constructing the auxiliary graph (which may take some time) allows for the
     k-edge-ccs to be found in linear time for arbitrary k.
 
     Notes
@@ -288,7 +288,7 @@ class EdgeComponentAuxGraph:
     >>> sorted(map(sorted, aux_graph.k_edge_components(k=4)))
     [[0], [1], [2], [3], [4], [5], [6], [7]]
 
-    The auxiliary graph is primarilly used for k-edge-ccs but it
+    The auxiliary graph is primarily used for k-edge-ccs but it
     can also speed up the queries of k-edge-subgraphs by refining the
     search space.
 
diff --git a/networkx/algorithms/connectivity/kcomponents.py b/networkx/algorithms/connectivity/kcomponents.py
index 4e88d32..5c7f441 100644
--- a/networkx/algorithms/connectivity/kcomponents.py
+++ b/networkx/algorithms/connectivity/kcomponents.py
@@ -104,7 +104,7 @@ def k_components(G, flow_func=None):
     """
     # Dictionary with connectivity level (k) as keys and a list of
     # sets of nodes that form a k-component as values. Note that
-    # k-compoents can overlap (but only k - 1 nodes).
+    # k-components can overlap (but only k - 1 nodes).
     k_components = defaultdict(list)
     # Define default flow function
     if flow_func is None:
@@ -167,7 +167,7 @@ def _consolidate(sets, k):
 
     """
     G = nx.Graph()
-    nodes = {i: s for i, s in enumerate(sets)}
+    nodes = dict(enumerate(sets))
     G.add_nodes_from(nodes)
     G.add_edges_from(
         (u, v) for u, v in combinations(nodes, 2) if len(nodes[u] & nodes[v]) >= k
@@ -178,10 +178,7 @@ def _consolidate(sets, k):
 
 def _generate_partition(G, cuts, k):
     def has_nbrs_in_partition(G, node, partition):
-        for n in G[node]:
-            if n in partition:
-                return True
-        return False
+        return any(n in partition for n in G[node])
 
     components = []
     nodes = {n for n, d in G.degree() if d > k} - {n for cut in cuts for n in cut}
@@ -198,7 +195,7 @@ def _generate_partition(G, cuts, k):
 
 
 def _reconstruct_k_components(k_comps):
-    result = dict()
+    result = {}
     max_k = max(k_comps)
     for k in reversed(range(1, max_k + 1)):
         if k == max_k:
diff --git a/networkx/algorithms/connectivity/kcutsets.py b/networkx/algorithms/connectivity/kcutsets.py
index f4f3e6e..837e273 100644
--- a/networkx/algorithms/connectivity/kcutsets.py
+++ b/networkx/algorithms/connectivity/kcutsets.py
@@ -108,7 +108,7 @@ def all_node_cuts(G, k=None, flow_func=None):
     # Shallow copy is enough.
     original_H_pred = copy.copy(H._pred)
     R = build_residual_network(H, "capacity")
-    kwargs = dict(capacity="capacity", residual=R)
+    kwargs = {"capacity": "capacity", "residual": R}
     # Define default flow function
     if flow_func is None:
         flow_func = default_flow_func
@@ -189,7 +189,7 @@ def all_node_cuts(G, k=None, flow_func=None):
                         cutset.update((u, w) for w in original_H_pred[u] if w not in S)
                     # The edges in H that form the cutset are internal edges
                     # (ie edges that represent a node of the original graph G)
-                    if any([H_nodes[u]["id"] != H_nodes[w]["id"] for u, w in cutset]):
+                    if any(H_nodes[u]["id"] != H_nodes[w]["id"] for u, w in cutset):
                         continue
                     node_cut = {H_nodes[u]["id"] for u, _ in cutset}
 
diff --git a/networkx/algorithms/connectivity/tests/test_connectivity.py b/networkx/algorithms/connectivity/tests/test_connectivity.py
index a13aa07..7aef247 100644
--- a/networkx/algorithms/connectivity/tests/test_connectivity.py
+++ b/networkx/algorithms/connectivity/tests/test_connectivity.py
@@ -47,7 +47,7 @@ def test_average_connectivity():
     G2.add_edges_from([(1, 3), (1, 4), (0, 3), (0, 4), (3, 4)])
     G3 = nx.Graph()
     for flow_func in flow_funcs:
-        kwargs = dict(flow_func=flow_func)
+        kwargs = {"flow_func": flow_func}
         errmsg = f"Assertion failed in function: {flow_func.__name__}"
         assert nx.average_node_connectivity(G1, **kwargs) == 1, errmsg
         assert nx.average_node_connectivity(G2, **kwargs) == 2.2, errmsg
@@ -98,7 +98,7 @@ def test_brandes_erlebach():
         ]
     )
     for flow_func in flow_funcs:
-        kwargs = dict(flow_func=flow_func)
+        kwargs = {"flow_func": flow_func}
         errmsg = f"Assertion failed in function: {flow_func.__name__}"
         assert 3 == local_edge_connectivity(G, 1, 11, **kwargs), errmsg
         assert 3 == nx.edge_connectivity(G, 1, 11, **kwargs), errmsg
diff --git a/networkx/algorithms/connectivity/tests/test_cuts.py b/networkx/algorithms/connectivity/tests/test_cuts.py
index c4af519..7a485be 100644
--- a/networkx/algorithms/connectivity/tests/test_cuts.py
+++ b/networkx/algorithms/connectivity/tests/test_cuts.py
@@ -70,7 +70,7 @@ def test_brandes_erlebach_book():
         ]
     )
     for flow_func in flow_funcs:
-        kwargs = dict(flow_func=flow_func)
+        kwargs = {"flow_func": flow_func}
         errmsg = f"Assertion failed in function: {flow_func.__name__}"
         # edge cutsets
         assert 3 == len(nx.minimum_edge_cut(G, 1, 11, **kwargs)), errmsg
@@ -104,7 +104,7 @@ def test_white_harary_paper():
     for i in range(7, 10):
         G.add_edge(0, i)
     for flow_func in flow_funcs:
-        kwargs = dict(flow_func=flow_func)
+        kwargs = {"flow_func": flow_func}
         errmsg = f"Assertion failed in function: {flow_func.__name__}"
         # edge cuts
         edge_cut = nx.minimum_edge_cut(G, **kwargs)
@@ -123,7 +123,7 @@ def test_white_harary_paper():
 def test_petersen_cutset():
     G = nx.petersen_graph()
     for flow_func in flow_funcs:
-        kwargs = dict(flow_func=flow_func)
+        kwargs = {"flow_func": flow_func}
         errmsg = f"Assertion failed in function: {flow_func.__name__}"
         # edge cuts
         edge_cut = nx.minimum_edge_cut(G, **kwargs)
@@ -142,7 +142,7 @@ def test_petersen_cutset():
 def test_octahedral_cutset():
     G = nx.octahedral_graph()
     for flow_func in flow_funcs:
-        kwargs = dict(flow_func=flow_func)
+        kwargs = {"flow_func": flow_func}
         errmsg = f"Assertion failed in function: {flow_func.__name__}"
         # edge cuts
         edge_cut = nx.minimum_edge_cut(G, **kwargs)
@@ -161,7 +161,7 @@ def test_octahedral_cutset():
 def test_icosahedral_cutset():
     G = nx.icosahedral_graph()
     for flow_func in flow_funcs:
-        kwargs = dict(flow_func=flow_func)
+        kwargs = {"flow_func": flow_func}
         errmsg = f"Assertion failed in function: {flow_func.__name__}"
         # edge cuts
         edge_cut = nx.minimum_edge_cut(G, **kwargs)
diff --git a/networkx/algorithms/connectivity/tests/test_disjoint_paths.py b/networkx/algorithms/connectivity/tests/test_disjoint_paths.py
index 74bb3f2..0c0fad9 100644
--- a/networkx/algorithms/connectivity/tests/test_disjoint_paths.py
+++ b/networkx/algorithms/connectivity/tests/test_disjoint_paths.py
@@ -67,7 +67,7 @@ def test_graph_from_pr_2053():
         ]
     )
     for flow_func in flow_funcs:
-        kwargs = dict(flow_func=flow_func)
+        kwargs = {"flow_func": flow_func}
         errmsg = f"Assertion failed in function: {flow_func.__name__}"
         # edge disjoint paths
         edge_paths = list(nx.edge_disjoint_paths(G, "A", "Z", **kwargs))
@@ -82,7 +82,7 @@ def test_graph_from_pr_2053():
 def test_florentine_families():
     G = nx.florentine_families_graph()
     for flow_func in flow_funcs:
-        kwargs = dict(flow_func=flow_func)
+        kwargs = {"flow_func": flow_func}
         errmsg = f"Assertion failed in function: {flow_func.__name__}"
         # edge disjoint paths
         edge_dpaths = list(nx.edge_disjoint_paths(G, "Medici", "Strozzi", **kwargs))
@@ -97,7 +97,7 @@ def test_florentine_families():
 def test_karate():
     G = nx.karate_club_graph()
     for flow_func in flow_funcs:
-        kwargs = dict(flow_func=flow_func)
+        kwargs = {"flow_func": flow_func}
         errmsg = f"Assertion failed in function: {flow_func.__name__}"
         # edge disjoint paths
         edge_dpaths = list(nx.edge_disjoint_paths(G, 0, 33, **kwargs))
@@ -112,7 +112,7 @@ def test_karate():
 def test_petersen_disjoint_paths():
     G = nx.petersen_graph()
     for flow_func in flow_funcs:
-        kwargs = dict(flow_func=flow_func)
+        kwargs = {"flow_func": flow_func}
         errmsg = f"Assertion failed in function: {flow_func.__name__}"
         # edge disjoint paths
         edge_dpaths = list(nx.edge_disjoint_paths(G, 0, 6, **kwargs))
@@ -127,7 +127,7 @@ def test_petersen_disjoint_paths():
 def test_octahedral_disjoint_paths():
     G = nx.octahedral_graph()
     for flow_func in flow_funcs:
-        kwargs = dict(flow_func=flow_func)
+        kwargs = {"flow_func": flow_func}
         errmsg = f"Assertion failed in function: {flow_func.__name__}"
         # edge disjoint paths
         edge_dpaths = list(nx.edge_disjoint_paths(G, 0, 5, **kwargs))
@@ -142,7 +142,7 @@ def test_octahedral_disjoint_paths():
 def test_icosahedral_disjoint_paths():
     G = nx.icosahedral_graph()
     for flow_func in flow_funcs:
-        kwargs = dict(flow_func=flow_func)
+        kwargs = {"flow_func": flow_func}
         errmsg = f"Assertion failed in function: {flow_func.__name__}"
         # edge disjoint paths
         edge_dpaths = list(nx.edge_disjoint_paths(G, 0, 6, **kwargs))
@@ -157,7 +157,7 @@ def test_icosahedral_disjoint_paths():
 def test_cutoff_disjoint_paths():
     G = nx.icosahedral_graph()
     for flow_func in flow_funcs:
-        kwargs = dict(flow_func=flow_func)
+        kwargs = {"flow_func": flow_func}
         errmsg = f"Assertion failed in function: {flow_func.__name__}"
         for cutoff in [2, 4]:
             kwargs["cutoff"] = cutoff
diff --git a/networkx/algorithms/connectivity/tests/test_kcutsets.py b/networkx/algorithms/connectivity/tests/test_kcutsets.py
index 91426f1..d5b3b08 100644
--- a/networkx/algorithms/connectivity/tests/test_kcutsets.py
+++ b/networkx/algorithms/connectivity/tests/test_kcutsets.py
@@ -186,7 +186,7 @@ def test_articulation_points():
     Ggen = _generate_no_biconnected()
     for i in range(1):  # change 1 to 3 or more for more realizations.
         G = next(Ggen)
-        articulation_points = list({a} for a in nx.articulation_points(G))
+        articulation_points = [{a} for a in nx.articulation_points(G)]
         for cut in nx.all_node_cuts(G):
             assert cut in articulation_points
 
@@ -241,7 +241,6 @@ def test_non_repeated_cuts():
     solution = [{32, 33}, {2, 33}, {0, 3}, {0, 1}, {29, 33}]
     cuts = list(nx.all_node_cuts(G))
     if len(solution) != len(cuts):
-        print(nx.info(G))
         print(f"Solution: {solution}")
         print(f"Result: {cuts}")
     assert len(solution) == len(cuts)
diff --git a/networkx/algorithms/connectivity/utils.py b/networkx/algorithms/connectivity/utils.py
index 06d2fdc..96ebb2a 100644
--- a/networkx/algorithms/connectivity/utils.py
+++ b/networkx/algorithms/connectivity/utils.py
@@ -47,7 +47,7 @@ def build_auxiliary_node_connectivity(G):
         H.add_edge(f"{i}A", f"{i}B", capacity=1)
 
     edges = []
-    for (source, target) in G.edges():
+    for source, target in G.edges():
         edges.append((f"{mapping[source]}B", f"{mapping[target]}A"))
         if not directed:
             edges.append((f"{mapping[target]}B", f"{mapping[source]}A"))
@@ -80,6 +80,6 @@ def build_auxiliary_edge_connectivity(G):
     else:
         H = nx.DiGraph()
         H.add_nodes_from(G.nodes())
-        for (source, target) in G.edges():
+        for source, target in G.edges():
             H.add_edges_from([(source, target), (target, source)], capacity=1)
         return H
diff --git a/networkx/algorithms/core.py b/networkx/algorithms/core.py
index e39eb84..0ec1a44 100644
--- a/networkx/algorithms/core.py
+++ b/networkx/algorithms/core.py
@@ -34,7 +34,6 @@ from networkx.utils import not_implemented_for
 
 __all__ = [
     "core_number",
-    "find_cores",
     "k_core",
     "k_shell",
     "k_crust",
@@ -44,6 +43,7 @@ __all__ = [
 ]
 
 
+@nx._dispatch
 @not_implemented_for("multigraph")
 def core_number(G):
     """Returns the core number for each vertex.
@@ -115,18 +115,6 @@ def core_number(G):
     return core
 
 
-def find_cores(G):
-    import warnings
-
-    msg = (
-        "\nfind_cores is deprecated as of version 2.7 and will be removed "
-        "in version 3.0.\n"
-        "The find_cores function is renamed core_number\n"
-    )
-    warnings.warn(msg, DeprecationWarning, stacklevel=2)
-    return nx.core_number(G)
-
-
 def _core_subgraph(G, k_filter, k=None, core=None):
     """Returns the subgraph induced by nodes passing filter `k_filter`.
 
@@ -154,6 +142,7 @@ def _core_subgraph(G, k_filter, k=None, core=None):
     return G.subgraph(nodes).copy()
 
 
+@nx._dispatch
 def k_core(G, k=None, core_number=None):
     """Returns the k-core of G.
 
@@ -378,6 +367,7 @@ def k_corona(G, k, core_number=None):
     return _core_subgraph(G, func, k, core_number)
 
 
+@nx._dispatch
 @not_implemented_for("directed")
 @not_implemented_for("multigraph")
 def k_truss(G, k):
@@ -402,8 +392,8 @@ def k_truss(G, k):
     ------
     NetworkXError
 
-      The k-truss is not defined for graphs with self loops or parallel edges
-      or directed graphs.
+      The k-truss is not defined for graphs with self loops, directed graphs
+      and multigraphs.
 
     Notes
     -----
@@ -426,6 +416,13 @@ def k_truss(G, k):
     .. [2] Trusses: Cohesive Subgraphs for Social Network Analysis. Jonathan
        Cohen, 2005.
     """
+    if nx.number_of_selfloops(G) > 0:
+        msg = (
+            "Input graph has self loops which is not permitted; "
+            "Consider using G.remove_edges_from(nx.selfloop_edges(G))."
+        )
+        raise NetworkXError(msg)
+
     H = G.copy()
 
     n_dropped = 1
@@ -511,7 +508,7 @@ def onion_layers(G):
     current_core = 1
     current_layer = 1
     # Sets vertices of degree 0 to layer 1, if any.
-    isolated_nodes = [v for v in nx.isolates(G)]
+    isolated_nodes = list(nx.isolates(G))
     if len(isolated_nodes) > 0:
         for v in isolated_nodes:
             od_layers[v] = current_layer
diff --git a/networkx/algorithms/cuts.py b/networkx/algorithms/cuts.py
index ae1cb02..d225996 100644
--- a/networkx/algorithms/cuts.py
+++ b/networkx/algorithms/cuts.py
@@ -21,6 +21,7 @@ __all__ = [
 # TODO STILL NEED TO UPDATE ALL THE DOCUMENTATION!
 
 
+@nx._dispatch
 def cut_size(G, S, T=None, weight=None):
     """Returns the size of the cut between two sets of nodes.
 
@@ -83,6 +84,7 @@ def cut_size(G, S, T=None, weight=None):
     return sum(weight for u, v, weight in edges)
 
 
+@nx._dispatch
 def volume(G, S, weight=None):
     """Returns the volume of a set of nodes.
 
@@ -125,6 +127,7 @@ def volume(G, S, weight=None):
     return sum(d for v, d in degree(S, weight=weight))
 
 
+@nx._dispatch
 def normalized_cut_size(G, S, T=None, weight=None):
     """Returns the normalized size of the cut between two sets of nodes.
 
@@ -177,6 +180,7 @@ def normalized_cut_size(G, S, T=None, weight=None):
     return num_cut_edges * ((1 / volume_S) + (1 / volume_T))
 
 
+@nx._dispatch
 def conductance(G, S, T=None, weight=None):
     """Returns the conductance of two sets of nodes.
 
@@ -224,6 +228,7 @@ def conductance(G, S, T=None, weight=None):
     return num_cut_edges / min(volume_S, volume_T)
 
 
+@nx._dispatch
 def edge_expansion(G, S, T=None, weight=None):
     """Returns the edge expansion between two node sets.
 
@@ -270,6 +275,7 @@ def edge_expansion(G, S, T=None, weight=None):
     return num_cut_edges / min(len(S), len(T))
 
 
+@nx._dispatch
 def mixing_expansion(G, S, T=None, weight=None):
     """Returns the mixing expansion between two node sets.
 
@@ -317,6 +323,7 @@ def mixing_expansion(G, S, T=None, weight=None):
 
 # TODO What is the generalization to two arguments, S and T? Does the
 # denominator become `min(len(S), len(T))`?
+@nx._dispatch
 def node_expansion(G, S):
     """Returns the node expansion of the set `S`.
 
@@ -356,6 +363,7 @@ def node_expansion(G, S):
 
 # TODO What is the generalization to two arguments, S and T? Does the
 # denominator become `min(len(S), len(T))`?
+@nx._dispatch
 def boundary_expansion(G, S):
     """Returns the boundary expansion of the set `S`.
 
diff --git a/networkx/algorithms/cycles.py b/networkx/algorithms/cycles.py
index 48d32ae..6c5f7d4 100644
--- a/networkx/algorithms/cycles.py
+++ b/networkx/algorithms/cycles.py
@@ -5,6 +5,7 @@ Cycle finding algorithms
 """
 
 from collections import defaultdict
+from itertools import combinations, product
 
 import networkx as nx
 from networkx.utils import not_implemented_for, pairwise
@@ -15,6 +16,7 @@ __all__ = [
     "recursive_simple_cycles",
     "find_cycle",
     "minimum_cycle_basis",
+    "chordless_cycles",
 ]
 
 
@@ -95,22 +97,44 @@ def cycle_basis(G, root=None):
     return cycles
 
 
-@not_implemented_for("undirected")
-def simple_cycles(G):
-    """Find simple cycles (elementary circuits) of a directed graph.
+def simple_cycles(G, length_bound=None):
+    """Find simple cycles (elementary circuits) of a graph.
 
     A `simple cycle`, or `elementary circuit`, is a closed path where
-    no node appears twice. Two elementary circuits are distinct if they
-    are not cyclic permutations of each other.
-
-    This is a nonrecursive, iterator/generator version of Johnson's
-    algorithm [1]_.  There may be better algorithms for some cases [2]_ [3]_.
+    no node appears twice.  In a directed graph, two simple cycles are distinct
+    if they are not cyclic permutations of each other.  In an undirected graph,
+    two simple cycles are distinct if they are not cyclic permutations of each
+    other nor of the other's reversal.
+
+    Optionally, the cycles are bounded in length.  In the unbounded case, we use
+    a nonrecursive, iterator/generator version of Johnson's algorithm [1]_.  In
+    the bounded case, we use a version of the algorithm of Gupta and
+    Suzumura[2]_. There may be better algorithms for some cases [3]_ [4]_ [5]_.
+
+    The algorithms of Johnson, and Gupta and Suzumura, are enhanced by some
+    well-known preprocessing techniques.  When G is directed, we restrict our
+    attention to strongly connected components of G, generate all simple cycles
+    containing a certain node, remove that node, and further decompose the
+    remainder into strongly connected components.  When G is undirected, we
+    restrict our attention to biconnected components, generate all simple cycles
+    containing a particular edge, remove that edge, and further decompose the
+    remainder into biconnected components.
+
+    Note that multigraphs are supported by this function -- and in undirected
+    multigraphs, a pair of parallel edges is considered a cycle of length 2.
+    Likewise, self-loops are considered to be cycles of length 1.  We define
+    cycles as sequences of nodes; so the presence of loops and parallel edges
+    does not change the number of simple cycles in a graph.
 
     Parameters
     ----------
     G : NetworkX DiGraph
        A directed graph
 
+    length_bound : int or None, optional (default=None)
+       If length_bound is an int, generate all simple cycles of G with length at
+       most length_bound.  Otherwise, generate all simple cycles of G.
+
     Yields
     ------
     list of nodes
@@ -134,92 +158,602 @@ def simple_cycles(G):
 
     Notes
     -----
-    The implementation follows pp. 79-80 in [1]_.
+    When length_bound is None, the time complexity is $O((n+e)(c+1))$ for $n$
+    nodes, $e$ edges and $c$ simple circuits.  Otherwise, when length_bound > 1,
+    the time complexity is $O((c+n)(k-1)d^k)$ where $d$ is the average degree of
+    the nodes of G and $k$ = length_bound.
 
-    The time complexity is $O((n+e)(c+1))$ for $n$ nodes, $e$ edges and $c$
-    elementary circuits.
+    Raises
+    ------
+    ValueError
+        when length_bound < 0.
 
     References
     ----------
     .. [1] Finding all the elementary circuits of a directed graph.
        D. B. Johnson, SIAM Journal on Computing 4, no. 1, 77-84, 1975.
        https://doi.org/10.1137/0204007
-    .. [2] Enumerating the cycles of a digraph: a new preprocessing strategy.
+    .. [2] Finding All Bounded-Length Simple Cycles in a Directed Graph
+       A. Gupta and T. Suzumura https://arxiv.org/abs/2105.10094
+    .. [3] Enumerating the cycles of a digraph: a new preprocessing strategy.
        G. Loizou and P. Thanish, Information Sciences, v. 27, 163-182, 1982.
-    .. [3] A search strategy for the elementary cycles of a directed graph.
+    .. [4] A search strategy for the elementary cycles of a directed graph.
        J.L. Szwarcfiter and P.E. Lauer, BIT NUMERICAL MATHEMATICS,
        v. 16, no. 2, 192-204, 1976.
+    .. [5] Optimal Listing of Cycles and st-Paths in Undirected Graphs
+        R. Ferreira and R. Grossi and A. Marino and N. Pisanti and R. Rizzi and
+        G. Sacomoto https://arxiv.org/abs/1205.2766
 
     See Also
     --------
     cycle_basis
+    chordless_cycles
     """
 
-    def _unblock(thisnode, blocked, B):
-        stack = {thisnode}
-        while stack:
-            node = stack.pop()
-            if node in blocked:
-                blocked.remove(node)
-                stack.update(B[node])
-                B[node].clear()
+    if length_bound is not None:
+        if length_bound == 0:
+            return
+        elif length_bound < 0:
+            raise ValueError("length bound must be non-negative")
 
-    # Johnson's algorithm requires some ordering of the nodes.
-    # We assign the arbitrary ordering given by the strongly connected comps
-    # There is no need to track the ordering as each node removed as processed.
-    # Also we save the actual graph so we can mutate it. We only take the
-    # edges because we do not want to copy edge and node attributes here.
-    subG = type(G)(G.edges())
-    sccs = [scc for scc in nx.strongly_connected_components(subG) if len(scc) > 1]
+    directed = G.is_directed()
+    yield from ([v] for v, Gv in G.adj.items() if v in Gv)
 
-    # Johnson's algorithm exclude self cycle edges like (v, v)
-    # To be backward compatible, we record those cycles in advance
-    # and then remove from subG
-    for v in subG:
-        if subG.has_edge(v, v):
-            yield [v]
-            subG.remove_edge(v, v)
-
-    while sccs:
-        scc = sccs.pop()
-        sccG = subG.subgraph(scc)
-        # order of scc determines ordering of nodes
-        startnode = scc.pop()
-        # Processing node runs "circuit" routine from recursive version
-        path = [startnode]
-        blocked = set()  # vertex: blocked from search?
-        closed = set()  # nodes involved in a cycle
-        blocked.add(startnode)
-        B = defaultdict(set)  # graph portions that yield no elementary circuit
-        stack = [(startnode, list(sccG[startnode]))]  # sccG gives comp nbrs
-        while stack:
-            thisnode, nbrs = stack[-1]
-            if nbrs:
-                nextnode = nbrs.pop()
-                if nextnode == startnode:
-                    yield path[:]
-                    closed.update(path)
-                #                        print "Found a cycle", path, closed
-                elif nextnode not in blocked:
-                    path.append(nextnode)
-                    stack.append((nextnode, list(sccG[nextnode])))
-                    closed.discard(nextnode)
-                    blocked.add(nextnode)
-                    continue
-            # done with nextnode... look for more neighbors
-            if not nbrs:  # no more nbrs
-                if thisnode in closed:
-                    _unblock(thisnode, blocked, B)
+    if length_bound is not None and length_bound == 1:
+        return
+
+    if G.is_multigraph() and not directed:
+        visited = set()
+        for u, Gu in G.adj.items():
+            multiplicity = ((v, len(Guv)) for v, Guv in Gu.items() if v in visited)
+            yield from ([u, v] for v, m in multiplicity if m > 1)
+            visited.add(u)
+
+    # explicitly filter out loops; implicitly filter out parallel edges
+    if directed:
+        G = nx.DiGraph((u, v) for u, Gu in G.adj.items() for v in Gu if v != u)
+    else:
+        G = nx.Graph((u, v) for u, Gu in G.adj.items() for v in Gu if v != u)
+
+    # this case is not strictly necessary but improves performance
+    if length_bound is not None and length_bound == 2:
+        if directed:
+            visited = set()
+            for u, Gu in G.adj.items():
+                yield from (
+                    [v, u] for v in visited.intersection(Gu) if G.has_edge(v, u)
+                )
+                visited.add(u)
+        return
+
+    if directed:
+        yield from _directed_cycle_search(G, length_bound)
+    else:
+        yield from _undirected_cycle_search(G, length_bound)
+
+
+def _directed_cycle_search(G, length_bound):
+    """A dispatch function for `simple_cycles` for directed graphs.
+
+    We generate all cycles of G through binary partition.
+
+        1. Pick a node v in G which belongs to at least one cycle
+            a. Generate all cycles of G which contain the node v.
+            b. Recursively generate all cycles of G \\ v.
+
+    This is accomplished through the following:
+
+        1. Compute the strongly connected components SCC of G.
+        2. Select and remove a strongly connected component C from SCC.
+           Select a node v of C.
+        3. For each simple cycle P containing v in G[C], yield P.
+        4. Add the strongly connected components of G[C \\ v] to SCC.
+
+    If the parameter length_bound is not None, then step 3 will be limited to
+    simple cycles of length at most length_bound.
+
+    Parameters
+    ----------
+    G : NetworkX DiGraph
+       A directed graph
+
+    length_bound : int or None
+       If length_bound is an int, generate all simple cycles of G with length at most length_bound.
+       Otherwise, generate all simple cycles of G.
+
+    Yields
+    ------
+    list of nodes
+       Each cycle is represented by a list of nodes along the cycle.
+    """
+
+    scc = nx.strongly_connected_components
+    components = [c for c in scc(G) if len(c) >= 2]
+    while components:
+        c = components.pop()
+        Gc = G.subgraph(c)
+        v = next(iter(c))
+        if length_bound is None:
+            yield from _johnson_cycle_search(Gc, [v])
+        else:
+            yield from _bounded_cycle_search(Gc, [v], length_bound)
+        # delete v after searching G, to make sure we can find v
+        G.remove_node(v)
+        components.extend(c for c in scc(Gc) if len(c) >= 2)
+
+
+def _undirected_cycle_search(G, length_bound):
+    """A dispatch function for `simple_cycles` for undirected graphs.
+
+    We generate all cycles of G through binary partition.
+
+        1. Pick an edge (u, v) in G which belongs to at least one cycle
+            a. Generate all cycles of G which contain the edge (u, v)
+            b. Recursively generate all cycles of G \\ (u, v)
+
+    This is accomplished through the following:
+
+        1. Compute the biconnected components BCC of G.
+        2. Select and remove a biconnected component C from BCC.  Select a
+           non-tree edge (u, v) of a depth-first search of G[C].
+        3. For each (v -> u) path P remaining in G[C] \\ (u, v), yield P.
+        4. Add the biconnected components of G[C] \\ (u, v) to BCC.
+
+    If the parameter length_bound is not None, then step 3 will be limited to simple paths
+    of length at most length_bound.
+
+    Parameters
+    ----------
+    G : NetworkX Graph
+       An undirected graph
+
+    length_bound : int or None
+       If length_bound is an int, generate all simple cycles of G with length at most length_bound.
+       Otherwise, generate all simple cycles of G.
+
+    Yields
+    ------
+    list of nodes
+       Each cycle is represented by a list of nodes along the cycle.
+    """
+
+    bcc = nx.biconnected_components
+    components = [c for c in bcc(G) if len(c) >= 3]
+    while components:
+        c = components.pop()
+        Gc = G.subgraph(c)
+        uv = list(next(iter(Gc.edges)))
+        G.remove_edge(*uv)
+        # delete (u, v) before searching G, to avoid fake 3-cycles [u, v, u]
+        if length_bound is None:
+            yield from _johnson_cycle_search(Gc, uv)
+        else:
+            yield from _bounded_cycle_search(Gc, uv, length_bound)
+        components.extend(c for c in bcc(Gc) if len(c) >= 3)
+
+
+class _NeighborhoodCache(dict):
+    """Very lightweight graph wrapper which caches neighborhoods as list.
+
+    This dict subclass uses the __missing__ functionality to query graphs for
+    their neighborhoods, and store the result as a list.  This is used to avoid
+    the performance penalty incurred by subgraph views.
+    """
+
+    def __init__(self, G):
+        self.G = G
+
+    def __missing__(self, v):
+        Gv = self[v] = list(self.G[v])
+        return Gv
+
+
+def _johnson_cycle_search(G, path):
+    """The main loop of the cycle-enumeration algorithm of Johnson.
+
+    Parameters
+    ----------
+    G : NetworkX Graph or DiGraph
+       A graph
+
+    path : list
+       A cycle prefix.  All cycles generated will begin with this prefix.
+
+    Yields
+    ------
+    list of nodes
+       Each cycle is represented by a list of nodes along the cycle.
+
+    References
+    ----------
+    .. [1] Finding all the elementary circuits of a directed graph.
+       D. B. Johnson, SIAM Journal on Computing 4, no. 1, 77-84, 1975.
+       https://doi.org/10.1137/0204007
+
+    """
+
+    G = _NeighborhoodCache(G)
+    blocked = set(path)
+    B = defaultdict(set)  # graph portions that yield no elementary circuit
+    start = path[0]
+    stack = [iter(G[path[-1]])]
+    closed = [False]
+    while stack:
+        nbrs = stack[-1]
+        for w in nbrs:
+            if w == start:
+                yield path[:]
+                closed[-1] = True
+            elif w not in blocked:
+                path.append(w)
+                closed.append(False)
+                stack.append(iter(G[w]))
+                blocked.add(w)
+                break
+        else:  # no more nbrs
+            stack.pop()
+            v = path.pop()
+            if closed.pop():
+                if closed:
+                    closed[-1] = True
+                unblock_stack = {v}
+                while unblock_stack:
+                    u = unblock_stack.pop()
+                    if u in blocked:
+                        blocked.remove(u)
+                        unblock_stack.update(B[u])
+                        B[u].clear()
+            else:
+                for w in G[v]:
+                    B[w].add(v)
+
+
+def _bounded_cycle_search(G, path, length_bound):
+    """The main loop of the cycle-enumeration algorithm of Gupta and Suzumura.
+
+    Parameters
+    ----------
+    G : NetworkX Graph or DiGraph
+       A graph
+
+    path : list
+       A cycle prefix.  All cycles generated will begin with this prefix.
+
+    length_bound: int
+        A length bound.  All cycles generated will have length at most length_bound.
+
+    Yields
+    ------
+    list of nodes
+       Each cycle is represented by a list of nodes along the cycle.
+
+    References
+    ----------
+    .. [1] Finding All Bounded-Length Simple Cycles in a Directed Graph
+       A. Gupta and T. Suzumura https://arxiv.org/abs/2105.10094
+
+    """
+    G = _NeighborhoodCache(G)
+    lock = {v: 0 for v in path}
+    B = defaultdict(set)
+    start = path[0]
+    stack = [iter(G[path[-1]])]
+    blen = [length_bound]
+    while stack:
+        nbrs = stack[-1]
+        for w in nbrs:
+            if w == start:
+                yield path[:]
+                blen[-1] = 1
+            elif len(path) < lock.get(w, length_bound):
+                path.append(w)
+                blen.append(length_bound)
+                lock[w] = len(path)
+                stack.append(iter(G[w]))
+                break
+        else:
+            stack.pop()
+            v = path.pop()
+            bl = blen.pop()
+            if blen:
+                blen[-1] = min(blen[-1], bl)
+            if bl < length_bound:
+                relax_stack = [(bl, v)]
+                while relax_stack:
+                    bl, u = relax_stack.pop()
+                    if lock.get(u, length_bound) < length_bound - bl + 1:
+                        lock[u] = length_bound - bl + 1
+                        relax_stack.extend((bl + 1, w) for w in B[u].difference(path))
+            else:
+                for w in G[v]:
+                    B[w].add(v)
+
+
+def chordless_cycles(G, length_bound=None):
+    """Find simple chordless cycles of a graph.
+
+    A `simple cycle` is a closed path where no node appears twice.  In a simple
+    cycle, a `chord` is an additional edge between two nodes in the cycle.  A
+    `chordless cycle` is a simple cycle without chords.  Said differently, a
+    chordless cycle is a cycle C in a graph G where the number of edges in the
+    induced graph G[C] is equal to the length of `C`.
+
+    Note that some care must be taken in the case that G is not a simple graph
+    nor a simple digraph.  Some authors limit the definition of chordless cycles
+    to have a prescribed minimum length; we do not.
+
+        1. We interpret self-loops to be chordless cycles, except in multigraphs
+           with multiple loops in parallel.  Likewise, in a chordless cycle of
+           length greater than 1, there can be no nodes with self-loops.
+
+        2. We interpret directed two-cycles to be chordless cycles, except in
+           multi-digraphs when any edge in a two-cycle has a parallel copy.
+
+        3. We interpret parallel pairs of undirected edges as two-cycles, except
+           when a third (or more) parallel edge exists between the two nodes.
+
+        4. Generalizing the above, edges with parallel clones may not occur in
+           chordless cycles.
+
+    In a directed graph, two chordless cycles are distinct if they are not
+    cyclic permutations of each other.  In an undirected graph, two chordless
+    cycles are distinct if they are not cyclic permutations of each other nor of
+    the other's reversal.
+
+    Optionally, the cycles are bounded in length.
+
+    We use an algorithm strongly inspired by that of Dias et al [1]_.  It has
+    been modified in the following ways:
+
+        1. Recursion is avoided, per Python's limitations
+
+        2. The labeling function is not necessary, because the starting paths
+            are chosen (and deleted from the host graph) to prevent multiple
+            occurrences of the same path
+
+        3. The search is optionally bounded at a specified length
+
+        4. Support for directed graphs is provided by extending cycles along
+            forward edges, and blocking nodes along forward and reverse edges
+
+        5. Support for multigraphs is provided by omitting digons from the set
+            of forward edges
+
+    Parameters
+    ----------
+    G : NetworkX Graph or DiGraph
+       A directed or undirected graph
+
+    length_bound : int or None, optional (default=None)
+       If length_bound is an int, generate all chordless cycles of G with
+       length at most length_bound.  Otherwise, generate all chordless cycles
+       of G.
+
+    Yields
+    ------
+    list of nodes
+       Each cycle is represented by a list of nodes along the cycle.
+
+    Examples
+    --------
+    >>> sorted(list(nx.chordless_cycles(nx.complete_graph(4))))
+    [[1, 0, 2], [1, 0, 3], [2, 0, 3], [2, 1, 3]]
+
+    Notes
+    -----
+    When length_bound is None, and the graph is simple, the time complexity is
+    $O((n+e)(c+1))$ for $n$ nodes, $e$ edges and $c$ chordless cycles.
+
+    Raises
+    ------
+    ValueError
+        when length_bound < 0.
+
+    References
+    ----------
+    .. [1] Efficient enumeration of chordless cycles
+       E. Dias and D. Castonguay and H. Longo and W.A.R. Jradi
+       https://arxiv.org/abs/1309.1051
+
+    See Also
+    --------
+    simple_cycles
+    """
+
+    if length_bound is not None:
+        if length_bound == 0:
+            return
+        elif length_bound < 0:
+            raise ValueError("length bound must be non-negative")
+
+    directed = G.is_directed()
+    multigraph = G.is_multigraph()
+
+    if multigraph:
+        yield from ([v] for v, Gv in G.adj.items() if len(Gv.get(v, ())) == 1)
+    else:
+        yield from ([v] for v, Gv in G.adj.items() if v in Gv)
+
+    if length_bound is not None and length_bound == 1:
+        return
+
+    # Nodes with loops cannot belong to longer cycles.  Let's delete them here.
+    # also, we implicitly reduce the multiplicity of edges down to 1 in the case
+    # of multiedges.
+    if directed:
+        F = nx.DiGraph((u, v) for u, Gu in G.adj.items() if u not in Gu for v in Gu)
+        B = F.to_undirected(as_view=False)
+    else:
+        F = nx.Graph((u, v) for u, Gu in G.adj.items() if u not in Gu for v in Gu)
+        B = None
+
+    # If we're given a multigraph, we have a few cases to consider with parallel
+    # edges.
+    #
+    # 1. If we have 2 or more edges in parallel between the nodes (u, v), we
+    #    must not construct longer cycles along (u, v).
+    # 2. If G is not directed, then a pair of parallel edges between (u, v) is a
+    #    chordless cycle unless there exists a third (or more) parallel edge.
+    # 3. If G is directed, then parallel edges do not form cycles, but do
+    #    preclude back-edges from forming cycles (handled in the next section),
+    #    Thus, if an edge (u, v) is duplicated and the reverse (v, u) is also
+    #    present, then we remove both from F.
+    #
+    # In directed graphs, we need to consider both directions that edges can
+    # take, so iterate over all edges (u, v) and possibly (v, u).  In undirected
+    # graphs, we need to be a little careful to only consider every edge once,
+    # so we use a "visited" set to emulate node-order comparisons.
+
+    if multigraph:
+        if not directed:
+            B = F.copy()
+            visited = set()
+        for u, Gu in G.adj.items():
+            if directed:
+                multiplicity = ((v, len(Guv)) for v, Guv in Gu.items())
+                for v, m in multiplicity:
+                    if m > 1:
+                        F.remove_edges_from(((u, v), (v, u)))
+            else:
+                multiplicity = ((v, len(Guv)) for v, Guv in Gu.items() if v in visited)
+                for v, m in multiplicity:
+                    if m == 2:
+                        yield [u, v]
+                    if m > 1:
+                        F.remove_edge(u, v)
+                visited.add(u)
+
+    # If we're given a directed graphs, we need to think about digons.  If we
+    # have two edges (u, v) and (v, u), then that's a two-cycle.  If either edge
+    # was duplicated above, then we removed both from F.  So, any digons we find
+    # here are chordless.  After finding digons, we remove their edges from F
+    # to avoid traversing them in the search for chordless cycles.
+    if directed:
+        for u, Fu in F.adj.items():
+            digons = [[u, v] for v in Fu if F.has_edge(v, u)]
+            yield from digons
+            F.remove_edges_from(digons)
+            F.remove_edges_from(e[::-1] for e in digons)
+
+    if length_bound is not None and length_bound == 2:
+        return
+
+    # Now, we prepare to search for cycles.  We have removed all cycles of
+    # lengths 1 and 2, so F is a simple graph or simple digraph.  We repeatedly
+    # separate digraphs into their strongly connected components, and undirected
+    # graphs into their biconnected components.  For each component, we pick a
+    # node v, search for chordless cycles based at each "stem" (u, v, w), and
+    # then remove v from that component before separating the graph again.
+    if directed:
+        separate = nx.strongly_connected_components
+
+        # Directed stems look like (u -> v -> w), so we use the product of
+        # predecessors of v with successors of v.
+        def stems(C, v):
+            for u, w in product(C.pred[v], C.succ[v]):
+                if not G.has_edge(u, w):  # omit stems with acyclic chords
+                    yield [u, v, w], F.has_edge(w, u)
+
+    else:
+        separate = nx.biconnected_components
+
+        # Undirected stems look like (u ~ v ~ w), but we must not also search
+        # (w ~ v ~ u), so we use combinations of v's neighbors of length 2.
+        def stems(C, v):
+            yield from (([u, v, w], F.has_edge(w, u)) for u, w in combinations(C[v], 2))
+
+    components = [c for c in separate(F) if len(c) > 2]
+    while components:
+        c = components.pop()
+        v = next(iter(c))
+        Fc = F.subgraph(c)
+        Fcc = Bcc = None
+        for S, is_triangle in stems(Fc, v):
+            if is_triangle:
+                yield S
+            else:
+                if Fcc is None:
+                    Fcc = _NeighborhoodCache(Fc)
+                    Bcc = Fcc if B is None else _NeighborhoodCache(B.subgraph(c))
+                yield from _chordless_cycle_search(Fcc, Bcc, S, length_bound)
+
+        components.extend(c for c in separate(F.subgraph(c - {v})) if len(c) > 2)
+
+
+def _chordless_cycle_search(F, B, path, length_bound):
+    """The main loop for chordless cycle enumeration.
+
+    This algorithm is strongly inspired by that of Dias et al [1]_.  It has been
+    modified in the following ways:
+
+        1. Recursion is avoided, per Python's limitations
+
+        2. The labeling function is not necessary, because the starting paths
+            are chosen (and deleted from the host graph) to prevent multiple
+            occurrences of the same path
+
+        3. The search is optionally bounded at a specified length
+
+        4. Support for directed graphs is provided by extending cycles along
+            forward edges, and blocking nodes along forward and reverse edges
+
+        5. Support for multigraphs is provided by omitting digons from the set
+            of forward edges
+
+    Parameters
+    ----------
+    F : _NeighborhoodCache
+       A graph of forward edges to follow in constructing cycles
+
+    B : _NeighborhoodCache
+       A graph of blocking edges to prevent the production of chordless cycles
+
+    path : list
+       A cycle prefix.  All cycles generated will begin with this prefix.
+
+    length_bound : int
+       A length bound.  All cycles generated will have length at most length_bound.
+
+
+    Yields
+    ------
+    list of nodes
+       Each cycle is represented by a list of nodes along the cycle.
+
+    References
+    ----------
+    .. [1] Efficient enumeration of chordless cycles
+       E. Dias and D. Castonguay and H. Longo and W.A.R. Jradi
+       https://arxiv.org/abs/1309.1051
+
+    """
+    blocked = defaultdict(int)
+    target = path[0]
+    blocked[path[1]] = 1
+    for w in path[1:]:
+        for v in B[w]:
+            blocked[v] += 1
+
+    stack = [iter(F[path[2]])]
+    while stack:
+        nbrs = stack[-1]
+        for w in nbrs:
+            if blocked[w] == 1 and (length_bound is None or len(path) < length_bound):
+                Fw = F[w]
+                if target in Fw:
+                    yield path + [w]
                 else:
-                    for nbr in sccG[thisnode]:
-                        if thisnode not in B[nbr]:
-                            B[nbr].add(thisnode)
-                stack.pop()
-                #                assert path[-1] == thisnode
-                path.pop()
-        # done processing this node
-        H = subG.subgraph(scc)  # make smaller to avoid work in SCC routine
-        sccs.extend(scc for scc in nx.strongly_connected_components(H) if len(scc) > 1)
+                    Bw = B[w]
+                    if target in Bw:
+                        continue
+                    for v in Bw:
+                        blocked[v] += 1
+                    path.append(w)
+                    stack.append(iter(Fw))
+                    break
+        else:
+            stack.pop()
+            for v in B[path.pop()]:
+                blocked[v] -= 1
 
 
 @not_implemented_for("undirected")
@@ -269,6 +803,7 @@ def recursive_simple_cycles(G):
     --------
     simple_cycles, cycle_basis
     """
+
     # Jon Olav Vik, 2010-08-09
     def _unblock(thisnode):
         """Recursively unblock and remove nodes from B[thisnode]."""
@@ -529,7 +1064,7 @@ def minimum_cycle_basis(G, weight=None):
     --------
     simple_cycles, cycle_basis
     """
-    # We first split the graph in commected subgraphs
+    # We first split the graph in connected subgraphs
     return sum(
         (_min_cycle_basis(G.subgraph(c), weight) for c in nx.connected_components(G)),
         [],
diff --git a/networkx/algorithms/d_separation.py b/networkx/algorithms/d_separation.py
index caf26d0..ef54108 100644
--- a/networkx/algorithms/d_separation.py
+++ b/networkx/algorithms/d_separation.py
@@ -11,6 +11,65 @@ The implementation is based on the conceptually simple linear time
 algorithm presented in [2]_.  Refer to [3]_, [4]_ for a couple of
 alternative algorithms.
 
+Here, we provide a brief overview of d-separation and related concepts that
+are relevant for understanding it:
+
+Blocking paths
+--------------
+
+Before we overview, we introduce the following terminology to describe paths:
+
+- "open" path: A path between two nodes that can be traversed
+- "blocked" path: A path between two nodes that cannot be traversed
+
+A **collider** is a triplet of nodes along a path that is like the following:
+``... u -> c <- v ...``, where 'c' is a common successor of ``u`` and ``v``. A path
+through a collider is considered "blocked". When
+a node that is a collider, or a descendant of a collider is included in
+the d-separating set, then the path through that collider node is "open". If the
+path through the collider node is open, then we will call this node an open collider.
+
+The d-separation set blocks the paths between ``u`` and ``v``. If you include colliders,
+or their descendant nodes in the d-separation set, then those colliders will open up,
+enabling a path to be traversed if it is not blocked some other way.
+
+Illustration of D-separation with examples
+------------------------------------------
+
+For a pair of two nodes, ``u`` and ``v``, all paths are considered open if
+there is a path between ``u`` and ``v`` that is not blocked. That means, there is an open
+path between ``u`` and ``v`` that does not encounter a collider, or a variable in the
+d-separating set.
+
+For example, if the d-separating set is the empty set, then the following paths are
+unblocked between ``u`` and ``v``:
+
+- u <- z -> v
+- u -> w -> ... -> z -> v
+
+If for example, 'z' is in the d-separating set, then 'z' blocks those paths
+between ``u`` and ``v``.
+
+Colliders block a path by default if they and their descendants are not included
+in the d-separating set. An example of a path that is blocked when the d-separating
+set is empty is:
+
+- u -> w -> ... -> z <- v
+
+because 'z' is a collider in this path and 'z' is not in the d-separating set. However,
+if 'z' or a descendant of 'z' is included in the d-separating set, then the path through
+the collider at 'z' (... -> z <- ...) is now "open".
+
+D-separation is concerned with blocking all paths between u and v. Therefore, a
+d-separating set between ``u`` and ``v`` is one where all paths are blocked.
+
+D-separation and its applications in probability
+------------------------------------------------
+
+D-separation is commonly used in probabilistic graphical models. D-separation
+connects the idea of probabilistic "dependence" with separation in a graph. If
+one assumes the causal Markov condition [5]_, then d-separation implies conditional
+independence in probability distributions.
 
 Examples
 --------
@@ -55,6 +114,8 @@ References
 .. [4] Koller, D., & Friedman, N. (2009).
    Probabilistic graphical models: principles and techniques. The MIT Press.
 
+.. [5] https://en.wikipedia.org/wiki/Causal_Markov_condition
+
 """
 
 from collections import deque
@@ -62,7 +123,7 @@ from collections import deque
 import networkx as nx
 from networkx.utils import UnionFind, not_implemented_for
 
-__all__ = ["d_separated"]
+__all__ = ["d_separated", "minimal_d_separator", "is_minimal_d_separator"]
 
 
 @not_implemented_for("undirected")
@@ -100,6 +161,15 @@ def d_separated(G, x, y, z):
         If any of the input nodes are not found in the graph,
         a :exc:`NodeNotFound` exception is raised.
 
+    Notes
+    -----
+    A d-separating set in a DAG is a set of nodes that
+    blocks all paths between the two sets. Nodes in `z`
+    block a path if they are part of the path and are not a collider,
+    or a descendant of a collider. A collider structure along a path
+    is ``... -> c <- ...`` where ``c`` is the collider node.
+
+    https://en.wikipedia.org/wiki/Bayesian_network#d-separation
     """
 
     if not nx.is_directed_acyclic_graph(G):
@@ -140,3 +210,232 @@ def d_separated(G, x, y, z):
         return False
     else:
         return True
+
+
+@not_implemented_for("undirected")
+def minimal_d_separator(G, u, v):
+    """Compute a minimal d-separating set between 'u' and 'v'.
+
+    A d-separating set in a DAG is a set of nodes that blocks all paths
+    between the two nodes, 'u' and 'v'. This function
+constructs a d-separating set that is "minimal", meaning that no proper
+subset of it is itself a d-separating set for 'u' and 'v' (this is weaker
+than being a smallest-cardinality set). It is not necessarily
+unique. For more details, see Notes.
+
+    Parameters
+    ----------
+    G : graph
+        A networkx DAG.
+    u : node
+        A node in the graph, G.
+    v : node
+        A node in the graph, G.
+
+    Raises
+    ------
+    NetworkXError
+        Raises a :exc:`NetworkXError` if the input graph is not a DAG.
+
+    NodeNotFound
+        If any of the input nodes are not found in the graph,
+        a :exc:`NodeNotFound` exception is raised.
+
+    References
+    ----------
+    .. [1] Tian, J., & Paz, A. (1998). Finding Minimal D-separators.
+
+    Notes
+    -----
+    This function only finds *a* minimal d-separator. It does not guarantee
+    uniqueness, since in a DAG there may be more than one minimal d-separator
+    between two nodes. Moreover, this only checks for minimal separators
+    between two nodes, not two sets. Finding minimal d-separators between
+    two sets of nodes is not supported.
+
+    Uses the algorithm presented in [1]_. The complexity of the algorithm
+    is :math:`O(|E_{An}^m|)`, where :math:`|E_{An}^m|` stands for the
+    number of edges in the moralized graph of the sub-graph consisting
+    of only the ancestors of 'u' and 'v'. For full details, see [1]_.
+
+    The algorithm works by constructing the moral graph consisting of just
+    the ancestors of `u` and `v`. Then it constructs a candidate for
+    a separating set ``Z'`` from the predecessors of `u` and `v`.
+    Then BFS is run starting from `u` and marking nodes
+    found from ``Z'`` and calling those nodes ``Z''``.
+    Then BFS is run again starting from `v` and marking nodes if they are
+    present in ``Z''``. Those marked nodes are the returned minimal
+    d-separating set.
+
+    https://en.wikipedia.org/wiki/Bayesian_network#d-separation
+    """
+    if not nx.is_directed_acyclic_graph(G):
+        raise nx.NetworkXError("graph should be directed acyclic")
+
+    union_uv = {u, v}
+
+    if any(n not in G.nodes for n in union_uv):
+        raise nx.NodeNotFound("one or more specified nodes not found in the graph")
+
+    # first construct the set of ancestors of X and Y
+    x_anc = nx.ancestors(G, u)
+    y_anc = nx.ancestors(G, v)
+    D_anc_xy = x_anc.union(y_anc)
+    D_anc_xy.update((u, v))
+
+    # second, construct the moralization of the subgraph of Anc(X,Y)
+    moral_G = nx.moral_graph(G.subgraph(D_anc_xy))
+
+    # find a separating set Z' in moral_G
+    Z_prime = set(G.predecessors(u)).union(set(G.predecessors(v)))
+
+    # perform BFS on the graph from 'x' to mark
+    Z_dprime = _bfs_with_marks(moral_G, u, Z_prime)
+    Z = _bfs_with_marks(moral_G, v, Z_dprime)
+    return Z
+
+
+@not_implemented_for("undirected")
+def is_minimal_d_separator(G, u, v, z):
+    """Determine if a d-separating set is minimal.
+
+    A d-separating set, `z`, in a DAG is a set of nodes that blocks
+    all paths between the two nodes, `u` and `v`. This function
+    verifies that a set is "minimal", meaning that no proper subset of `z`
+    is also a d-separating set between the two nodes.
+
+    Parameters
+    ----------
+    G : nx.DiGraph
+        The graph.
+    u : node
+        A node in the graph.
+    v : node
+        A node in the graph.
+    z : Set of nodes
+        The set of nodes to check if it is a minimal d-separating set.
+
+    Returns
+    -------
+    bool
+        Whether or not the `z` separating set is minimal.
+
+    Raises
+    ------
+    NetworkXError
+        Raises a :exc:`NetworkXError` if the input graph is not a DAG.
+
+    NodeNotFound
+        If any of the input nodes are not found in the graph,
+        a :exc:`NodeNotFound` exception is raised.
+
+    References
+    ----------
+    .. [1] Tian, J., & Paz, A. (1998). Finding Minimal D-separators.
+
+    Notes
+    -----
+    This function only works on verifying a d-separating set is minimal
+    between two nodes. To verify that a d-separating set is minimal between
+    two sets of nodes is not supported.
+
+    Uses algorithm 2 presented in [1]_. The complexity of the algorithm
+    is :math:`O(|E_{An}^m|)`, where :math:`|E_{An}^m|` stands for the
+    number of edges in the moralized graph of the sub-graph consisting
+    of only the ancestors of ``u`` and ``v``.
+
+    The algorithm works by constructing the moral graph consisting of just
+    the ancestors of `u` and `v`. First, it performs BFS on the moral graph
+    starting from `u` and marking any nodes it encounters that are part of
+    the separating set, `z`. If a node is marked, then it does not continue
+    along that path. In the second stage, BFS with markings is repeated on the
+    moral graph starting from `v`. If at any stage, any node in `z` is
+    not marked, then `z` is considered not minimal. If the end of the algorithm
+    is reached, then `z` is minimal.
+
+    For full details, see [1]_.
+
+    https://en.wikipedia.org/wiki/Bayesian_network#d-separation
+    """
+    if not nx.is_directed_acyclic_graph(G):
+        raise nx.NetworkXError("graph should be directed acyclic")
+
+    union_uv = {u, v}
+    union_uv.update(z)
+
+    if any(n not in G.nodes for n in union_uv):
+        raise nx.NodeNotFound("one or more specified nodes not found in the graph")
+
+    x_anc = nx.ancestors(G, u)
+    y_anc = nx.ancestors(G, v)
+    xy_anc = x_anc.union(y_anc)
+
+    # if Z contains any node which is not in ancestors of X or Y
+    # then it is definitely not minimal
+    if any(node not in xy_anc for node in z):
+        return False
+
+    D_anc_xy = x_anc.union(y_anc)
+    D_anc_xy.update((u, v))
+
+    # second, construct the moralization of the subgraph
+    moral_G = nx.moral_graph(G.subgraph(D_anc_xy))
+
+    # start BFS from X
+    marks = _bfs_with_marks(moral_G, u, z)
+
+    # if not all the Z is marked, then the set is not minimal
+    if any(node not in marks for node in z):
+        return False
+
+    # similarly, start BFS from Y and check the marks
+    marks = _bfs_with_marks(moral_G, v, z)
+    # if not all the Z is marked, then the set is not minimal
+    if any(node not in marks for node in z):
+        return False
+
+    return True
+
+
+@not_implemented_for("directed")
+def _bfs_with_marks(G, start_node, check_set):
+    """Breadth-first-search with markings.
+
+    Performs BFS starting from ``start_node`` and whenever a node
+    inside ``check_set`` is met, it is "marked". Once a node is marked,
+    BFS does not continue along that path. The resulting marked nodes
+    are returned.
+
+    Parameters
+    ----------
+    G : nx.Graph
+        An undirected graph.
+    start_node : node
+        The start of the BFS.
+    check_set : set
+        The set of nodes to check against.
+
+    Returns
+    -------
+    marked : set
+        A set of nodes that were marked.
+    """
+    visited = {}
+    marked = set()
+    queue = []
+
+    visited[start_node] = None
+    queue.append(start_node)
+    while queue:
+        m = queue.pop(0)
+
+        for nbr in G.neighbors(m):
+            if nbr not in visited:
+                # memoize where we visited so far
+                visited[nbr] = None
+
+                # mark the node in Z' and do not continue along that path
+                if nbr in check_set:
+                    marked.add(nbr)
+                else:
+                    queue.append(nbr)
+    return marked
diff --git a/networkx/algorithms/dag.py b/networkx/algorithms/dag.py
index 27f1a82..2096812 100644
--- a/networkx/algorithms/dag.py
+++ b/networkx/algorithms/dag.py
@@ -8,7 +8,7 @@ to the user to check for that.
 import heapq
 from collections import deque
 from functools import partial
-from itertools import chain, product, starmap
+from itertools import chain, combinations, product, starmap
 from math import gcd
 
 import networkx as nx
@@ -30,11 +30,13 @@ __all__ = [
     "dag_longest_path",
     "dag_longest_path_length",
     "dag_to_branching",
+    "compute_v_structures",
 ]
 
 chaini = chain.from_iterable
 
 
+@nx._dispatch
 def descendants(G, source):
     """Returns all nodes reachable from `source` in `G`.
 
@@ -56,9 +58,14 @@ def descendants(G, source):
     Examples
     --------
     >>> DG = nx.path_graph(5, create_using=nx.DiGraph)
-    >>> sorted(list(nx.descendants(DG, 2)))
+    >>> sorted(nx.descendants(DG, 2))
     [3, 4]
 
+    The `source` node is not a descendant of itself, but can be included manually:
+
+    >>> sorted(nx.descendants(DG, 2) | {2})
+    [2, 3, 4]
+
     See also
     --------
     ancestors
@@ -66,6 +73,7 @@ def descendants(G, source):
     return {child for parent, child in nx.bfs_edges(G, source)}
 
 
+@nx._dispatch
 def ancestors(G, source):
     """Returns all nodes having a path to `source` in `G`.
 
@@ -87,9 +95,14 @@ def ancestors(G, source):
     Examples
     --------
     >>> DG = nx.path_graph(5, create_using=nx.DiGraph)
-    >>> sorted(list(nx.ancestors(DG, 2)))
+    >>> sorted(nx.ancestors(DG, 2))
     [0, 1]
 
+    The `source` node is not an ancestor of itself, but can be included manually:
+
+    >>> sorted(nx.ancestors(DG, 2) | {2})
+    [0, 1, 2]
+
     See also
     --------
     descendants
@@ -494,7 +507,7 @@ def all_topological_sorts(G):
 
     # do-while construct
     while True:
-        assert all([count[v] == 0 for v in D])
+        assert all(count[v] == 0 for v in D)
 
         if len(current_sort) == len(G):
             yield list(current_sort)
@@ -995,7 +1008,15 @@ def dag_longest_path(G, weight="weight", default_weight=1, topo_order=None):
     dist = {}  # stores {v : (length, u)}
     for v in topo_order:
         us = [
-            (dist[u][0] + data.get(weight, default_weight), u)
+            (
+                dist[u][0]
+                + (
+                    max(data.values(), key=lambda x: x.get(weight, default_weight))
+                    if G.is_multigraph()
+                    else data
+                ).get(weight, default_weight),
+                u,
+            )
             for u, data in G.pred[v].items()
         ]
 
@@ -1057,8 +1078,13 @@ def dag_longest_path_length(G, weight="weight", default_weight=1):
     """
     path = nx.dag_longest_path(G, weight, default_weight)
     path_length = 0
-    for (u, v) in pairwise(path):
-        path_length += G[u][v].get(weight, default_weight)
+    if G.is_multigraph():
+        for u, v in pairwise(path):
+            i = max(G[u][v], key=lambda x: G[u][v][x].get(weight, default_weight))
+            path_length += G[u][v][i].get(weight, default_weight)
+    else:
+        for u, v in pairwise(path):
+            path_length += G[u][v].get(weight, default_weight)
 
     return path_length
 
@@ -1177,3 +1203,40 @@ def dag_to_branching(G):
     B.remove_node(0)
     B.remove_node(-1)
     return B
+
+
+@not_implemented_for("undirected")
+def compute_v_structures(G):
+    """Iterate through the graph to compute all v-structures.
+
+    V-structures are triples in the directed graph where
+    two parent nodes point to the same child and the two parent nodes
+    are not adjacent.
+
+    Parameters
+    ----------
+    G : graph
+        A networkx DiGraph.
+
+    Returns
+    -------
+    vstructs : iterator of tuples
+        The v structures within the graph. Each v structure is a 3-tuple with the
+        parent, collider, and other parent.
+
+    Examples
+    --------
+    >>> G = nx.DiGraph()
+    >>> G.add_edges_from([(1, 2), (0, 5), (3, 1), (2, 4), (3, 1), (4, 5), (1, 5)])
+    >>> list(nx.compute_v_structures(G))
+    [(0, 5, 4), (0, 5, 1), (1, 5, 4)]
+
+    Notes
+    -----
+    https://en.wikipedia.org/wiki/Collider_(statistics)
+    """
+    for collider, preds in G.pred.items():
+        for common_parents in combinations(preds, r=2):
+            # ensure that the colliders are the same
+            common_parents = sorted(common_parents)
+            yield (common_parents[0], collider, common_parents[1])
diff --git a/networkx/algorithms/distance_measures.py b/networkx/algorithms/distance_measures.py
index 3f59a2a..cfb0fdc 100644
--- a/networkx/algorithms/distance_measures.py
+++ b/networkx/algorithms/distance_measures.py
@@ -4,7 +4,6 @@ import networkx as nx
 from networkx.utils import not_implemented_for
 
 __all__ = [
-    "extrema_bounding",
     "eccentricity",
     "diameter",
     "radius",
@@ -15,15 +14,9 @@ __all__ = [
 ]
 
 
-def extrema_bounding(G, compute="diameter"):
+def _extrema_bounding(G, compute="diameter", weight=None):
     """Compute requested extreme distance metric of undirected graph G
 
-    .. deprecated:: 2.8
-
-       extrema_bounding is deprecated and will be removed in NetworkX 3.0.
-       Use the corresponding distance measure with the `usebounds=True` option
-       instead.
-
     Computation is based on smart lower and upper bounds, and in practice
     linear in the number of nodes, rather than quadratic (except for some
     border cases such as complete graphs or circle shaped graphs).
@@ -40,70 +33,25 @@ def extrema_bounding(G, compute="diameter"):
        "center" for the set of nodes with eccentricity equal to the radius,
        "eccentricities" for the maximum distance from each node to all other nodes in G
 
-    Returns
-    -------
-    value : value of the requested metric
-       int for "diameter" and "radius" or
-       list of nodes for "center" and "periphery" or
-       dictionary of eccentricity values keyed by node for "eccentricities"
-
-    Raises
-    ------
-    NetworkXError
-        If the graph consists of multiple components
-    ValueError
-        If `compute` is not one of "diameter", "radius", "periphery", "center",
-        or "eccentricities".
-
-    Notes
-    -----
-    This algorithm was proposed in the following papers:
-
-    F.W. Takes and W.A. Kosters, Determining the Diameter of Small World
-    Networks, in Proceedings of the 20th ACM International Conference on
-    Information and Knowledge Management (CIKM 2011), pp. 1191-1196, 2011.
-    doi: https://doi.org/10.1145/2063576.2063748
-
-    F.W. Takes and W.A. Kosters, Computing the Eccentricity Distribution of
-    Large Graphs, Algorithms 6(1): 100-118, 2013.
-    doi: https://doi.org/10.3390/a6010100
-
-    M. Borassi, P. Crescenzi, M. Habib, W.A. Kosters, A. Marino and F.W. Takes,
-    Fast Graph Diameter and Radius BFS-Based Computation in (Weakly Connected)
-    Real-World Graphs, Theoretical Computer Science 586: 59-80, 2015.
-    doi: https://doi.org/10.1016/j.tcs.2015.02.033
-    """
-    import warnings
-
-    msg = "extrema_bounding is deprecated and will be removed in networkx 3.0\n"
-    # NOTE: _extrema_bounding does input checking, so it is skipped here
-    if compute in {"diameter", "radius", "periphery", "center"}:
-        msg += f"Use nx.{compute}(G, usebounds=True) instead."
-    if compute == "eccentricities":
-        msg += f"Use nx.eccentricity(G) instead."
-    warnings.warn(msg, DeprecationWarning, stacklevel=2)
-
-    return _extrema_bounding(G, compute=compute)
+    weight : string, function, or None
+        If this is a string, then edge weights will be accessed via the
+        edge attribute with this key (that is, the weight of the edge
+        joining `u` to `v` will be ``G.edges[u, v][weight]``). If no
+        such edge attribute exists, the weight of the edge is assumed to
+        be one.
 
+        If this is a function, the weight of an edge is the value
+        returned by the function. The function must accept exactly three
+        positional arguments: the two endpoints of an edge and the
+        dictionary of edge attributes for that edge. The function must
+        return a number.
 
-def _extrema_bounding(G, compute="diameter"):
-    """Compute requested extreme distance metric of undirected graph G
+        If this is None, every edge has weight/distance/cost 1.
 
-    Computation is based on smart lower and upper bounds, and in practice
-    linear in the number of nodes, rather than quadratic (except for some
-    border cases such as complete graphs or circle shaped graphs).
+        Weights stored as floating point values can lead to small round-off
+        errors in distances. Use integer weights to avoid this.
 
-    Parameters
-    ----------
-    G : NetworkX graph
-       An undirected graph
-
-    compute : string denoting the requesting metric
-       "diameter" for the maximal eccentricity value,
-       "radius" for the minimal eccentricity value,
-       "periphery" for the set of nodes with eccentricity equal to the diameter,
-       "center" for the set of nodes with eccentricity equal to the radius,
-       "eccentricities" for the maximum distance from each node to all other nodes in G
+        Weights should be positive, since they are distances.
 
     Returns
     -------
@@ -118,25 +66,26 @@ def _extrema_bounding(G, compute="diameter"):
         If the graph consists of multiple components
     ValueError
         If `compute` is not one of "diameter", "radius", "periphery", "center", or "eccentricities".
+
     Notes
     -----
-    This algorithm was proposed in the following papers:
-
-    F.W. Takes and W.A. Kosters, Determining the Diameter of Small World
-    Networks, in Proceedings of the 20th ACM International Conference on
-    Information and Knowledge Management (CIKM 2011), pp. 1191-1196, 2011.
-    doi: https://doi.org/10.1145/2063576.2063748
+    This algorithm was proposed in [1]_ and discussed further in [2]_ and [3]_.
 
-    F.W. Takes and W.A. Kosters, Computing the Eccentricity Distribution of
-    Large Graphs, Algorithms 6(1): 100-118, 2013.
-    doi: https://doi.org/10.3390/a6010100
-
-    M. Borassi, P. Crescenzi, M. Habib, W.A. Kosters, A. Marino and F.W. Takes,
-    Fast Graph Diameter and Radius BFS-Based Computation in (Weakly Connected)
-    Real-World Graphs, Theoretical Computer Science 586: 59-80, 2015.
-    doi: https://doi.org/10.1016/j.tcs.2015.02.033
+    References
+    ----------
+    .. [1] F. W. Takes, W. A. Kosters,
+       "Determining the diameter of small world networks."
+       Proceedings of the 20th ACM international conference on Information and knowledge management, 2011
+       https://dl.acm.org/doi/abs/10.1145/2063576.2063748
+    .. [2] F. W. Takes, W. A. Kosters,
+       "Computing the Eccentricity Distribution of Large Graphs."
+       Algorithms, 2013
+       https://www.mdpi.com/1999-4893/6/1/100
+    .. [3] M. Borassi, P. Crescenzi, M. Habib, W. A. Kosters, A. Marino, F. W. Takes,
+       "Fast diameter and radius BFS-based computation in (weakly connected) real-world graphs: With an application to the six degrees of separation games. "
+       Theoretical Computer Science, 2015
+       https://www.sciencedirect.com/science/article/pii/S0304397515001644
     """
-
     # init variables
     degrees = dict(G.degree())  # start with the highest degree node
     minlowernode = max(degrees, key=degrees.get)
@@ -163,7 +112,8 @@ def _extrema_bounding(G, compute="diameter"):
         high = not high
 
         # get distances from/to current node and derive eccentricity
-        dist = dict(nx.single_source_shortest_path_length(G, current))
+        dist = nx.shortest_path_length(G, source=current, weight=weight)
+
         if len(dist) != N:
             msg = "Cannot compute metric because graph is not connected."
             raise nx.NetworkXError(msg)
@@ -272,20 +222,20 @@ def _extrema_bounding(G, compute="diameter"):
     # return the correct value of the requested metric
     if compute == "diameter":
         return maxlower
-    elif compute == "radius":
+    if compute == "radius":
         return minupper
-    elif compute == "periphery":
+    if compute == "periphery":
         p = [v for v in G if ecc_lower[v] == maxlower]
         return p
-    elif compute == "center":
+    if compute == "center":
         c = [v for v in G if ecc_upper[v] == minupper]
         return c
-    elif compute == "eccentricities":
+    if compute == "eccentricities":
         return ecc_lower
     return None
 
 
-def eccentricity(G, v=None, sp=None):
+def eccentricity(G, v=None, sp=None, weight=None):
     """Returns the eccentricity of nodes in G.
 
     The eccentricity of a node v is the maximum distance from v to
@@ -302,6 +252,26 @@ def eccentricity(G, v=None, sp=None):
     sp : dict of dicts, optional
        All pairs shortest path lengths as a dictionary of dictionaries
 
+    weight : string, function, or None (default=None)
+        If this is a string, then edge weights will be accessed via the
+        edge attribute with this key (that is, the weight of the edge
+        joining `u` to `v` will be ``G.edges[u, v][weight]``). If no
+        such edge attribute exists, the weight of the edge is assumed to
+        be one.
+
+        If this is a function, the weight of an edge is the value
+        returned by the function. The function must accept exactly three
+        positional arguments: the two endpoints of an edge and the
+        dictionary of edge attributes for that edge. The function must
+        return a number.
+
+        If this is None, every edge has weight/distance/cost 1.
+
+        Weights stored as floating point values can lead to small round-off
+        errors in distances. Use integer weights to avoid this.
+
+        Weights should be positive, since they are distances.
+
     Returns
     -------
     ecc : dictionary
@@ -324,11 +294,11 @@ def eccentricity(G, v=None, sp=None):
     #    else:                      # assume v is a container of nodes
     #        nodes=v
     order = G.order()
-
     e = {}
     for n in G.nbunch_iter(v):
         if sp is None:
-            length = nx.single_source_shortest_path_length(G, n)
+            length = nx.shortest_path_length(G, source=n, weight=weight)
+
             L = len(length)
         else:
             try:
@@ -350,11 +320,10 @@ def eccentricity(G, v=None, sp=None):
 
     if v in G:
         return e[v]  # return single value
-    else:
-        return e
+    return e
 
 
-def diameter(G, e=None, usebounds=False):
+def diameter(G, e=None, usebounds=False, weight=None):
     """Returns the diameter of the graph G.
 
     The diameter is the maximum eccentricity.
@@ -367,6 +336,26 @@ def diameter(G, e=None, usebounds=False):
     e : eccentricity dictionary, optional
       A precomputed dictionary of eccentricities.
 
+    weight : string, function, or None
+        If this is a string, then edge weights will be accessed via the
+        edge attribute with this key (that is, the weight of the edge
+        joining `u` to `v` will be ``G.edges[u, v][weight]``). If no
+        such edge attribute exists, the weight of the edge is assumed to
+        be one.
+
+        If this is a function, the weight of an edge is the value
+        returned by the function. The function must accept exactly three
+        positional arguments: the two endpoints of an edge and the
+        dictionary of edge attributes for that edge. The function must
+        return a number.
+
+        If this is None, every edge has weight/distance/cost 1.
+
+        Weights stored as floating point values can lead to small round-off
+        errors in distances. Use integer weights to avoid this.
+
+        Weights should be positive, since they are distances.
+
     Returns
     -------
     d : integer
@@ -383,13 +372,13 @@ def diameter(G, e=None, usebounds=False):
     eccentricity
     """
     if usebounds is True and e is None and not G.is_directed():
-        return _extrema_bounding(G, compute="diameter")
+        return _extrema_bounding(G, compute="diameter", weight=weight)
     if e is None:
-        e = eccentricity(G)
+        e = eccentricity(G, weight=weight)
     return max(e.values())
 
 
-def periphery(G, e=None, usebounds=False):
+def periphery(G, e=None, usebounds=False, weight=None):
     """Returns the periphery of the graph G.
 
     The periphery is the set of nodes with eccentricity equal to the diameter.
@@ -402,6 +391,26 @@ def periphery(G, e=None, usebounds=False):
     e : eccentricity dictionary, optional
       A precomputed dictionary of eccentricities.
 
+    weight : string, function, or None
+        If this is a string, then edge weights will be accessed via the
+        edge attribute with this key (that is, the weight of the edge
+        joining `u` to `v` will be ``G.edges[u, v][weight]``). If no
+        such edge attribute exists, the weight of the edge is assumed to
+        be one.
+
+        If this is a function, the weight of an edge is the value
+        returned by the function. The function must accept exactly three
+        positional arguments: the two endpoints of an edge and the
+        dictionary of edge attributes for that edge. The function must
+        return a number.
+
+        If this is None, every edge has weight/distance/cost 1.
+
+        Weights stored as floating point values can lead to small round-off
+        errors in distances. Use integer weights to avoid this.
+
+        Weights should be positive, since they are distances.
+
     Returns
     -------
     p : list
@@ -419,15 +428,15 @@ def periphery(G, e=None, usebounds=False):
     center
     """
     if usebounds is True and e is None and not G.is_directed():
-        return _extrema_bounding(G, compute="periphery")
+        return _extrema_bounding(G, compute="periphery", weight=weight)
     if e is None:
-        e = eccentricity(G)
+        e = eccentricity(G, weight=weight)
     diameter = max(e.values())
     p = [v for v in e if e[v] == diameter]
     return p
 
 
-def radius(G, e=None, usebounds=False):
+def radius(G, e=None, usebounds=False, weight=None):
     """Returns the radius of the graph G.
 
     The radius is the minimum eccentricity.
@@ -440,6 +449,26 @@ def radius(G, e=None, usebounds=False):
     e : eccentricity dictionary, optional
       A precomputed dictionary of eccentricities.
 
+    weight : string, function, or None
+        If this is a string, then edge weights will be accessed via the
+        edge attribute with this key (that is, the weight of the edge
+        joining `u` to `v` will be ``G.edges[u, v][weight]``). If no
+        such edge attribute exists, the weight of the edge is assumed to
+        be one.
+
+        If this is a function, the weight of an edge is the value
+        returned by the function. The function must accept exactly three
+        positional arguments: the two endpoints of an edge and the
+        dictionary of edge attributes for that edge. The function must
+        return a number.
+
+        If this is None, every edge has weight/distance/cost 1.
+
+        Weights stored as floating point values can lead to small round-off
+        errors in distances. Use integer weights to avoid this.
+
+        Weights should be positive, since they are distances.
+
     Returns
     -------
     r : integer
@@ -453,13 +482,13 @@ def radius(G, e=None, usebounds=False):
 
     """
     if usebounds is True and e is None and not G.is_directed():
-        return _extrema_bounding(G, compute="radius")
+        return _extrema_bounding(G, compute="radius", weight=weight)
     if e is None:
-        e = eccentricity(G)
+        e = eccentricity(G, weight=weight)
     return min(e.values())
 
 
-def center(G, e=None, usebounds=False):
+def center(G, e=None, usebounds=False, weight=None):
     """Returns the center of the graph G.
 
     The center is the set of nodes with eccentricity equal to radius.
@@ -472,6 +501,26 @@ def center(G, e=None, usebounds=False):
     e : eccentricity dictionary, optional
       A precomputed dictionary of eccentricities.
 
+    weight : string, function, or None
+        If this is a string, then edge weights will be accessed via the
+        edge attribute with this key (that is, the weight of the edge
+        joining `u` to `v` will be ``G.edges[u, v][weight]``). If no
+        such edge attribute exists, the weight of the edge is assumed to
+        be one.
+
+        If this is a function, the weight of an edge is the value
+        returned by the function. The function must accept exactly three
+        positional arguments: the two endpoints of an edge and the
+        dictionary of edge attributes for that edge. The function must
+        return a number.
+
+        If this is None, every edge has weight/distance/cost 1.
+
+        Weights stored as floating point values can lead to small round-off
+        errors in distances. Use integer weights to avoid this.
+
+        Weights should be positive, since they are distances.
+
     Returns
     -------
     c : list
@@ -489,9 +538,9 @@ def center(G, e=None, usebounds=False):
     periphery
     """
     if usebounds is True and e is None and not G.is_directed():
-        return _extrema_bounding(G, compute="center")
+        return _extrema_bounding(G, compute="center", weight=weight)
     if e is None:
-        e = eccentricity(G)
+        e = eccentricity(G, weight=weight)
     radius = min(e.values())
     p = [v for v in e if e[v] == radius]
     return p
@@ -632,14 +681,23 @@ def resistance_distance(G, nodeA, nodeB, weight=None, invert_weight=True):
 
     Notes
     -----
-    Overview discussion:
-    * https://en.wikipedia.org/wiki/Resistance_distance
-    * http://mathworld.wolfram.com/ResistanceDistance.html
-
-    Additional details:
-    Vaya Sapobi Samui Vos, “Methods for determining the effective resistance,” M.S.,
-    Mathematisch Instituut, Universiteit Leiden, Leiden, Netherlands, 2016
-    Available: `Link to thesis <https://www.universiteitleiden.nl/binaries/content/assets/science/mi/scripties/master/vos_vaya_master.pdf>`_
+    Overviews are provided in [1]_ and [2]_. Additional details on computational
+    methods, proofs of properties, and corresponding MATLAB codes are provided
+    in [3]_.
+
+    References
+    ----------
+    .. [1] Wikipedia
+       "Resistance distance."
+       https://en.wikipedia.org/wiki/Resistance_distance
+    .. [2] E. W. Weisstein
+       "Resistance Distance."
+       MathWorld--A Wolfram Web Resource
+       https://mathworld.wolfram.com/ResistanceDistance.html
+    .. [3] V. S. S. Vos,
+       "Methods for determining the effective resistance."
+       Mestrado, Mathematisch Instituut Universiteit Leiden, 2016
+       https://www.universiteitleiden.nl/binaries/content/assets/science/mi/scripties/master/vos_vaya_master.pdf
     """
     import numpy as np
     import scipy as sp
@@ -663,15 +721,15 @@ def resistance_distance(G, nodeA, nodeB, weight=None, invert_weight=True):
 
     if invert_weight and weight is not None:
         if G.is_multigraph():
-            for (u, v, k, d) in G.edges(keys=True, data=True):
+            for u, v, k, d in G.edges(keys=True, data=True):
                 d[weight] = 1 / d[weight]
         else:
-            for (u, v, d) in G.edges(data=True):
+            for u, v, d in G.edges(data=True):
                 d[weight] = 1 / d[weight]
     # Replace with collapsing topology or approximated zero?
 
     # Using determinants to compute the effective resistance is more memory
-    # efficent than directly calculating the psuedo-inverse
+    # efficient than directly calculating the pseudo-inverse
     L = nx.laplacian_matrix(G, node_list, weight=weight).asformat("csc")
     indices = list(range(L.shape[0]))
     # w/ nodeA removed
@@ -684,7 +742,7 @@ def resistance_distance(G, nodeA, nodeB, weight=None, invert_weight=True):
     # Factorize Laplacian submatrixes and extract diagonals
     # Order the diagonals to minimize the likelihood over overflows
     # during computing the determinant
-    lu_a = sp.sparse.linalg.splu(L_a, options=dict(SymmetricMode=True))
+    lu_a = sp.sparse.linalg.splu(L_a, options={"SymmetricMode": True})
     LdiagA = lu_a.U.diagonal()
     LdiagA_s = np.product(np.sign(LdiagA)) * np.product(lu_a.L.diagonal())
     LdiagA_s *= (-1) ** _count_lu_permutations(lu_a.perm_r)
@@ -692,7 +750,7 @@ def resistance_distance(G, nodeA, nodeB, weight=None, invert_weight=True):
     LdiagA = np.absolute(LdiagA)
     LdiagA = np.sort(LdiagA)
 
-    lu_ab = sp.sparse.linalg.splu(L_ab, options=dict(SymmetricMode=True))
+    lu_ab = sp.sparse.linalg.splu(L_ab, options={"SymmetricMode": True})
     LdiagAB = lu_ab.U.diagonal()
     LdiagAB_s = np.product(np.sign(LdiagAB)) * np.product(lu_ab.L.diagonal())
     LdiagAB_s *= (-1) ** _count_lu_permutations(lu_ab.perm_r)
diff --git a/networkx/algorithms/dominating.py b/networkx/algorithms/dominating.py
index 32fff4d..042e2b8 100644
--- a/networkx/algorithms/dominating.py
+++ b/networkx/algorithms/dominating.py
@@ -64,6 +64,7 @@ def dominating_set(G, start_with=None):
     return dominating_set
 
 
+@nx._dispatch
 def is_dominating_set(G, nbunch):
     """Checks if `nbunch` is a dominating set for `G`.
 
diff --git a/networkx/algorithms/efficiency_measures.py b/networkx/algorithms/efficiency_measures.py
index 45f19cd..3234399 100644
--- a/networkx/algorithms/efficiency_measures.py
+++ b/networkx/algorithms/efficiency_measures.py
@@ -28,6 +28,12 @@ def efficiency(G, u, v):
     float
         Multiplicative inverse of the shortest path distance between the nodes.
 
+    Examples
+    --------
+    >>> G = nx.Graph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)])
+    >>> nx.efficiency(G, 2, 3)  # this gives efficiency for node 2 and 3
+    0.5
+
     Notes
     -----
     Edge weights are ignored when computing the shortest path distances.
@@ -71,6 +77,12 @@ def global_efficiency(G):
     float
         The average global efficiency of the graph.
 
+    Examples
+    --------
+    >>> G = nx.Graph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)])
+    >>> round(nx.global_efficiency(G), 12)
+    0.916666666667
+
     Notes
     -----
     Edge weights are ignored when computing the shortest path distances.
@@ -126,6 +138,12 @@ def local_efficiency(G):
     float
         The average local efficiency of the graph.
 
+    Examples
+    --------
+    >>> G = nx.Graph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)])
+    >>> nx.local_efficiency(G)
+    0.9166666666666667
+
     Notes
     -----
     Edge weights are ignored when computing the shortest path distances.
diff --git a/networkx/algorithms/euler.py b/networkx/algorithms/euler.py
index e50a0e9..b68d5a8 100644
--- a/networkx/algorithms/euler.py
+++ b/networkx/algorithms/euler.py
@@ -382,7 +382,10 @@ def eulerian_path(G, source=None, keys=False):
 
 @not_implemented_for("directed")
 def eulerize(G):
-    """Transforms a graph into an Eulerian graph
+    """Transforms a graph into an Eulerian graph.
+
+    If `G` is Eulerian the result is `G` as a MultiGraph, otherwise the result is a smallest
+    (in terms of the number of edges) multigraph whose underlying simple graph is `G`.
 
     Parameters
     ----------
@@ -434,13 +437,21 @@ def eulerize(G):
         for m, n in combinations(odd_degree_nodes, 2)
     ]
 
-    # use inverse path lengths as edge-weights in a new graph
+    # use the number of vertices in a graph + 1 as an upper bound on
+    # the maximum length of a path in G
+    upper_bound_on_max_path_length = len(G) + 1
+
+    # use "len(G) + 1 - len(P)",
+    # where P is a shortest path between vertices n and m,
+    # as edge-weights in a new graph
     # store the paths in the graph for easy indexing later
     Gp = nx.Graph()
     for n, Ps in odd_deg_pairs_paths:
         for m, P in Ps.items():
             if n != m:
-                Gp.add_edge(m, n, weight=1 / len(P), path=P)
+                Gp.add_edge(
+                    m, n, weight=upper_bound_on_max_path_length - len(P), path=P
+                )
 
     # find the minimum weight matching of edges in the weighted graph
     best_matching = nx.Graph(list(nx.max_weight_matching(Gp)))
diff --git a/networkx/algorithms/flow/boykovkolmogorov.py b/networkx/algorithms/flow/boykovkolmogorov.py
index fd96681..8bc84a0 100644
--- a/networkx/algorithms/flow/boykovkolmogorov.py
+++ b/networkx/algorithms/flow/boykovkolmogorov.py
@@ -317,7 +317,7 @@ def boykov_kolmogorov_impl(G, s, t, capacity, residual, cutoff):
         v = n
         while v is not None:
             path.append(v)
-            if v == s or v == t:
+            if v in (s, t):
                 base_dist = 0
                 break
             elif timestamp[v] == time:
diff --git a/networkx/algorithms/flow/capacityscaling.py b/networkx/algorithms/flow/capacityscaling.py
index b565077..374c104 100644
--- a/networkx/algorithms/flow/capacityscaling.py
+++ b/networkx/algorithms/flow/capacityscaling.py
@@ -290,7 +290,7 @@ def capacity_scaling(
         for u, v, e in nx.selfloop_edges(G, data=True)
     )
 
-    # Determine the maxmimum edge capacity.
+    # Determine the maximum edge capacity.
     wmax = max(chain([-inf], (e["capacity"] for u, v, e in R.edges(data=True))))
     if wmax == -inf:
         # Residual network has no edges.
diff --git a/networkx/algorithms/flow/maxflow.py b/networkx/algorithms/flow/maxflow.py
index 8d2fb8f..b0098d0 100644
--- a/networkx/algorithms/flow/maxflow.py
+++ b/networkx/algorithms/flow/maxflow.py
@@ -12,14 +12,6 @@ from .utils import build_flow_dict
 
 # Define the default flow function for computing maximum flow.
 default_flow_func = preflow_push
-# Functions that don't support cutoff for minimum cut computations.
-flow_funcs = [
-    boykov_kolmogorov,
-    dinitz,
-    edmonds_karp,
-    preflow_push,
-    shortest_augmenting_path,
-]
 
 __all__ = ["maximum_flow", "maximum_flow_value", "minimum_cut", "minimum_cut_value"]
 
@@ -452,7 +444,7 @@ def minimum_cut(flowG, _s, _t, capacity="capacity", flow_func=None, **kwargs):
     if not callable(flow_func):
         raise nx.NetworkXError("flow_func has to be callable.")
 
-    if kwargs.get("cutoff") is not None and flow_func in flow_funcs:
+    if kwargs.get("cutoff") is not None and flow_func is preflow_push:
         raise nx.NetworkXError("cutoff should not be specified.")
 
     R = flow_func(flowG, _s, _t, capacity=capacity, value_only=True, **kwargs)
@@ -603,7 +595,7 @@ def minimum_cut_value(flowG, _s, _t, capacity="capacity", flow_func=None, **kwar
     if not callable(flow_func):
         raise nx.NetworkXError("flow_func has to be callable.")
 
-    if kwargs.get("cutoff") is not None and flow_func in flow_funcs:
+    if kwargs.get("cutoff") is not None and flow_func is preflow_push:
         raise nx.NetworkXError("cutoff should not be specified.")
 
     R = flow_func(flowG, _s, _t, capacity=capacity, value_only=True, **kwargs)
diff --git a/networkx/algorithms/flow/networksimplex.py b/networkx/algorithms/flow/networksimplex.py
index 3e2eaf5..85902cc 100644
--- a/networkx/algorithms/flow/networksimplex.py
+++ b/networkx/algorithms/flow/networksimplex.py
@@ -15,7 +15,6 @@ class _DataEssentialsAndFunctions:
     def __init__(
         self, G, multigraph, demand="demand", capacity="capacity", weight="weight"
     ):
-
         # Number all nodes and edges and hereafter reference them using ONLY their numbers
         self.node_list = list(G)  # nodes
         self.node_indices = {u: i for i, u in enumerate(self.node_list)}  # node indices
diff --git a/networkx/algorithms/flow/tests/test_maxflow.py b/networkx/algorithms/flow/tests/test_maxflow.py
index 6448a76..026fba0 100644
--- a/networkx/algorithms/flow/tests/test_maxflow.py
+++ b/networkx/algorithms/flow/tests/test_maxflow.py
@@ -20,6 +20,7 @@ flow_funcs = {
     preflow_push,
     shortest_augmenting_path,
 }
+
 max_min_funcs = {nx.maximum_flow, nx.minimum_cut}
 flow_value_funcs = {nx.maximum_flow_value, nx.minimum_cut_value}
 interface_funcs = max_min_funcs & flow_value_funcs
@@ -427,32 +428,31 @@ class TestMaxFlowMinCutInterface:
 
     def test_minimum_cut_no_cutoff(self):
         G = self.G
-        for flow_func in flow_funcs:
-            pytest.raises(
-                nx.NetworkXError,
-                nx.minimum_cut,
-                G,
-                "x",
-                "y",
-                flow_func=flow_func,
-                cutoff=1.0,
-            )
-            pytest.raises(
-                nx.NetworkXError,
-                nx.minimum_cut_value,
-                G,
-                "x",
-                "y",
-                flow_func=flow_func,
-                cutoff=1.0,
-            )
+        pytest.raises(
+            nx.NetworkXError,
+            nx.minimum_cut,
+            G,
+            "x",
+            "y",
+            flow_func=preflow_push,
+            cutoff=1.0,
+        )
+        pytest.raises(
+            nx.NetworkXError,
+            nx.minimum_cut_value,
+            G,
+            "x",
+            "y",
+            flow_func=preflow_push,
+            cutoff=1.0,
+        )
 
     def test_kwargs(self):
         G = self.H
         fv = 1.0
         to_test = (
-            (shortest_augmenting_path, dict(two_phase=True)),
-            (preflow_push, dict(global_relabel_freq=5)),
+            (shortest_augmenting_path, {"two_phase": True}),
+            (preflow_push, {"global_relabel_freq": 5}),
         )
         for interface_func in interface_funcs:
             for flow_func, kwargs in to_test:
diff --git a/networkx/algorithms/flow/tests/test_maxflow_large_graph.py b/networkx/algorithms/flow/tests/test_maxflow_large_graph.py
index c62c0a9..fd36c11 100644
--- a/networkx/algorithms/flow/tests/test_maxflow_large_graph.py
+++ b/networkx/algorithms/flow/tests/test_maxflow_large_graph.py
@@ -1,7 +1,9 @@
 """Maximum flow algorithms test suite on large graphs.
 """
 
+import bz2
 import os
+import pickle
 
 import pytest
 
@@ -47,8 +49,10 @@ def gen_pyramid(N):
 
 def read_graph(name):
     dirname = os.path.dirname(__file__)
-    path = os.path.join(dirname, name + ".gpickle.bz2")
-    return nx.read_gpickle(path)
+    fname = os.path.join(dirname, name + ".gpickle.bz2")
+    with bz2.BZ2File(fname, "rb") as f:
+        G = pickle.load(f)
+    return G
 
 
 def validate_flows(G, s, t, soln_value, R, flow_func):
@@ -81,7 +85,7 @@ class TestMaxflowLargeGraph:
         G = nx.complete_graph(N)
         nx.set_edge_attributes(G, 5, "capacity")
         R = build_residual_network(G, "capacity")
-        kwargs = dict(residual=R)
+        kwargs = {"residual": R}
 
         for flow_func in flow_funcs:
             kwargs["flow_func"] = flow_func
@@ -94,7 +98,7 @@ class TestMaxflowLargeGraph:
         # N = 100 # this gives a graph with 5051 nodes
         G = gen_pyramid(N)
         R = build_residual_network(G, "capacity")
-        kwargs = dict(residual=R)
+        kwargs = {"residual": R}
 
         for flow_func in flow_funcs:
             kwargs["flow_func"] = flow_func
@@ -107,7 +111,7 @@ class TestMaxflowLargeGraph:
         s = 1
         t = len(G)
         R = build_residual_network(G, "capacity")
-        kwargs = dict(residual=R)
+        kwargs = {"residual": R}
 
         # do one flow_func to save time
         flow_func = flow_funcs[0]
@@ -123,7 +127,7 @@ class TestMaxflowLargeGraph:
         s = 1
         t = len(G)
         R = build_residual_network(G, "capacity")
-        kwargs = dict(residual=R)
+        kwargs = {"residual": R}
 
         for flow_func in flow_funcs:
             validate_flows(G, s, t, 1202018, flow_func(G, s, t, **kwargs), flow_func)
@@ -133,7 +137,7 @@ class TestMaxflowLargeGraph:
         s = 1
         t = len(G)
         R = build_residual_network(G, "capacity")
-        kwargs = dict(residual=R)
+        kwargs = {"residual": R}
 
         # do one flow_func to save time
         flow_func = flow_funcs[0]
diff --git a/networkx/algorithms/flow/tests/test_mincost.py b/networkx/algorithms/flow/tests/test_mincost.py
index 5a8c2d7..65603d3 100644
--- a/networkx/algorithms/flow/tests/test_mincost.py
+++ b/networkx/algorithms/flow/tests/test_mincost.py
@@ -1,4 +1,6 @@
+import bz2
 import os
+import pickle
 
 import pytest
 
@@ -436,7 +438,7 @@ class TestMinCostFlow:
         pytest.raises(nx.NetworkXNotImplemented, nx.capacity_scaling, G)
         G = nx.DiGraph()
         pytest.raises(nx.NetworkXError, nx.network_simplex, G)
-        pytest.raises(nx.NetworkXError, nx.capacity_scaling, G)
+        # pytest.raises(nx.NetworkXError, nx.capacity_scaling, G)
         G.add_node(0, demand=float("inf"))
         pytest.raises(nx.NetworkXError, nx.network_simplex, G)
         pytest.raises(nx.NetworkXUnfeasible, nx.capacity_scaling, G)
@@ -460,7 +462,8 @@ class TestMinCostFlow:
 
     def test_large(self):
         fname = os.path.join(os.path.dirname(__file__), "netgen-2.gpickle.bz2")
-        G = nx.read_gpickle(fname)
+        with bz2.BZ2File(fname, "rb") as f:
+            G = pickle.load(f)
         flowCost, flowDict = nx.network_simplex(G)
         assert 6749969302 == flowCost
         assert 6749969302 == nx.cost_of_flow(G, flowDict)
diff --git a/networkx/algorithms/flow/tests/test_networksimplex.py b/networkx/algorithms/flow/tests/test_networksimplex.py
index 0c25db9..40c4bf0 100644
--- a/networkx/algorithms/flow/tests/test_networksimplex.py
+++ b/networkx/algorithms/flow/tests/test_networksimplex.py
@@ -1,4 +1,6 @@
+import bz2
 import os
+import pickle
 
 import pytest
 
@@ -36,8 +38,8 @@ def simple_no_flow_graph():
 def get_flowcost_from_flowdict(G, flowDict):
     """Returns flow cost calculated from flow dictionary"""
     flowCost = 0
-    for u in flowDict.keys():
-        for v in flowDict[u].keys():
+    for u in flowDict:
+        for v in flowDict[u]:
             flowCost += flowDict[u][v] * G[u][v]["weight"]
     return flowCost
 
@@ -140,7 +142,8 @@ def test_google_or_tools_example2():
 
 def test_large():
     fname = os.path.join(os.path.dirname(__file__), "netgen-2.gpickle.bz2")
-    G = nx.read_gpickle(fname)
+    with bz2.BZ2File(fname, "rb") as f:
+        G = pickle.load(f)
     flowCost, flowDict = nx.network_simplex(G)
     assert 6749969302 == flowCost
     assert 6749969302 == nx.cost_of_flow(G, flowDict)
diff --git a/networkx/algorithms/graph_hashing.py b/networkx/algorithms/graph_hashing.py
index b6e6312..1ca3928 100644
--- a/networkx/algorithms/graph_hashing.py
+++ b/networkx/algorithms/graph_hashing.py
@@ -163,9 +163,14 @@ def weisfeiler_lehman_subgraph_hashes(
     """
     Return a dictionary of subgraph hashes by node.
 
-    The dictionary is keyed by node to a list of hashes in increasingly
-    sized induced subgraphs containing the nodes within 2*k edges
-    of the key node for increasing integer k until all nodes are included.
+    Dictionary keys are nodes in `G`, and values are a list of hashes.
+    Each hash corresponds to a subgraph rooted at a given node u in `G`.
+    Lists of subgraph hashes are sorted in increasing order of depth from
+    their root node, with the hash at index i corresponding to a subgraph
+    of nodes at most i edges distance from u. Thus, each list will contain
+    ``iterations + 1`` elements - a hash for a subgraph at each depth, and
+    additionally a hash of the initial node label (or equivalently a
+    subgraph of depth 0)
 
     The function iteratively aggregates and hashes neighbourhoods of each node.
     This is achieved for each step by replacing for each node its label from
@@ -179,13 +184,13 @@ def weisfeiler_lehman_subgraph_hashes(
     along the connecting edge from this neighbor to node $n$. The resulting string
     is then hashed to compress this information into a fixed digest size.
 
-    Thus, at the $i$th iteration nodes within $2i$ distance influence any given
+    Thus, at the $i$-th iteration, nodes within $i$ hops influence any given
     hashed node label. We can therefore say that at depth $i$ for node $n$
     we have a hash for a subgraph induced by the $2i$-hop neighborhood of $n$.
 
-    Can be used to to create general Weisfeiler-Lehman graph kernels, or
-    generate features for graphs or nodes, for example to generate 'words' in a
-    graph as seen in the 'graph2vec' algorithm.
+    The output can be used to to create general Weisfeiler-Lehman graph kernels,
+    or generate features for graphs or nodes - for example to generate 'words' in
+    a graph as seen in the 'graph2vec' algorithm.
     See [1]_ & [2]_ respectively for details.
 
     Hashes are identical for isomorphic subgraphs and there exist strong
diff --git a/networkx/algorithms/isolate.py b/networkx/algorithms/isolate.py
index e81e722..f998328 100644
--- a/networkx/algorithms/isolate.py
+++ b/networkx/algorithms/isolate.py
@@ -1,10 +1,12 @@
 """
 Functions for identifying isolate (degree zero) nodes.
 """
+import networkx as nx
 
 __all__ = ["is_isolate", "isolates", "number_of_isolates"]
 
 
+@nx._dispatch
 def is_isolate(G, n):
     """Determines whether a node is an isolate.
 
@@ -37,6 +39,7 @@ def is_isolate(G, n):
     return G.degree(n) == 0
 
 
+@nx._dispatch
 def isolates(G):
     """Iterator over isolates in the graph.
 
@@ -82,6 +85,7 @@ def isolates(G):
     return (n for n, d in G.degree() if d == 0)
 
 
+@nx._dispatch
 def number_of_isolates(G):
     """Returns the number of isolates in the graph.
 
diff --git a/networkx/algorithms/isomorphism/__init__.py b/networkx/algorithms/isomorphism/__init__.py
index ddcedea..58c2268 100644
--- a/networkx/algorithms/isomorphism/__init__.py
+++ b/networkx/algorithms/isomorphism/__init__.py
@@ -4,3 +4,4 @@ from networkx.algorithms.isomorphism.matchhelpers import *
 from networkx.algorithms.isomorphism.temporalisomorphvf2 import *
 from networkx.algorithms.isomorphism.ismags import *
 from networkx.algorithms.isomorphism.tree_isomorphism import *
+from networkx.algorithms.isomorphism.vf2pp import *
diff --git a/networkx/algorithms/isomorphism/ismags.py b/networkx/algorithms/isomorphism/ismags.py
index bfb5eea..25ce94c 100644
--- a/networkx/algorithms/isomorphism/ismags.py
+++ b/networkx/algorithms/isomorphism/ismags.py
@@ -1,7 +1,6 @@
 """
-****************
 ISMAGS Algorithm
-****************
+================
 
 Provides a Python implementation of the ISMAGS algorithm. [1]_
 
@@ -90,21 +89,21 @@ True
 
 Notes
 -----
- - The current implementation works for undirected graphs only. The algorithm
-   in general should work for directed graphs as well though.
- - Node keys for both provided graphs need to be fully orderable as well as
-   hashable.
- - Node and edge equality is assumed to be transitive: if A is equal to B, and
-   B is equal to C, then A is equal to C.
+- The current implementation works for undirected graphs only. The algorithm
+  in general should work for directed graphs as well though.
+- Node keys for both provided graphs need to be fully orderable as well as
+  hashable.
+- Node and edge equality is assumed to be transitive: if A is equal to B, and
+  B is equal to C, then A is equal to C.
 
 References
 ----------
-    .. [1] M. Houbraken, S. Demeyer, T. Michoel, P. Audenaert, D. Colle,
-       M. Pickavet, "The Index-Based Subgraph Matching Algorithm with General
-       Symmetries (ISMAGS): Exploiting Symmetry for Faster Subgraph
-       Enumeration", PLoS One 9(5): e97896, 2014.
-       https://doi.org/10.1371/journal.pone.0097896
-    .. [2] https://en.wikipedia.org/wiki/Maximum_common_induced_subgraph
+.. [1] M. Houbraken, S. Demeyer, T. Michoel, P. Audenaert, D. Colle,
+   M. Pickavet, "The Index-Based Subgraph Matching Algorithm with General
+   Symmetries (ISMAGS): Exploiting Symmetry for Faster Subgraph
+   Enumeration", PLoS One 9(5): e97896, 2014.
+   https://doi.org/10.1371/journal.pone.0097896
+.. [2] https://en.wikipedia.org/wiki/Maximum_common_induced_subgraph
 """
 
 __all__ = ["ISMAGS"]
@@ -136,7 +135,7 @@ def are_all_equal(iterable):
         pass
     else:
         if len(shape) > 1:
-            message = "The function does not works on multidimension arrays."
+            message = "The function does not works on multidimensional arrays."
             raise NotImplementedError(message) from None
 
     iterator = iter(iterable)
@@ -184,8 +183,8 @@ def make_partitions(items, test):
 
 def partition_to_color(partitions):
     """
-    Creates a dictionary with for every item in partition for every partition
-    in partitions the index of partition in partitions.
+    Creates a dictionary that maps each item in each partition to the index of
+    the partition to which it belongs.
 
     Parameters
     ----------
@@ -196,7 +195,7 @@ def partition_to_color(partitions):
     -------
     dict
     """
-    colors = dict()
+    colors = {}
     for color, keys in enumerate(partitions):
         for key in keys:
             colors[key] = color
@@ -226,7 +225,7 @@ def intersect(collection_of_sets):
 
 class ISMAGS:
     """
-    Implements the ISMAGS subgraph matching algorith. [1]_ ISMAGS stands for
+    Implements the ISMAGS subgraph matching algorithm. [1]_ ISMAGS stands for
     "Index-based Subgraph Matching Algorithm with General Symmetries". As the
     name implies, it is symmetry aware and will only generate non-symmetric
     isomorphisms.
@@ -587,7 +586,7 @@ class ISMAGS:
         graph : networkx.Graph
             The graph whose symmetry should be analyzed.
         node_partitions : list of sets
-            A list of sets containining node keys. Node keys in the same set
+            A list of sets containing node keys. Node keys in the same set
             are considered equivalent. Every node key in `graph` should be in
             exactly one of the sets. If all nodes are equivalent, this should
             be ``[set(graph.nodes)]``.
@@ -720,7 +719,7 @@ class ISMAGS:
             counts[node1][ecolor, node_colors[node2]] += 1
             counts[node2][ecolor, node_colors[node1]] += 1
 
-        node_edge_colors = dict()
+        node_edge_colors = {}
         for node in graph.nodes:
             node_edge_colors[node] = node_colors[node], set(counts[node].items())
 
@@ -910,7 +909,7 @@ class ISMAGS:
         # "part of" the subgraph in to_be_mapped, and we make it a little
         # smaller every iteration.
 
-        # pylint disable becuase it's guarded against by default value
+        # pylint disable because it's guarded against by default value
         current_size = len(
             next(iter(to_be_mapped), [])
         )  # pylint: disable=stop-iteration-return
diff --git a/networkx/algorithms/isomorphism/isomorph.py b/networkx/algorithms/isomorphism/isomorph.py
index 1b9a727..b4de3f5 100644
--- a/networkx/algorithms/isomorphism/isomorph.py
+++ b/networkx/algorithms/isomorphism/isomorph.py
@@ -24,6 +24,10 @@ def could_be_isomorphic(G1, G2):
     Notes
     -----
     Checks for matching degree, triangle, and number of cliques sequences.
+    The triangle sequence contains the number of triangles each node is part of.
+    The clique sequence contains for each node the number of maximal cliques
+    involving that node.
+
     """
 
     # Check global properties
@@ -33,13 +37,15 @@ def could_be_isomorphic(G1, G2):
     # Check local properties
     d1 = G1.degree()
     t1 = nx.triangles(G1)
-    c1 = nx.number_of_cliques(G1)
+    clqs_1 = list(nx.find_cliques(G1))
+    c1 = {n: sum(1 for c in clqs_1 if n in c) for n in G1}  # number of cliques
     props1 = [[d, t1[v], c1[v]] for v, d in d1]
     props1.sort()
 
     d2 = G2.degree()
     t2 = nx.triangles(G2)
-    c2 = nx.number_of_cliques(G2)
+    clqs_2 = list(nx.find_cliques(G2))
+    c2 = {n: sum(1 for c in clqs_2 if n in c) for n in G2}  # number of cliques
     props2 = [[d, t2[v], c2[v]] for v, d in d2]
     props2.sort()
 
@@ -65,7 +71,8 @@ def fast_could_be_isomorphic(G1, G2):
 
     Notes
     -----
-    Checks for matching degree and triangle sequences.
+    Checks for matching degree and triangle sequences. The triangle
+    sequence contains the number of triangles each node is part of.
     """
     # Check global properties
     if G1.order() != G2.order():
@@ -219,7 +226,7 @@ def is_isomorphic(G1, G2, node_match=None, edge_match=None):
        "An Improved Algorithm for Matching Large Graphs",
        3rd IAPR-TC15 Workshop  on Graph-based Representations in
        Pattern Recognition, Cuen, pp. 149-159, 2001.
-       https://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.101.5342
+       https://www.researchgate.net/publication/200034365_An_Improved_Algorithm_for_Matching_Large_Graphs
     """
     if G1.is_directed() and G2.is_directed():
         GM = nx.algorithms.isomorphism.DiGraphMatcher
diff --git a/networkx/algorithms/isomorphism/isomorphvf2.py b/networkx/algorithms/isomorphism/isomorphvf2.py
index bcd478e..1890687 100644
--- a/networkx/algorithms/isomorphism/isomorphvf2.py
+++ b/networkx/algorithms/isomorphism/isomorphvf2.py
@@ -116,7 +116,7 @@ References
       Algorithm for Matching Large Graphs", 3rd IAPR-TC15 Workshop
       on Graph-based Representations in Pattern Recognition, Cuen,
       pp. 149-159, 2001.
-      https://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.101.5342
+      https://www.researchgate.net/publication/200034365_An_Improved_Algorithm_for_Matching_Large_Graphs
 
 See Also
 --------
@@ -447,7 +447,7 @@ class GraphMatcher:
         if self.test != "mono":
             for neighbor in self.G1[G1_node]:
                 if neighbor in self.core_1:
-                    if not (self.core_1[neighbor] in self.G2[G2_node]):
+                    if self.core_1[neighbor] not in self.G2[G2_node]:
                         return False
                     elif self.G1.number_of_edges(
                         neighbor, G1_node
@@ -456,7 +456,7 @@ class GraphMatcher:
 
         for neighbor in self.G2[G2_node]:
             if neighbor in self.core_2:
-                if not (self.core_2[neighbor] in self.G1[G1_node]):
+                if self.core_2[neighbor] not in self.G1[G1_node]:
                     return False
                 elif self.test == "mono":
                     if self.G1.number_of_edges(
@@ -484,7 +484,7 @@ class GraphMatcher:
                 if (neighbor in self.inout_2) and (neighbor not in self.core_2):
                     num2 += 1
             if self.test == "graph":
-                if not (num1 == num2):
+                if num1 != num2:
                     return False
             else:  # self.test == 'subgraph'
                 if not (num1 >= num2):
@@ -506,7 +506,7 @@ class GraphMatcher:
                 if neighbor not in self.inout_2:
                     num2 += 1
             if self.test == "graph":
-                if not (num1 == num2):
+                if num1 != num2:
                     return False
             else:  # self.test == 'subgraph'
                 if not (num1 >= num2):
@@ -672,7 +672,7 @@ class DiGraphMatcher(GraphMatcher):
         if self.test != "mono":
             for predecessor in self.G1.pred[G1_node]:
                 if predecessor in self.core_1:
-                    if not (self.core_1[predecessor] in self.G2.pred[G2_node]):
+                    if self.core_1[predecessor] not in self.G2.pred[G2_node]:
                         return False
                     elif self.G1.number_of_edges(
                         predecessor, G1_node
@@ -681,7 +681,7 @@ class DiGraphMatcher(GraphMatcher):
 
         for predecessor in self.G2.pred[G2_node]:
             if predecessor in self.core_2:
-                if not (self.core_2[predecessor] in self.G1.pred[G1_node]):
+                if self.core_2[predecessor] not in self.G1.pred[G1_node]:
                     return False
                 elif self.test == "mono":
                     if self.G1.number_of_edges(
@@ -702,7 +702,7 @@ class DiGraphMatcher(GraphMatcher):
         if self.test != "mono":
             for successor in self.G1[G1_node]:
                 if successor in self.core_1:
-                    if not (self.core_1[successor] in self.G2[G2_node]):
+                    if self.core_1[successor] not in self.G2[G2_node]:
                         return False
                     elif self.G1.number_of_edges(
                         G1_node, successor
@@ -711,7 +711,7 @@ class DiGraphMatcher(GraphMatcher):
 
         for successor in self.G2[G2_node]:
             if successor in self.core_2:
-                if not (self.core_2[successor] in self.G1[G1_node]):
+                if self.core_2[successor] not in self.G1[G1_node]:
                     return False
                 elif self.test == "mono":
                     if self.G1.number_of_edges(
@@ -725,7 +725,6 @@ class DiGraphMatcher(GraphMatcher):
                         return False
 
         if self.test != "mono":
-
             # Look ahead 1
 
             # R_termin
@@ -740,7 +739,7 @@ class DiGraphMatcher(GraphMatcher):
                 if (predecessor in self.in_2) and (predecessor not in self.core_2):
                     num2 += 1
             if self.test == "graph":
-                if not (num1 == num2):
+                if num1 != num2:
                     return False
             else:  # self.test == 'subgraph'
                 if not (num1 >= num2):
@@ -757,7 +756,7 @@ class DiGraphMatcher(GraphMatcher):
                 if (successor in self.in_2) and (successor not in self.core_2):
                     num2 += 1
             if self.test == "graph":
-                if not (num1 == num2):
+                if num1 != num2:
                     return False
             else:  # self.test == 'subgraph'
                 if not (num1 >= num2):
@@ -776,7 +775,7 @@ class DiGraphMatcher(GraphMatcher):
                 if (predecessor in self.out_2) and (predecessor not in self.core_2):
                     num2 += 1
             if self.test == "graph":
-                if not (num1 == num2):
+                if num1 != num2:
                     return False
             else:  # self.test == 'subgraph'
                 if not (num1 >= num2):
@@ -793,7 +792,7 @@ class DiGraphMatcher(GraphMatcher):
                 if (successor in self.out_2) and (successor not in self.core_2):
                     num2 += 1
             if self.test == "graph":
-                if not (num1 == num2):
+                if num1 != num2:
                     return False
             else:  # self.test == 'subgraph'
                 if not (num1 >= num2):
@@ -815,7 +814,7 @@ class DiGraphMatcher(GraphMatcher):
                 if (predecessor not in self.in_2) and (predecessor not in self.out_2):
                     num2 += 1
             if self.test == "graph":
-                if not (num1 == num2):
+                if num1 != num2:
                     return False
             else:  # self.test == 'subgraph'
                 if not (num1 >= num2):
@@ -833,7 +832,7 @@ class DiGraphMatcher(GraphMatcher):
                 if (successor not in self.in_2) and (successor not in self.out_2):
                     num2 += 1
             if self.test == "graph":
-                if not (num1 == num2):
+                if num1 != num2:
                     return False
             else:  # self.test == 'subgraph'
                 if not (num1 >= num2):
diff --git a/networkx/algorithms/isomorphism/matchhelpers.py b/networkx/algorithms/isomorphism/matchhelpers.py
index 9010e26..861869f 100644
--- a/networkx/algorithms/isomorphism/matchhelpers.py
+++ b/networkx/algorithms/isomorphism/matchhelpers.py
@@ -37,10 +37,7 @@ def allclose(x, y, rtol=1.0000000000000001e-05, atol=1e-08):
 
     """
     # assume finite weights, see numpy.allclose() for reference
-    for xi, yi in zip(x, y):
-        if not math.isclose(xi, yi, rel_tol=rtol, abs_tol=atol):
-            return False
-    return True
+    return all(math.isclose(xi, yi, rel_tol=rtol, abs_tol=atol) for xi, yi in zip(x, y))
 
 
 categorical_doc = """
diff --git a/networkx/algorithms/isomorphism/tests/test_ismags.py b/networkx/algorithms/isomorphism/tests/test_ismags.py
index 6fa4ab9..0b6fb0a 100644
--- a/networkx/algorithms/isomorphism/tests/test_ismags.py
+++ b/networkx/algorithms/isomorphism/tests/test_ismags.py
@@ -13,19 +13,19 @@ def _matches_to_sets(matches):
     Helper function to facilitate comparing collections of dictionaries in
     which order does not matter.
     """
-    return set(map(lambda m: frozenset(m.items()), matches))
+    return {frozenset(m.items()) for m in matches}
 
 
 class TestSelfIsomorphism:
     data = [
         (
             [
-                (0, dict(name="a")),
-                (1, dict(name="a")),
-                (2, dict(name="b")),
-                (3, dict(name="b")),
-                (4, dict(name="a")),
-                (5, dict(name="a")),
+                (0, {"name": "a"}),
+                (1, {"name": "a"}),
+                (2, {"name": "b"}),
+                (3, {"name": "b"}),
+                (4, {"name": "a"}),
+                (5, {"name": "a"}),
             ],
             [(0, 1), (1, 2), (2, 3), (3, 4), (4, 5)],
         ),
diff --git a/networkx/algorithms/isomorphism/tests/test_isomorphvf2.py b/networkx/algorithms/isomorphism/tests/test_isomorphvf2.py
index 5d3f41b..31f670a 100644
--- a/networkx/algorithms/isomorphism/tests/test_isomorphvf2.py
+++ b/networkx/algorithms/isomorphism/tests/test_isomorphvf2.py
@@ -59,7 +59,7 @@ class TestWikipediaExample:
 
         mapping = sorted(gm.mapping.items())
 
-    # this mapping is only one of the possibilies
+    # this mapping is only one of the possibilities
     # so this test needs to be reconsidered
     #        isomap = [('a', 1), ('b', 6), ('c', 3), ('d', 8),
     #                  ('g', 2), ('h', 5), ('i', 4), ('j', 7)]
@@ -143,7 +143,7 @@ class TestAtlas:
     @classmethod
     def setup_class(cls):
         global atlas
-        import networkx.generators.atlas as atlas
+        from networkx.generators import atlas
 
         cls.GAG = atlas.graph_atlas_g()
 
@@ -401,3 +401,9 @@ def test_monomorphism_edge_match():
 
     gm = iso.DiGraphMatcher(G, SG, edge_match=iso.categorical_edge_match("label", None))
     assert gm.subgraph_is_monomorphic()
+
+
+def test_isomorphvf2pp_multidigraphs():
+    g = nx.MultiDiGraph({0: [1, 1, 2, 2, 3], 1: [2, 3, 3], 2: [3]})
+    h = nx.MultiDiGraph({0: [1, 1, 2, 2, 3], 1: [2, 3, 3], 3: [2]})
+    assert not (nx.vf2pp_is_isomorphic(g, h))
diff --git a/networkx/algorithms/isomorphism/tests/test_match_helpers.py b/networkx/algorithms/isomorphism/tests/test_match_helpers.py
index 9d7c294..4d70347 100644
--- a/networkx/algorithms/isomorphism/tests/test_match_helpers.py
+++ b/networkx/algorithms/isomorphism/tests/test_match_helpers.py
@@ -6,8 +6,8 @@ from networkx.algorithms import isomorphism as iso
 
 def test_categorical_node_match():
     nm = iso.categorical_node_match(["x", "y", "z"], [None] * 3)
-    assert nm(dict(x=1, y=2, z=3), dict(x=1, y=2, z=3))
-    assert not nm(dict(x=1, y=2, z=2), dict(x=1, y=2, z=1))
+    assert nm({"x": 1, "y": 2, "z": 3}, {"x": 1, "y": 2, "z": 3})
+    assert not nm({"x": 1, "y": 2, "z": 2}, {"x": 1, "y": 2, "z": 1})
 
 
 class TestGenericMultiEdgeMatch:
diff --git a/networkx/algorithms/isomorphism/tests/test_tree_isomorphism.py b/networkx/algorithms/isomorphism/tests/test_tree_isomorphism.py
index 1628bea..2f0aba1 100644
--- a/networkx/algorithms/isomorphism/tests/test_tree_isomorphism.py
+++ b/networkx/algorithms/isomorphism/tests/test_tree_isomorphism.py
@@ -15,7 +15,6 @@ from networkx.classes.function import is_directed
 # and confirm it is identical to t1
 # randomize the order of the edges when constructing
 def check_isomorphism(t1, t2, isomorphism):
-
     # get the name of t1, given the name in t2
     mapping = {v2: v1 for (v1, v2) in isomorphism}
 
@@ -25,7 +24,7 @@ def check_isomorphism(t1, t2, isomorphism):
     assert d1 == d2
 
     edges_1 = []
-    for (u, v) in t1.edges():
+    for u, v in t1.edges():
         if d1:
             edges_1.append((u, v))
         else:
@@ -37,7 +36,7 @@ def check_isomorphism(t1, t2, isomorphism):
                 edges_1.append((v, u))
 
     edges_2 = []
-    for (u, v) in t2.edges():
+    for u, v in t2.edges():
         # translate to names for t1
         u = mapping[u]
         v = mapping[v]
@@ -53,7 +52,6 @@ def check_isomorphism(t1, t2, isomorphism):
 
 
 def test_hardcoded():
-
     print("hardcoded test")
 
     # define a test problem
@@ -134,7 +132,7 @@ def test_hardcoded():
     isomorphism = sorted(rooted_tree_isomorphism(t1, root1, t2, root2))
 
     # is correct by hand
-    assert (isomorphism == isomorphism1) or (isomorphism == isomorphism2)
+    assert isomorphism in (isomorphism1, isomorphism2)
 
     # check algorithmically
     assert check_isomorphism(t1, t2, isomorphism)
@@ -151,7 +149,7 @@ def test_hardcoded():
     isomorphism = sorted(rooted_tree_isomorphism(t1, root1, t2, root2))
 
     # is correct by hand
-    assert (isomorphism == isomorphism1) or (isomorphism == isomorphism2)
+    assert isomorphism in (isomorphism1, isomorphism2)
 
     # check algorithmically
     assert check_isomorphism(t1, t2, isomorphism)
@@ -170,10 +168,9 @@ def random_swap(t):
 # that is isomorphic to t1, with a known isomorphism
 # and test that our algorithm found the right one
 def positive_single_tree(t1):
-
     assert nx.is_tree(t1)
 
-    nodes1 = [n for n in t1.nodes()]
+    nodes1 = list(t1.nodes())
     # get a random permutation of this
     nodes2 = nodes1.copy()
     random.shuffle(nodes2)
@@ -183,7 +180,7 @@ def positive_single_tree(t1):
     someisomorphism = [(u, v) for (u, v) in zip(nodes1, nodes2)]
 
     # map from old to new
-    map1to2 = {u: v for (u, v) in someisomorphism}
+    map1to2 = dict(someisomorphism)
 
     # get the edges with the transformed names
     edges2 = [random_swap((map1to2[u], map1to2[v])) for (u, v) in t1.edges()]
@@ -210,7 +207,6 @@ def positive_single_tree(t1):
 # larger values run slow down significantly
 # as the number of trees grows rapidly
 def test_positive(maxk=14):
-
     print("positive test")
 
     for k in range(2, maxk + 1):
@@ -225,7 +221,6 @@ def test_positive(maxk=14):
 # test the trivial case of a single node in each tree
 # note that nonisomorphic_trees doesn't work for k = 1
 def test_trivial():
-
     print("trivial test")
 
     # back to an undirected graph
@@ -247,7 +242,6 @@ def test_trivial():
 # test another trivial case where the two graphs have
 # different numbers of nodes
 def test_trivial_2():
-
     print("trivial test 2")
 
     edges_1 = [("a", "b"), ("a", "c")]
@@ -275,7 +269,6 @@ def test_trivial_2():
 # larger values run slow down significantly
 # as the number of trees grows rapidly
 def test_negative(maxk=11):
-
     print("negative test")
 
     for k in range(4, maxk + 1):
diff --git a/networkx/algorithms/isomorphism/tests/test_vf2pp.py b/networkx/algorithms/isomorphism/tests/test_vf2pp.py
new file mode 100644
index 0000000..f728dfc
--- /dev/null
+++ b/networkx/algorithms/isomorphism/tests/test_vf2pp.py
@@ -0,0 +1,1608 @@
+import itertools as it
+
+import pytest
+
+import networkx as nx
+from networkx import vf2pp_is_isomorphic, vf2pp_isomorphism
+
+labels_same = ["blue"]
+
+labels_many = [
+    "white",
+    "red",
+    "blue",
+    "green",
+    "orange",
+    "black",
+    "purple",
+    "yellow",
+    "brown",
+    "cyan",
+    "solarized",
+    "pink",
+    "none",
+]
+
+
+class TestPreCheck:
+    def test_first_graph_empty(self):
+        G1 = nx.Graph()
+        G2 = nx.Graph([(0, 1), (1, 2)])
+        assert not vf2pp_is_isomorphic(G1, G2)
+
+    def test_second_graph_empty(self):
+        G1 = nx.Graph([(0, 1), (1, 2)])
+        G2 = nx.Graph()
+        assert not vf2pp_is_isomorphic(G1, G2)
+
+    def test_different_order1(self):
+        G1 = nx.path_graph(5)
+        G2 = nx.path_graph(6)
+        assert not vf2pp_is_isomorphic(G1, G2)
+
+    def test_different_order2(self):
+        G1 = nx.barbell_graph(100, 20)
+        G2 = nx.barbell_graph(101, 20)
+        assert not vf2pp_is_isomorphic(G1, G2)
+
+    def test_different_order3(self):
+        G1 = nx.complete_graph(7)
+        G2 = nx.complete_graph(8)
+        assert not vf2pp_is_isomorphic(G1, G2)
+
+    def test_different_degree_sequences1(self):
+        G1 = nx.Graph([(0, 1), (0, 2), (1, 2), (1, 3), (0, 4)])
+        G2 = nx.Graph([(0, 1), (0, 2), (1, 2), (1, 3), (0, 4), (2, 5)])
+        assert not vf2pp_is_isomorphic(G1, G2)
+
+        G2.remove_node(3)
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(["a"]))), "label")
+        nx.set_node_attributes(G2, dict(zip(G2, it.cycle("a"))), "label")
+
+        assert vf2pp_is_isomorphic(G1, G2)
+
+    def test_different_degree_sequences2(self):
+        G1 = nx.Graph(
+            [
+                (0, 1),
+                (1, 2),
+                (0, 2),
+                (2, 3),
+                (3, 4),
+                (4, 5),
+                (5, 6),
+                (6, 3),
+                (4, 7),
+                (7, 8),
+                (8, 3),
+            ]
+        )
+        G2 = G1.copy()
+        G2.add_edge(8, 0)
+        assert not vf2pp_is_isomorphic(G1, G2)
+
+        G1.add_edge(6, 1)
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(["a"]))), "label")
+        nx.set_node_attributes(G2, dict(zip(G2, it.cycle("a"))), "label")
+
+        assert vf2pp_is_isomorphic(G1, G2)
+
+    def test_different_degree_sequences3(self):
+        G1 = nx.Graph([(0, 1), (0, 2), (1, 2), (2, 3), (2, 4), (3, 4), (2, 5), (2, 6)])
+        G2 = nx.Graph(
+            [(0, 1), (0, 6), (0, 2), (1, 2), (2, 3), (2, 4), (3, 4), (2, 5), (2, 6)]
+        )
+        assert not vf2pp_is_isomorphic(G1, G2)
+
+        G1.add_edge(3, 5)
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(["a"]))), "label")
+        nx.set_node_attributes(G2, dict(zip(G2, it.cycle("a"))), "label")
+
+        assert vf2pp_is_isomorphic(G1, G2)
+
+    def test_label_distribution(self):
+        G1 = nx.Graph([(0, 1), (0, 2), (1, 2), (2, 3), (2, 4), (3, 4), (2, 5), (2, 6)])
+        G2 = nx.Graph([(0, 1), (0, 2), (1, 2), (2, 3), (2, 4), (3, 4), (2, 5), (2, 6)])
+
+        colors1 = ["blue", "blue", "blue", "yellow", "black", "purple", "purple"]
+        colors2 = ["blue", "blue", "yellow", "yellow", "black", "purple", "purple"]
+
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(colors1[::-1]))), "label")
+        nx.set_node_attributes(G2, dict(zip(G2, it.cycle(colors2[::-1]))), "label")
+
+        assert not vf2pp_is_isomorphic(G1, G2, node_label="label")
+        G2.nodes[3]["label"] = "blue"
+        assert vf2pp_is_isomorphic(G1, G2, node_label="label")
+
+
+class TestAllGraphTypesEdgeCases:
+    @pytest.mark.parametrize("graph_type", (nx.Graph, nx.MultiGraph, nx.DiGraph))
+    def test_both_graphs_empty(self, graph_type):
+        G = graph_type()
+        H = graph_type()
+        assert vf2pp_isomorphism(G, H) is None
+
+        G.add_node(0)
+
+        assert vf2pp_isomorphism(G, H) is None
+        assert vf2pp_isomorphism(H, G) is None
+
+        H.add_node(0)
+        assert vf2pp_isomorphism(G, H) == {0: 0}
+
+    @pytest.mark.parametrize("graph_type", (nx.Graph, nx.MultiGraph, nx.DiGraph))
+    def test_first_graph_empty(self, graph_type):
+        G = graph_type()
+        H = graph_type([(0, 1)])
+        assert vf2pp_isomorphism(G, H) is None
+
+    @pytest.mark.parametrize("graph_type", (nx.Graph, nx.MultiGraph, nx.DiGraph))
+    def test_second_graph_empty(self, graph_type):
+        G = graph_type([(0, 1)])
+        H = graph_type()
+        assert vf2pp_isomorphism(G, H) is None
+
+
+class TestGraphISOVF2pp:
+    def test_custom_graph1_same_labels(self):
+        G1 = nx.Graph()
+
+        mapped = {1: "A", 2: "B", 3: "C", 4: "D", 5: "Z", 6: "E"}
+        edges1 = [(1, 2), (1, 3), (1, 4), (2, 3), (2, 6), (3, 4), (5, 1), (5, 2)]
+
+        G1.add_edges_from(edges1)
+        G2 = nx.relabel_nodes(G1, mapped)
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label")
+        nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label")
+        assert vf2pp_isomorphism(G1, G2, node_label="label")
+
+        # Add edge making G1 symmetrical
+        G1.add_edge(3, 7)
+        G1.nodes[7]["label"] = "blue"
+        assert vf2pp_isomorphism(G1, G2, node_label="label") is None
+
+        # Make G2 isomorphic to G1
+        G2.add_edges_from([(mapped[3], "X"), (mapped[6], mapped[5])])
+        G1.add_edge(4, 7)
+        G2.nodes["X"]["label"] = "blue"
+        assert vf2pp_isomorphism(G1, G2, node_label="label")
+
+        # Re-structure maintaining isomorphism
+        G1.remove_edges_from([(1, 4), (1, 3)])
+        G2.remove_edges_from([(mapped[1], mapped[5]), (mapped[1], mapped[2])])
+        assert vf2pp_isomorphism(G1, G2, node_label="label")
+
+    def test_custom_graph1_different_labels(self):
+        G1 = nx.Graph()
+
+        mapped = {1: "A", 2: "B", 3: "C", 4: "D", 5: "Z", 6: "E"}
+        edges1 = [(1, 2), (1, 3), (1, 4), (2, 3), (2, 6), (3, 4), (5, 1), (5, 2)]
+
+        G1.add_edges_from(edges1)
+        G2 = nx.relabel_nodes(G1, mapped)
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label")
+        nx.set_node_attributes(
+            G2,
+            dict(zip([mapped[n] for n in G1], it.cycle(labels_many))),
+            "label",
+        )
+        assert vf2pp_isomorphism(G1, G2, node_label="label") == mapped
+
+    def test_custom_graph2_same_labels(self):
+        G1 = nx.Graph()
+
+        mapped = {1: "A", 2: "C", 3: "D", 4: "E", 5: "G", 7: "B", 6: "F"}
+        edges1 = [(1, 2), (1, 5), (5, 6), (2, 3), (2, 4), (3, 4), (4, 5), (2, 7)]
+
+        G1.add_edges_from(edges1)
+        G2 = nx.relabel_nodes(G1, mapped)
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label")
+        nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label")
+
+        assert vf2pp_isomorphism(G1, G2, node_label="label")
+
+        # Obtain two isomorphic subgraphs from the graph
+        G2.remove_edge(mapped[1], mapped[2])
+        G2.add_edge(mapped[1], mapped[4])
+        H1 = nx.Graph(G1.subgraph([2, 3, 4, 7]))
+        H2 = nx.Graph(G2.subgraph([mapped[1], mapped[4], mapped[5], mapped[6]]))
+        assert vf2pp_isomorphism(H1, H2, node_label="label")
+
+        # Add edges maintaining isomorphism
+        H1.add_edges_from([(3, 7), (4, 7)])
+        H2.add_edges_from([(mapped[1], mapped[6]), (mapped[4], mapped[6])])
+        assert vf2pp_isomorphism(H1, H2, node_label="label")
+
+    def test_custom_graph2_different_labels(self):
+        G1 = nx.Graph()
+
+        mapped = {1: "A", 2: "C", 3: "D", 4: "E", 5: "G", 7: "B", 6: "F"}
+        edges1 = [(1, 2), (1, 5), (5, 6), (2, 3), (2, 4), (3, 4), (4, 5), (2, 7)]
+
+        G1.add_edges_from(edges1)
+        G2 = nx.relabel_nodes(G1, mapped)
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label")
+        nx.set_node_attributes(
+            G2,
+            dict(zip([mapped[n] for n in G1], it.cycle(labels_many))),
+            "label",
+        )
+
+        # Adding new nodes
+        G1.add_node(0)
+        G2.add_node("Z")
+        G1.nodes[0]["label"] = G1.nodes[1]["label"]
+        G2.nodes["Z"]["label"] = G1.nodes[1]["label"]
+        mapped.update({0: "Z"})
+
+        assert vf2pp_isomorphism(G1, G2, node_label="label") == mapped
+
+        # Change the color of one of the nodes
+        G2.nodes["Z"]["label"] = G1.nodes[2]["label"]
+        assert vf2pp_isomorphism(G1, G2, node_label="label") is None
+
+        # Add an extra edge
+        G1.nodes[0]["label"] = "blue"
+        G2.nodes["Z"]["label"] = "blue"
+        G1.add_edge(0, 1)
+
+        assert vf2pp_isomorphism(G1, G2, node_label="label") is None
+
+        # Add extra edge to both
+        G2.add_edge("Z", "A")
+        assert vf2pp_isomorphism(G1, G2, node_label="label") == mapped
+
+    def test_custom_graph3_same_labels(self):
+        G1 = nx.Graph()
+
+        mapped = {1: 9, 2: 8, 3: 7, 4: 6, 5: 3, 8: 5, 9: 4, 7: 1, 6: 2}
+        edges1 = [
+            (1, 2),
+            (1, 3),
+            (2, 3),
+            (3, 4),
+            (4, 5),
+            (4, 7),
+            (4, 9),
+            (5, 8),
+            (8, 9),
+            (5, 6),
+            (6, 7),
+            (5, 2),
+        ]
+        G1.add_edges_from(edges1)
+        G2 = nx.relabel_nodes(G1, mapped)
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label")
+        nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label")
+        assert vf2pp_isomorphism(G1, G2, node_label="label")
+
+        # Connect nodes maintaining symmetry
+        G1.add_edges_from([(6, 9), (7, 8)])
+        G2.add_edges_from([(mapped[6], mapped[8]), (mapped[7], mapped[9])])
+        assert vf2pp_isomorphism(G1, G2, node_label="label") is None
+
+        # Make isomorphic
+        G1.add_edges_from([(6, 8), (7, 9)])
+        G2.add_edges_from([(mapped[6], mapped[9]), (mapped[7], mapped[8])])
+        assert vf2pp_isomorphism(G1, G2, node_label="label")
+
+        # Connect more nodes
+        G1.add_edges_from([(2, 7), (3, 6)])
+        G2.add_edges_from([(mapped[2], mapped[7]), (mapped[3], mapped[6])])
+        G1.add_node(10)
+        G2.add_node("Z")
+        G1.nodes[10]["label"] = "blue"
+        G2.nodes["Z"]["label"] = "blue"
+
+        assert vf2pp_isomorphism(G1, G2, node_label="label")
+
+        # Connect the newly added node, to opposite sides of the graph
+        G1.add_edges_from([(10, 1), (10, 5), (10, 8)])
+        G2.add_edges_from([("Z", mapped[1]), ("Z", mapped[4]), ("Z", mapped[9])])
+        assert vf2pp_isomorphism(G1, G2, node_label="label")
+
+        # Get two subgraphs that are not isomorphic but are easy to make
+        H1 = nx.Graph(G1.subgraph([2, 3, 4, 5, 6, 7, 10]))
+        H2 = nx.Graph(
+            G2.subgraph(
+                [mapped[4], mapped[5], mapped[6], mapped[7], mapped[8], mapped[9], "Z"]
+            )
+        )
+        assert vf2pp_isomorphism(H1, H2, node_label="label") is None
+
+        # Restructure both to make them isomorphic
+        H1.add_edges_from([(10, 2), (10, 6), (3, 6), (2, 7), (2, 6), (3, 7)])
+        H2.add_edges_from(
+            [("Z", mapped[7]), (mapped[6], mapped[9]), (mapped[7], mapped[8])]
+        )
+        assert vf2pp_isomorphism(H1, H2, node_label="label")
+
+        # Add edges with opposite direction in each Graph
+        H1.add_edge(3, 5)
+        H2.add_edge(mapped[5], mapped[7])
+        assert vf2pp_isomorphism(H1, H2, node_label="label") is None
+
+    def test_custom_graph3_different_labels(self):
+        G1 = nx.Graph()
+
+        mapped = {1: 9, 2: 8, 3: 7, 4: 6, 5: 3, 8: 5, 9: 4, 7: 1, 6: 2}
+        edges1 = [
+            (1, 2),
+            (1, 3),
+            (2, 3),
+            (3, 4),
+            (4, 5),
+            (4, 7),
+            (4, 9),
+            (5, 8),
+            (8, 9),
+            (5, 6),
+            (6, 7),
+            (5, 2),
+        ]
+        G1.add_edges_from(edges1)
+        G2 = nx.relabel_nodes(G1, mapped)
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label")
+        nx.set_node_attributes(
+            G2,
+            dict(zip([mapped[n] for n in G1], it.cycle(labels_many))),
+            "label",
+        )
+        assert vf2pp_isomorphism(G1, G2, node_label="label") == mapped
+
+        # Add extra edge to G1
+        G1.add_edge(1, 7)
+        assert vf2pp_isomorphism(G1, G2, node_label="label") is None
+
+        # Compensate in G2
+        G2.add_edge(9, 1)
+        assert vf2pp_isomorphism(G1, G2, node_label="label") == mapped
+
+        # Add extra node
+        G1.add_node("A")
+        G2.add_node("K")
+        G1.nodes["A"]["label"] = "green"
+        G2.nodes["K"]["label"] = "green"
+        mapped.update({"A": "K"})
+
+        assert vf2pp_isomorphism(G1, G2, node_label="label") == mapped
+
+        # Connect A to one side of G1 and K to the opposite
+        G1.add_edge("A", 6)
+        G2.add_edge("K", 5)
+        assert vf2pp_isomorphism(G1, G2, node_label="label") is None
+
+        # Make the graphs symmetrical
+        G1.add_edge(1, 5)
+        G1.add_edge(2, 9)
+        G2.add_edge(9, 3)
+        G2.add_edge(8, 4)
+        assert vf2pp_isomorphism(G1, G2, node_label="label") is None
+
+        # Assign same colors so the two opposite sides are identical
+        for node in G1.nodes():
+            color = "red"
+            G1.nodes[node]["label"] = color
+            G2.nodes[mapped[node]]["label"] = color
+
+        assert vf2pp_isomorphism(G1, G2, node_label="label")
+
+    def test_custom_graph4_different_labels(self):
+        G1 = nx.Graph()
+        edges1 = [
+            (1, 2),
+            (2, 3),
+            (3, 8),
+            (3, 4),
+            (4, 5),
+            (4, 6),
+            (3, 6),
+            (8, 7),
+            (8, 9),
+            (5, 9),
+            (10, 11),
+            (11, 12),
+            (12, 13),
+            (11, 13),
+        ]
+
+        mapped = {
+            1: "n",
+            2: "m",
+            3: "l",
+            4: "j",
+            5: "k",
+            6: "i",
+            7: "g",
+            8: "h",
+            9: "f",
+            10: "b",
+            11: "a",
+            12: "d",
+            13: "e",
+        }
+
+        G1.add_edges_from(edges1)
+        G2 = nx.relabel_nodes(G1, mapped)
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label")
+        nx.set_node_attributes(
+            G2,
+            dict(zip([mapped[n] for n in G1], it.cycle(labels_many))),
+            "label",
+        )
+        assert vf2pp_isomorphism(G1, G2, node_label="label") == mapped
+
+    def test_custom_graph4_same_labels(self):
+        G1 = nx.Graph()
+        edges1 = [
+            (1, 2),
+            (2, 3),
+            (3, 8),
+            (3, 4),
+            (4, 5),
+            (4, 6),
+            (3, 6),
+            (8, 7),
+            (8, 9),
+            (5, 9),
+            (10, 11),
+            (11, 12),
+            (12, 13),
+            (11, 13),
+        ]
+
+        mapped = {
+            1: "n",
+            2: "m",
+            3: "l",
+            4: "j",
+            5: "k",
+            6: "i",
+            7: "g",
+            8: "h",
+            9: "f",
+            10: "b",
+            11: "a",
+            12: "d",
+            13: "e",
+        }
+
+        G1.add_edges_from(edges1)
+        G2 = nx.relabel_nodes(G1, mapped)
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label")
+        nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label")
+        assert vf2pp_isomorphism(G1, G2, node_label="label")
+
+        # Add nodes of different label
+        G1.add_node(0)
+        G2.add_node("z")
+        G1.nodes[0]["label"] = "green"
+        G2.nodes["z"]["label"] = "blue"
+
+        assert vf2pp_isomorphism(G1, G2, node_label="label") is None
+
+        # Make the labels identical
+        G2.nodes["z"]["label"] = "green"
+        assert vf2pp_isomorphism(G1, G2, node_label="label")
+
+        # Change the structure of the graphs, keeping them isomorphic
+        G1.add_edge(2, 5)
+        G2.remove_edge("i", "l")
+        G2.add_edge("g", "l")
+        G2.add_edge("m", "f")
+        assert vf2pp_isomorphism(G1, G2, node_label="label")
+
+        # Change the structure of the disconnected sub-graph, keeping it isomorphic
+        G1.remove_node(13)
+        G2.remove_node("d")
+        assert vf2pp_isomorphism(G1, G2, node_label="label")
+
+        # Connect the newly added node to the disconnected graph, which now is just a path of size 3
+        G1.add_edge(0, 10)
+        G2.add_edge("e", "z")
+        assert vf2pp_isomorphism(G1, G2, node_label="label")
+
+        # Connect the two disconnected sub-graphs, forming a single graph
+        G1.add_edge(11, 3)
+        G1.add_edge(0, 8)
+        G2.add_edge("a", "l")
+        G2.add_edge("z", "j")
+        assert vf2pp_isomorphism(G1, G2, node_label="label")
+
+    def test_custom_graph5_same_labels(self):
+        G1 = nx.Graph()
+        edges1 = [
+            (1, 5),
+            (1, 2),
+            (1, 4),
+            (2, 3),
+            (2, 6),
+            (3, 4),
+            (3, 7),
+            (4, 8),
+            (5, 8),
+            (5, 6),
+            (6, 7),
+            (7, 8),
+        ]
+        mapped = {1: "a", 2: "h", 3: "d", 4: "i", 5: "g", 6: "b", 7: "j", 8: "c"}
+
+        G1.add_edges_from(edges1)
+        G2 = nx.relabel_nodes(G1, mapped)
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label")
+        nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label")
+        assert vf2pp_isomorphism(G1, G2, node_label="label")
+
+        # Add different edges in each graph, maintaining symmetry
+        G1.add_edges_from([(3, 6), (2, 7), (2, 5), (1, 3), (4, 7), (6, 8)])
+        G2.add_edges_from(
+            [
+                (mapped[6], mapped[3]),
+                (mapped[2], mapped[7]),
+                (mapped[1], mapped[6]),
+                (mapped[5], mapped[7]),
+                (mapped[3], mapped[8]),
+                (mapped[2], mapped[4]),
+            ]
+        )
+        assert vf2pp_isomorphism(G1, G2, node_label="label")
+
+        # Obtain two different but isomorphic subgraphs from G1 and G2
+        H1 = nx.Graph(G1.subgraph([1, 5, 8, 6, 7, 3]))
+        H2 = nx.Graph(
+            G2.subgraph(
+                [mapped[1], mapped[4], mapped[8], mapped[7], mapped[3], mapped[5]]
+            )
+        )
+        assert vf2pp_isomorphism(H1, H2, node_label="label")
+
+        # Delete corresponding node from the two graphs
+        H1.remove_node(8)
+        H2.remove_node(mapped[7])
+        assert vf2pp_isomorphism(H1, H2, node_label="label")
+
+        # Re-orient, maintaining isomorphism
+        H1.add_edge(1, 6)
+        H1.remove_edge(3, 6)
+        assert vf2pp_isomorphism(H1, H2, node_label="label")
+
+    def test_custom_graph5_different_labels(self):
+        G1 = nx.Graph()
+        edges1 = [
+            (1, 5),
+            (1, 2),
+            (1, 4),
+            (2, 3),
+            (2, 6),
+            (3, 4),
+            (3, 7),
+            (4, 8),
+            (5, 8),
+            (5, 6),
+            (6, 7),
+            (7, 8),
+        ]
+        mapped = {1: "a", 2: "h", 3: "d", 4: "i", 5: "g", 6: "b", 7: "j", 8: "c"}
+
+        G1.add_edges_from(edges1)
+        G2 = nx.relabel_nodes(G1, mapped)
+
+        colors = ["red", "blue", "grey", "none", "brown", "solarized", "yellow", "pink"]
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label")
+        nx.set_node_attributes(
+            G2,
+            dict(zip([mapped[n] for n in G1], it.cycle(labels_many))),
+            "label",
+        )
+        assert vf2pp_isomorphism(G1, G2, node_label="label") == mapped
+
+        # Assign different colors to matching nodes
+        c = 0
+        for node in G1.nodes():
+            color1 = colors[c]
+            color2 = colors[(c + 3) % len(colors)]
+            G1.nodes[node]["label"] = color1
+            G2.nodes[mapped[node]]["label"] = color2
+            c += 1
+
+        assert vf2pp_isomorphism(G1, G2, node_label="label") is None
+
+        # Get symmetrical sub-graphs of G1,G2 and compare them
+        H1 = G1.subgraph([1, 5])
+        H2 = G2.subgraph(["i", "c"])
+        c = 0
+        for node1, node2 in zip(H1.nodes(), H2.nodes()):
+            H1.nodes[node1]["label"] = "red"
+            H2.nodes[node2]["label"] = "red"
+            c += 1
+
+        assert vf2pp_isomorphism(H1, H2, node_label="label")
+
+    def test_disconnected_graph_all_same_labels(self):
+        G1 = nx.Graph()
+        G1.add_nodes_from(list(range(10)))
+
+        mapped = {0: 9, 1: 8, 2: 7, 3: 6, 4: 5, 5: 4, 6: 3, 7: 2, 8: 1, 9: 0}
+        G2 = nx.relabel_nodes(G1, mapped)
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label")
+        nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label")
+        assert vf2pp_isomorphism(G1, G2, node_label="label")
+
+    def test_disconnected_graph_all_different_labels(self):
+        G1 = nx.Graph()
+        G1.add_nodes_from(list(range(10)))
+
+        mapped = {0: 9, 1: 8, 2: 7, 3: 6, 4: 5, 5: 4, 6: 3, 7: 2, 8: 1, 9: 0}
+        G2 = nx.relabel_nodes(G1, mapped)
+
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label")
+        nx.set_node_attributes(
+            G2,
+            dict(zip([mapped[n] for n in G1], it.cycle(labels_many))),
+            "label",
+        )
+        assert vf2pp_isomorphism(G1, G2, node_label="label") == mapped
+
+    def test_disconnected_graph_some_same_labels(self):
+        G1 = nx.Graph()
+        G1.add_nodes_from(list(range(10)))
+
+        mapped = {0: 9, 1: 8, 2: 7, 3: 6, 4: 5, 5: 4, 6: 3, 7: 2, 8: 1, 9: 0}
+        G2 = nx.relabel_nodes(G1, mapped)
+
+        colors = [
+            "white",
+            "white",
+            "white",
+            "purple",
+            "purple",
+            "red",
+            "red",
+            "pink",
+            "pink",
+            "pink",
+        ]
+
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(colors))), "label")
+        nx.set_node_attributes(
+            G2, dict(zip([mapped[n] for n in G1], it.cycle(colors))), "label"
+        )
+
+        assert vf2pp_isomorphism(G1, G2, node_label="label")
+
+
+class TestMultiGraphISOVF2pp:
+    def test_custom_multigraph1_same_labels(self):
+        G1 = nx.MultiGraph()
+
+        mapped = {1: "A", 2: "B", 3: "C", 4: "D", 5: "Z", 6: "E"}
+        edges1 = [
+            (1, 2),
+            (1, 3),
+            (1, 4),
+            (1, 4),
+            (1, 4),
+            (2, 3),
+            (2, 6),
+            (2, 6),
+            (3, 4),
+            (3, 4),
+            (5, 1),
+            (5, 1),
+            (5, 2),
+            (5, 2),
+        ]
+
+        G1.add_edges_from(edges1)
+        G2 = nx.relabel_nodes(G1, mapped)
+
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label")
+        nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label")
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+        # Transfer the 2-clique to the right side of G1
+        G1.remove_edges_from([(2, 6), (2, 6)])
+        G1.add_edges_from([(3, 6), (3, 6)])
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert not m
+
+        # Delete an edges, making them symmetrical, so the position of the 2-clique doesn't matter
+        G2.remove_edge(mapped[1], mapped[4])
+        G1.remove_edge(1, 4)
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+        # Add self-loops
+        G1.add_edges_from([(5, 5), (5, 5), (1, 1)])
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert not m
+
+        # Compensate in G2
+        G2.add_edges_from(
+            [(mapped[1], mapped[1]), (mapped[4], mapped[4]), (mapped[4], mapped[4])]
+        )
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+    def test_custom_multigraph1_different_labels(self):
+        G1 = nx.MultiGraph()
+
+        mapped = {1: "A", 2: "B", 3: "C", 4: "D", 5: "Z", 6: "E"}
+        edges1 = [
+            (1, 2),
+            (1, 3),
+            (1, 4),
+            (1, 4),
+            (1, 4),
+            (2, 3),
+            (2, 6),
+            (2, 6),
+            (3, 4),
+            (3, 4),
+            (5, 1),
+            (5, 1),
+            (5, 2),
+            (5, 2),
+        ]
+
+        G1.add_edges_from(edges1)
+        G2 = nx.relabel_nodes(G1, mapped)
+
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label")
+        nx.set_node_attributes(
+            G2,
+            dict(zip([mapped[n] for n in G1], it.cycle(labels_many))),
+            "label",
+        )
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+        assert m == mapped
+
+        # Re-structure G1, maintaining the degree sequence
+        G1.remove_edge(1, 4)
+        G1.add_edge(1, 5)
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert not m
+
+        # Restructure G2, making it isomorphic to G1
+        G2.remove_edge("A", "D")
+        G2.add_edge("A", "Z")
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+        assert m == mapped
+
+        # Add edge from node to itself
+        G1.add_edges_from([(6, 6), (6, 6), (6, 6)])
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert not m
+
+        # Same for G2
+        G2.add_edges_from([("E", "E"), ("E", "E"), ("E", "E")])
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+        assert m == mapped
+
+    def test_custom_multigraph2_same_labels(self):
+        G1 = nx.MultiGraph()
+
+        mapped = {1: "A", 2: "C", 3: "D", 4: "E", 5: "G", 7: "B", 6: "F"}
+        edges1 = [
+            (1, 2),
+            (1, 2),
+            (1, 5),
+            (1, 5),
+            (1, 5),
+            (5, 6),
+            (2, 3),
+            (2, 3),
+            (2, 4),
+            (3, 4),
+            (3, 4),
+            (4, 5),
+            (4, 5),
+            (4, 5),
+            (2, 7),
+            (2, 7),
+            (2, 7),
+        ]
+
+        G1.add_edges_from(edges1)
+        G2 = nx.relabel_nodes(G1, mapped)
+
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label")
+        nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label")
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+        # Obtain two non-isomorphic subgraphs from the graph
+        G2.remove_edges_from([(mapped[1], mapped[2]), (mapped[1], mapped[2])])
+        G2.add_edge(mapped[1], mapped[4])
+        H1 = nx.MultiGraph(G1.subgraph([2, 3, 4, 7]))
+        H2 = nx.MultiGraph(G2.subgraph([mapped[1], mapped[4], mapped[5], mapped[6]]))
+
+        m = vf2pp_isomorphism(H1, H2, node_label="label")
+        assert not m
+
+        # Make them isomorphic
+        H1.remove_edge(3, 4)
+        H1.add_edges_from([(2, 3), (2, 4), (2, 4)])
+        H2.add_edges_from([(mapped[5], mapped[6]), (mapped[5], mapped[6])])
+        m = vf2pp_isomorphism(H1, H2, node_label="label")
+        assert m
+
+        # Remove triangle edge
+        H1.remove_edges_from([(2, 3), (2, 3), (2, 3)])
+        H2.remove_edges_from([(mapped[5], mapped[4])] * 3)
+        m = vf2pp_isomorphism(H1, H2, node_label="label")
+        assert m
+
+        # Change the edge orientation such that H1 is rotated H2
+        H1.remove_edges_from([(2, 7), (2, 7)])
+        H1.add_edges_from([(3, 4), (3, 4)])
+        m = vf2pp_isomorphism(H1, H2, node_label="label")
+        assert m
+
+        # Add extra edges maintaining degree sequence, but in a non-symmetrical manner
+        H2.add_edge(mapped[5], mapped[1])
+        H1.add_edge(3, 4)
+        m = vf2pp_isomorphism(H1, H2, node_label="label")
+        assert not m
+
+    def test_custom_multigraph2_different_labels(self):
+        G1 = nx.MultiGraph()
+
+        mapped = {1: "A", 2: "C", 3: "D", 4: "E", 5: "G", 7: "B", 6: "F"}
+        edges1 = [
+            (1, 2),
+            (1, 2),
+            (1, 5),
+            (1, 5),
+            (1, 5),
+            (5, 6),
+            (2, 3),
+            (2, 3),
+            (2, 4),
+            (3, 4),
+            (3, 4),
+            (4, 5),
+            (4, 5),
+            (4, 5),
+            (2, 7),
+            (2, 7),
+            (2, 7),
+        ]
+
+        G1.add_edges_from(edges1)
+        G2 = nx.relabel_nodes(G1, mapped)
+
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label")
+        nx.set_node_attributes(
+            G2,
+            dict(zip([mapped[n] for n in G1], it.cycle(labels_many))),
+            "label",
+        )
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+        assert m == mapped
+
+        # Re-structure G1
+        G1.remove_edge(2, 7)
+        G1.add_edge(5, 6)
+
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert not m
+
+        # Same for G2
+        G2.remove_edge("B", "C")
+        G2.add_edge("G", "F")
+
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+        assert m == mapped
+
+        # Delete node from G1 and G2, keeping them isomorphic
+        G1.remove_node(3)
+        G2.remove_node("D")
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+        # Change G1 edges
+        G1.remove_edge(1, 2)
+        G1.remove_edge(2, 7)
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert not m
+
+        # Make G2 identical to G1, but with different edge orientation and different labels
+        G2.add_edges_from([("A", "C"), ("C", "E"), ("C", "E")])
+        G2.remove_edges_from(
+            [("A", "G"), ("A", "G"), ("F", "G"), ("E", "G"), ("E", "G")]
+        )
+
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert not m
+
+        # Make all labels the same, so G1 and G2 are also isomorphic
+        for n1, n2 in zip(G1.nodes(), G2.nodes()):
+            G1.nodes[n1]["label"] = "blue"
+            G2.nodes[n2]["label"] = "blue"
+
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+    def test_custom_multigraph3_same_labels(self):
+        G1 = nx.MultiGraph()
+
+        mapped = {1: 9, 2: 8, 3: 7, 4: 6, 5: 3, 8: 5, 9: 4, 7: 1, 6: 2}
+        edges1 = [
+            (1, 2),
+            (1, 3),
+            (1, 3),
+            (2, 3),
+            (2, 3),
+            (3, 4),
+            (4, 5),
+            (4, 7),
+            (4, 9),
+            (4, 9),
+            (4, 9),
+            (5, 8),
+            (5, 8),
+            (8, 9),
+            (8, 9),
+            (5, 6),
+            (6, 7),
+            (6, 7),
+            (6, 7),
+            (5, 2),
+        ]
+        G1.add_edges_from(edges1)
+        G2 = nx.relabel_nodes(G1, mapped)
+
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label")
+        nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label")
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+        # Connect nodes maintaining symmetry
+        G1.add_edges_from([(6, 9), (7, 8), (5, 8), (4, 9), (4, 9)])
+        G2.add_edges_from(
+            [
+                (mapped[6], mapped[8]),
+                (mapped[7], mapped[9]),
+                (mapped[5], mapped[8]),
+                (mapped[4], mapped[9]),
+                (mapped[4], mapped[9]),
+            ]
+        )
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert not m
+
+        # Make isomorphic
+        G1.add_edges_from([(6, 8), (6, 8), (7, 9), (7, 9), (7, 9)])
+        G2.add_edges_from(
+            [
+                (mapped[6], mapped[8]),
+                (mapped[6], mapped[9]),
+                (mapped[7], mapped[8]),
+                (mapped[7], mapped[9]),
+                (mapped[7], mapped[9]),
+            ]
+        )
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+        # Connect more nodes
+        G1.add_edges_from([(2, 7), (2, 7), (3, 6), (3, 6)])
+        G2.add_edges_from(
+            [
+                (mapped[2], mapped[7]),
+                (mapped[2], mapped[7]),
+                (mapped[3], mapped[6]),
+                (mapped[3], mapped[6]),
+            ]
+        )
+        G1.add_node(10)
+        G2.add_node("Z")
+        G1.nodes[10]["label"] = "blue"
+        G2.nodes["Z"]["label"] = "blue"
+
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+        # Connect the newly added node, to opposite sides of the graph
+        G1.add_edges_from([(10, 1), (10, 5), (10, 8), (10, 10), (10, 10)])
+        G2.add_edges_from(
+            [
+                ("Z", mapped[1]),
+                ("Z", mapped[4]),
+                ("Z", mapped[9]),
+                ("Z", "Z"),
+                ("Z", "Z"),
+            ]
+        )
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert not m
+
+        # We connected the new node to opposite sides, so G1 must be symmetrical to G2. Re-structure them to be so
+        G1.remove_edges_from([(1, 3), (4, 9), (4, 9), (7, 9)])
+        G2.remove_edges_from(
+            [
+                (mapped[1], mapped[3]),
+                (mapped[4], mapped[9]),
+                (mapped[4], mapped[9]),
+                (mapped[7], mapped[9]),
+            ]
+        )
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+        # Get two subgraphs that are not isomorphic but are easy to make
+        H1 = nx.Graph(G1.subgraph([2, 3, 4, 5, 6, 7, 10]))
+        H2 = nx.Graph(
+            G2.subgraph(
+                [mapped[4], mapped[5], mapped[6], mapped[7], mapped[8], mapped[9], "Z"]
+            )
+        )
+
+        m = vf2pp_isomorphism(H1, H2, node_label="label")
+        assert not m
+
+        # Restructure both to make them isomorphic
+        H1.add_edges_from([(10, 2), (10, 6), (3, 6), (2, 7), (2, 6), (3, 7)])
+        H2.add_edges_from(
+            [("Z", mapped[7]), (mapped[6], mapped[9]), (mapped[7], mapped[8])]
+        )
+        m = vf2pp_isomorphism(H1, H2, node_label="label")
+        assert m
+
+        # Remove one self-loop in H2
+        H2.remove_edge("Z", "Z")
+        m = vf2pp_isomorphism(H1, H2, node_label="label")
+        assert not m
+
+        # Compensate in H1
+        H1.remove_edge(10, 10)
+        m = vf2pp_isomorphism(H1, H2, node_label="label")
+        assert m
+
+    def test_custom_multigraph3_different_labels(self):
+        G1 = nx.MultiGraph()
+
+        mapped = {1: 9, 2: 8, 3: 7, 4: 6, 5: 3, 8: 5, 9: 4, 7: 1, 6: 2}
+        edges1 = [
+            (1, 2),
+            (1, 3),
+            (1, 3),
+            (2, 3),
+            (2, 3),
+            (3, 4),
+            (4, 5),
+            (4, 7),
+            (4, 9),
+            (4, 9),
+            (4, 9),
+            (5, 8),
+            (5, 8),
+            (8, 9),
+            (8, 9),
+            (5, 6),
+            (6, 7),
+            (6, 7),
+            (6, 7),
+            (5, 2),
+        ]
+
+        G1.add_edges_from(edges1)
+        G2 = nx.relabel_nodes(G1, mapped)
+
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label")
+        nx.set_node_attributes(
+            G2,
+            dict(zip([mapped[n] for n in G1], it.cycle(labels_many))),
+            "label",
+        )
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+        assert m == mapped
+
+        # Delete edge maintaining isomorphism
+        G1.remove_edge(4, 9)
+        G2.remove_edge(4, 6)
+
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+        assert m == mapped
+
+        # Change edge orientation such that G1 mirrors G2
+        G1.add_edges_from([(4, 9), (1, 2), (1, 2)])
+        G1.remove_edges_from([(1, 3), (1, 3)])
+        G2.add_edges_from([(3, 5), (7, 9)])
+        G2.remove_edge(8, 9)
+
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert not m
+
+        # Make all labels the same, so G1 and G2 are also isomorphic
+        for n1, n2 in zip(G1.nodes(), G2.nodes()):
+            G1.nodes[n1]["label"] = "blue"
+            G2.nodes[n2]["label"] = "blue"
+
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+        G1.add_node(10)
+        G2.add_node("Z")
+        G1.nodes[10]["label"] = "green"
+        G2.nodes["Z"]["label"] = "green"
+
+        # Add different number of edges between the new nodes and themselves
+        G1.add_edges_from([(10, 10), (10, 10)])
+        G2.add_edges_from([("Z", "Z")])
+
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert not m
+
+        # Make the number of self-edges equal
+        G1.remove_edge(10, 10)
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+        # Connect the new node to the graph
+        G1.add_edges_from([(10, 3), (10, 4)])
+        G2.add_edges_from([("Z", 8), ("Z", 3)])
+
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+        # Remove central node
+        G1.remove_node(4)
+        G2.remove_node(3)
+        G1.add_edges_from([(5, 6), (5, 6), (5, 7)])
+        G2.add_edges_from([(1, 6), (1, 6), (6, 2)])
+
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+    def test_custom_multigraph4_same_labels(self):
+        G1 = nx.MultiGraph()
+        edges1 = [
+            (1, 2),
+            (1, 2),
+            (2, 2),
+            (2, 3),
+            (3, 8),
+            (3, 8),
+            (3, 4),
+            (4, 5),
+            (4, 5),
+            (4, 5),
+            (4, 6),
+            (3, 6),
+            (3, 6),
+            (6, 6),
+            (8, 7),
+            (7, 7),
+            (8, 9),
+            (9, 9),
+            (8, 9),
+            (8, 9),
+            (5, 9),
+            (10, 11),
+            (11, 12),
+            (12, 13),
+            (11, 13),
+            (10, 10),
+            (10, 11),
+            (11, 13),
+        ]
+
+        mapped = {
+            1: "n",
+            2: "m",
+            3: "l",
+            4: "j",
+            5: "k",
+            6: "i",
+            7: "g",
+            8: "h",
+            9: "f",
+            10: "b",
+            11: "a",
+            12: "d",
+            13: "e",
+        }
+
+        G1.add_edges_from(edges1)
+        G2 = nx.relabel_nodes(G1, mapped)
+
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label")
+        nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label")
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+        # Add extra but corresponding edges to both graphs
+        G1.add_edges_from([(2, 2), (2, 3), (2, 8), (3, 4)])
+        G2.add_edges_from([("m", "m"), ("m", "l"), ("m", "h"), ("l", "j")])
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+        # Obtain subgraphs
+        H1 = nx.MultiGraph(G1.subgraph([2, 3, 4, 6, 10, 11, 12, 13]))
+        H2 = nx.MultiGraph(
+            G2.subgraph(
+                [
+                    mapped[2],
+                    mapped[3],
+                    mapped[8],
+                    mapped[9],
+                    mapped[10],
+                    mapped[11],
+                    mapped[12],
+                    mapped[13],
+                ]
+            )
+        )
+
+        m = vf2pp_isomorphism(H1, H2, node_label="label")
+        assert not m
+
+        # Make them isomorphic
+        H2.remove_edges_from(
+            [(mapped[3], mapped[2]), (mapped[9], mapped[8]), (mapped[2], mapped[2])]
+        )
+        H2.add_edges_from([(mapped[9], mapped[9]), (mapped[2], mapped[8])])
+        m = vf2pp_isomorphism(H1, H2, node_label="label")
+        assert m
+
+        # Re-structure the disconnected sub-graph
+        H1.remove_node(12)
+        H2.remove_node(mapped[12])
+        H1.add_edge(13, 13)
+        H2.add_edge(mapped[13], mapped[13])
+
+        # Connect the two disconnected components, forming a single graph
+        H1.add_edges_from([(3, 13), (6, 11)])
+        H2.add_edges_from([(mapped[8], mapped[10]), (mapped[2], mapped[11])])
+        m = vf2pp_isomorphism(H1, H2, node_label="label")
+        assert m
+
+        # Change orientation of self-loops in one graph, maintaining the degree sequence
+        H1.remove_edges_from([(2, 2), (3, 6)])
+        H1.add_edges_from([(6, 6), (2, 3)])
+        m = vf2pp_isomorphism(H1, H2, node_label="label")
+        assert not m
+
+    def test_custom_multigraph4_different_labels(self):
+        G1 = nx.MultiGraph()
+        edges1 = [
+            (1, 2),
+            (1, 2),
+            (2, 2),
+            (2, 3),
+            (3, 8),
+            (3, 8),
+            (3, 4),
+            (4, 5),
+            (4, 5),
+            (4, 5),
+            (4, 6),
+            (3, 6),
+            (3, 6),
+            (6, 6),
+            (8, 7),
+            (7, 7),
+            (8, 9),
+            (9, 9),
+            (8, 9),
+            (8, 9),
+            (5, 9),
+            (10, 11),
+            (11, 12),
+            (12, 13),
+            (11, 13),
+        ]
+
+        mapped = {
+            1: "n",
+            2: "m",
+            3: "l",
+            4: "j",
+            5: "k",
+            6: "i",
+            7: "g",
+            8: "h",
+            9: "f",
+            10: "b",
+            11: "a",
+            12: "d",
+            13: "e",
+        }
+
+        G1.add_edges_from(edges1)
+        G2 = nx.relabel_nodes(G1, mapped)
+
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label")
+        nx.set_node_attributes(
+            G2,
+            dict(zip([mapped[n] for n in G1], it.cycle(labels_many))),
+            "label",
+        )
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m == mapped
+
+        # Add extra but corresponding edges to both graphs
+        G1.add_edges_from([(2, 2), (2, 3), (2, 8), (3, 4)])
+        G2.add_edges_from([("m", "m"), ("m", "l"), ("m", "h"), ("l", "j")])
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m == mapped
+
+        # Obtain isomorphic subgraphs
+        H1 = nx.MultiGraph(G1.subgraph([2, 3, 4, 6]))
+        H2 = nx.MultiGraph(G2.subgraph(["m", "l", "j", "i"]))
+
+        m = vf2pp_isomorphism(H1, H2, node_label="label")
+        assert m
+
+        # Delete the 3-clique, keeping only the path-graph. Also, H1 mirrors H2
+        H1.remove_node(4)
+        H2.remove_node("j")
+        H1.remove_edges_from([(2, 2), (2, 3), (6, 6)])
+        H2.remove_edges_from([("l", "i"), ("m", "m"), ("m", "m")])
+
+        m = vf2pp_isomorphism(H1, H2, node_label="label")
+        assert not m
+
+        # Assign the same labels so that mirroring means isomorphic
+        for n1, n2 in zip(H1.nodes(), H2.nodes()):
+            H1.nodes[n1]["label"] = "red"
+            H2.nodes[n2]["label"] = "red"
+
+        m = vf2pp_isomorphism(H1, H2, node_label="label")
+        assert m
+
+        # Leave only one node with self-loop
+        H1.remove_nodes_from([3, 6])
+        H2.remove_nodes_from(["m", "l"])
+        m = vf2pp_isomorphism(H1, H2, node_label="label")
+        assert m
+
+        # Remove one self-loop from H1
+        H1.remove_edge(2, 2)
+        m = vf2pp_isomorphism(H1, H2, node_label="label")
+        assert not m
+
+        # Same for H2
+        H2.remove_edge("i", "i")
+        m = vf2pp_isomorphism(H1, H2, node_label="label")
+        assert m
+
+        # Compose H1 with the disconnected sub-graph of G1. Same for H2
+        S1 = nx.compose(H1, nx.MultiGraph(G1.subgraph([10, 11, 12, 13])))
+        S2 = nx.compose(H2, nx.MultiGraph(G2.subgraph(["a", "b", "d", "e"])))
+
+        m = vf2pp_isomorphism(H1, H2, node_label="label")
+        assert m
+
+        # Connect the two components
+        S1.add_edges_from([(13, 13), (13, 13), (2, 13)])
+        S2.add_edges_from([("a", "a"), ("a", "a"), ("i", "e")])
+        m = vf2pp_isomorphism(H1, H2, node_label="label")
+        assert m
+
+    def test_custom_multigraph5_same_labels(self):
+        G1 = nx.MultiGraph()
+
+        edges1 = [
+            (1, 5),
+            (1, 2),
+            (1, 4),
+            (2, 3),
+            (2, 6),
+            (3, 4),
+            (3, 7),
+            (4, 8),
+            (5, 8),
+            (5, 6),
+            (6, 7),
+            (7, 8),
+        ]
+        mapped = {1: "a", 2: "h", 3: "d", 4: "i", 5: "g", 6: "b", 7: "j", 8: "c"}
+
+        G1.add_edges_from(edges1)
+        G2 = nx.relabel_nodes(G1, mapped)
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label")
+        nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label")
+
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+        # Add multiple edges and self-loops, maintaining isomorphism
+        G1.add_edges_from(
+            [(1, 2), (1, 2), (3, 7), (8, 8), (8, 8), (7, 8), (2, 3), (5, 6)]
+        )
+        G2.add_edges_from(
+            [
+                ("a", "h"),
+                ("a", "h"),
+                ("d", "j"),
+                ("c", "c"),
+                ("c", "c"),
+                ("j", "c"),
+                ("d", "h"),
+                ("g", "b"),
+            ]
+        )
+
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+        # Make G2 to be the rotated G1
+        G2.remove_edges_from(
+            [
+                ("a", "h"),
+                ("a", "h"),
+                ("d", "j"),
+                ("c", "c"),
+                ("c", "c"),
+                ("j", "c"),
+                ("d", "h"),
+                ("g", "b"),
+            ]
+        )
+        G2.add_edges_from(
+            [
+                ("d", "i"),
+                ("a", "h"),
+                ("g", "b"),
+                ("g", "b"),
+                ("i", "i"),
+                ("i", "i"),
+                ("b", "j"),
+                ("d", "j"),
+            ]
+        )
+
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+    def test_disconnected_multigraph_all_same_labels(self):
+        G1 = nx.MultiGraph()
+        G1.add_nodes_from(list(range(10)))
+        G1.add_edges_from([(i, i) for i in range(10)])
+
+        mapped = {0: 9, 1: 8, 2: 7, 3: 6, 4: 5, 5: 4, 6: 3, 7: 2, 8: 1, 9: 0}
+        G2 = nx.relabel_nodes(G1, mapped)
+
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_same))), "label")
+        nx.set_node_attributes(G2, dict(zip(G2, it.cycle(labels_same))), "label")
+
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+        # Add self-loops to non-mapped nodes. Should be the same, as the graph is disconnected.
+        G1.add_edges_from([(i, i) for i in range(5, 8)] * 3)
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert not m
+
+        # Compensate in G2
+        G2.add_edges_from([(i, i) for i in range(0, 3)] * 3)
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+        # Add one more self-loop in G2
+        G2.add_edges_from([(0, 0), (1, 1), (1, 1)])
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert not m
+
+        # Compensate in G1
+        G1.add_edges_from([(5, 5), (7, 7), (7, 7)])
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+    def test_disconnected_multigraph_all_different_labels(self):
+        G1 = nx.MultiGraph()
+        G1.add_nodes_from(list(range(10)))
+        G1.add_edges_from([(i, i) for i in range(10)])
+
+        mapped = {0: 9, 1: 8, 2: 7, 3: 6, 4: 5, 5: 4, 6: 3, 7: 2, 8: 1, 9: 0}
+        G2 = nx.relabel_nodes(G1, mapped)
+
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label")
+        nx.set_node_attributes(
+            G2,
+            dict(zip([mapped[n] for n in G1], it.cycle(labels_many))),
+            "label",
+        )
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+        assert m == mapped
+
+        # Add self-loops to non-mapped nodes. Now it is not the same, as there are different labels
+        G1.add_edges_from([(i, i) for i in range(5, 8)] * 3)
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert not m
+
+        # Add self-loops to non mapped nodes in G2 as well
+        G2.add_edges_from([(mapped[i], mapped[i]) for i in range(0, 3)] * 7)
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert not m
+
+        # Add self-loops to mapped nodes in G2
+        G2.add_edges_from([(mapped[i], mapped[i]) for i in range(5, 8)] * 3)
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert not m
+
+        # Add self-loops to G1 so that they are even in both graphs
+        G1.add_edges_from([(i, i) for i in range(0, 3)] * 7)
+        m = vf2pp_isomorphism(G1, G2, node_label="label")
+        assert m
+
+
+class TestDiGraphISOVF2pp:
+    def test_wikipedia_graph(self):
+        edges1 = [
+            (1, 5),
+            (1, 2),
+            (1, 4),
+            (3, 2),
+            (6, 2),
+            (3, 4),
+            (7, 3),
+            (4, 8),
+            (5, 8),
+            (6, 5),
+            (6, 7),
+            (7, 8),
+        ]
+        mapped = {1: "a", 2: "h", 3: "d", 4: "i", 5: "g", 6: "b", 7: "j", 8: "c"}
+
+        G1 = nx.DiGraph(edges1)
+        G2 = nx.relabel_nodes(G1, mapped)
+
+        assert vf2pp_isomorphism(G1, G2) == mapped
+
+        # Change the direction of an edge
+        G1.remove_edge(1, 5)
+        G1.add_edge(5, 1)
+        assert vf2pp_isomorphism(G1, G2) is None
+
+    def test_non_isomorphic_same_degree_sequence(self):
+        r"""
+                G1                           G2
+        x--------------x              x--------------x
+        | \            |              | \            |
+        |  x-------x   |              |  x-------x   |
+        |  |       |   |              |  |       |   |
+        |  x-------x   |              |  x-------x   |
+        | /            |              |            \ |
+        x--------------x              x--------------x
+        """
+        edges1 = [
+            (1, 5),
+            (1, 2),
+            (4, 1),
+            (3, 2),
+            (3, 4),
+            (4, 8),
+            (5, 8),
+            (6, 5),
+            (6, 7),
+            (7, 8),
+        ]
+        edges2 = [
+            (1, 5),
+            (1, 2),
+            (4, 1),
+            (3, 2),
+            (4, 3),
+            (5, 8),
+            (6, 5),
+            (6, 7),
+            (3, 7),
+            (8, 7),
+        ]
+
+        G1 = nx.DiGraph(edges1)
+        G2 = nx.DiGraph(edges2)
+        assert vf2pp_isomorphism(G1, G2) is None
diff --git a/networkx/algorithms/isomorphism/tests/test_vf2pp_helpers.py b/networkx/algorithms/isomorphism/tests/test_vf2pp_helpers.py
new file mode 100644
index 0000000..1a46c38
--- /dev/null
+++ b/networkx/algorithms/isomorphism/tests/test_vf2pp_helpers.py
@@ -0,0 +1,3103 @@
+import itertools as it
+
+import pytest
+
+import networkx as nx
+from networkx import vf2pp_is_isomorphic, vf2pp_isomorphism
+from networkx.algorithms.isomorphism.vf2pp import (
+    _consistent_PT,
+    _cut_PT,
+    _feasibility,
+    _find_candidates,
+    _find_candidates_Di,
+    _GraphParameters,
+    _initialize_parameters,
+    _matching_order,
+    _restore_Tinout,
+    _restore_Tinout_Di,
+    _StateParameters,
+    _update_Tinout,
+)
+
+labels_same = ["blue"]
+
+labels_many = [
+    "white",
+    "red",
+    "blue",
+    "green",
+    "orange",
+    "black",
+    "purple",
+    "yellow",
+    "brown",
+    "cyan",
+    "solarized",
+    "pink",
+    "none",
+]
+
+
+class TestNodeOrdering:
+    def test_empty_graph(self):
+        G1 = nx.Graph()
+        G2 = nx.Graph()
+        gparams = _GraphParameters(G1, G2, None, None, None, None, None)
+        assert len(set(_matching_order(gparams))) == 0
+
+    def test_single_node(self):
+        G1 = nx.Graph()
+        G2 = nx.Graph()
+        G1.add_node(1)
+        G2.add_node(1)
+
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels_many))), "label")
+        nx.set_node_attributes(
+            G2,
+            dict(zip(G2, it.cycle(labels_many))),
+            "label",
+        )
+        l1, l2 = nx.get_node_attributes(G1, "label"), nx.get_node_attributes(
+            G2, "label"
+        )
+
+        gparams = _GraphParameters(
+            G1,
+            G2,
+            l1,
+            l2,
+            nx.utils.groups(l1),
+            nx.utils.groups(l2),
+            nx.utils.groups(dict(G2.degree())),
+        )
+        m = _matching_order(gparams)
+        assert m == [1]
+
+    def test_matching_order(self):
+        labels = [
+            "blue",
+            "blue",
+            "red",
+            "red",
+            "red",
+            "red",
+            "green",
+            "green",
+            "green",
+            "yellow",
+            "purple",
+            "purple",
+            "blue",
+            "blue",
+        ]
+        G1 = nx.Graph(
+            [
+                (0, 1),
+                (0, 2),
+                (1, 2),
+                (2, 5),
+                (2, 4),
+                (1, 3),
+                (1, 4),
+                (3, 6),
+                (4, 6),
+                (6, 7),
+                (7, 8),
+                (9, 10),
+                (9, 11),
+                (11, 12),
+                (11, 13),
+                (12, 13),
+                (10, 13),
+            ]
+        )
+        G2 = G1.copy()
+        nx.set_node_attributes(G1, dict(zip(G1, it.cycle(labels))), "label")
+        nx.set_node_attributes(
+            G2,
+            dict(zip(G2, it.cycle(labels))),
+            "label",
+        )
+        l1, l2 = nx.get_node_attributes(G1, "label"), nx.get_node_attributes(
+            G2, "label"
+        )
+        gparams = _GraphParameters(
+            G1,
+            G2,
+            l1,
+            l2,
+            nx.utils.groups(l1),
+            nx.utils.groups(l2),
+            nx.utils.groups(dict(G2.degree())),
+        )
+
+        expected = [9, 11, 10, 13, 12, 1, 2, 4, 0, 3, 6, 5, 7, 8]
+        assert _matching_order(gparams) == expected
+
+    def test_matching_order_all_branches(self):
+        G1 = nx.Graph(
+            [(0, 1), (0, 2), (0, 3), (0, 4), (1, 2), (1, 3), (1, 4), (2, 4), (3, 4)]
+        )
+        G1.add_node(5)
+        G2 = G1.copy()
+
+        G1.nodes[0]["label"] = "black"
+        G1.nodes[1]["label"] = "blue"
+        G1.nodes[2]["label"] = "blue"
+        G1.nodes[3]["label"] = "red"
+        G1.nodes[4]["label"] = "red"
+        G1.nodes[5]["label"] = "blue"
+
+        G2.nodes[0]["label"] = "black"
+        G2.nodes[1]["label"] = "blue"
+        G2.nodes[2]["label"] = "blue"
+        G2.nodes[3]["label"] = "red"
+        G2.nodes[4]["label"] = "red"
+        G2.nodes[5]["label"] = "blue"
+
+        l1, l2 = nx.get_node_attributes(G1, "label"), nx.get_node_attributes(
+            G2, "label"
+        )
+        gparams = _GraphParameters(
+            G1,
+            G2,
+            l1,
+            l2,
+            nx.utils.groups(l1),
+            nx.utils.groups(l2),
+            nx.utils.groups(dict(G2.degree())),
+        )
+
+        expected = [0, 4, 1, 3, 2, 5]
+        assert _matching_order(gparams) == expected
+
+
+class TestGraphCandidateSelection:
+    # Tests for _find_candidates on undirected graphs: verify the candidate
+    # sets produced for nodes with and without covered (already-mapped)
+    # neighbors, both with and without node labels.  Node 0 is kept
+    # disconnected in every test.
+    G1_edges = [
+        (1, 2),
+        (1, 4),
+        (1, 5),
+        (2, 3),
+        (2, 4),
+        (3, 4),
+        (4, 5),
+        (1, 6),
+        (6, 7),
+        (6, 8),
+        (8, 9),
+        (7, 9),
+    ]
+    # G1 -> G2 node renaming applied via nx.relabel_nodes in each test.
+    mapped = {
+        0: "x",
+        1: "a",
+        2: "b",
+        3: "c",
+        4: "d",
+        5: "e",
+        6: "f",
+        7: "g",
+        8: "h",
+        9: "i",
+    }
+
+    def test_no_covered_neighbors_no_labels(self):
+        # Unlabeled graphs: candidates for a node with no mapped neighbors
+        # come from degree and membership in T2/T2_tilde only.
+        G1 = nx.Graph()
+        G1.add_edges_from(self.G1_edges)
+        G1.add_node(0)
+        G2 = nx.relabel_nodes(G1, self.mapped)
+
+        G1_degree = dict(G1.degree)
+        l1 = dict(G1.nodes(data="label", default=-1))
+        l2 = dict(G2.nodes(data="label", default=-1))
+        gparams = _GraphParameters(
+            G1,
+            G2,
+            l1,
+            l2,
+            nx.utils.groups(l1),
+            nx.utils.groups(l2),
+            nx.utils.groups(dict(G2.degree())),
+        )
+
+        # Partial mapping: nodes 9 and 1 already matched.
+        m = {9: self.mapped[9], 1: self.mapped[1]}
+        m_rev = {self.mapped[9]: 9, self.mapped[1]: 1}
+
+        # Frontier sets handed to _StateParameters (presumably the Ti /
+        # Ti_tilde sets of the VF2++ state -- confirm against the solver).
+        T1 = {7, 8, 2, 4, 5}
+        T1_tilde = {0, 3, 6}
+        T2 = {"g", "h", "b", "d", "e"}
+        T2_tilde = {"x", "c", "f"}
+
+        sparams = _StateParameters(
+            m, m_rev, T1, None, T1_tilde, None, T2, None, T2_tilde, None
+        )
+
+        u = 3
+        candidates = _find_candidates(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u]}
+
+        u = 0
+        candidates = _find_candidates(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u]}
+
+        # Remove node 9 from the mapping so more nodes become uncovered.
+        m.pop(9)
+        m_rev.pop(self.mapped[9])
+
+        T1 = {2, 4, 5, 6}
+        T1_tilde = {0, 3, 7, 8, 9}
+        T2 = {"g", "h", "b", "d", "e", "f"}
+        T2_tilde = {"x", "c", "g", "h", "i"}
+
+        sparams = _StateParameters(
+            m, m_rev, T1, None, T1_tilde, None, T2, None, T2_tilde, None
+        )
+
+        u = 7
+        candidates = _find_candidates(u, gparams, sparams, G1_degree)
+        # Without labels, several same-degree uncovered nodes all qualify.
+        assert candidates == {
+            self.mapped[u],
+            self.mapped[8],
+            self.mapped[3],
+            self.mapped[9],
+        }
+
+    def test_no_covered_neighbors_with_labels(self):
+        # Same setup but with cycling labels: label mismatches must shrink
+        # the candidate sets.
+        G1 = nx.Graph()
+        G1.add_edges_from(self.G1_edges)
+        G1.add_node(0)
+        G2 = nx.relabel_nodes(G1, self.mapped)
+
+        G1_degree = dict(G1.degree)
+        nx.set_node_attributes(
+            G1,
+            dict(zip(G1, it.cycle(labels_many))),
+            "label",
+        )
+        # Assign G2 the same label sequence via the node mapping so labels
+        # agree between corresponding nodes.
+        nx.set_node_attributes(
+            G2,
+            dict(
+                zip(
+                    [self.mapped[n] for n in G1],
+                    it.cycle(labels_many),
+                )
+            ),
+            "label",
+        )
+        l1 = dict(G1.nodes(data="label", default=-1))
+        l2 = dict(G2.nodes(data="label", default=-1))
+        gparams = _GraphParameters(
+            G1,
+            G2,
+            l1,
+            l2,
+            nx.utils.groups(l1),
+            nx.utils.groups(l2),
+            nx.utils.groups(dict(G2.degree())),
+        )
+
+        m = {9: self.mapped[9], 1: self.mapped[1]}
+        m_rev = {self.mapped[9]: 9, self.mapped[1]: 1}
+
+        T1 = {7, 8, 2, 4, 5, 6}
+        T1_tilde = {0, 3}
+        T2 = {"g", "h", "b", "d", "e", "f"}
+        T2_tilde = {"x", "c"}
+
+        sparams = _StateParameters(
+            m, m_rev, T1, None, T1_tilde, None, T2, None, T2_tilde, None
+        )
+
+        u = 3
+        candidates = _find_candidates(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u]}
+
+        u = 0
+        candidates = _find_candidates(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u]}
+
+        # Change label of disconnected node
+        G1.nodes[u]["label"] = "blue"
+        l1 = dict(G1.nodes(data="label", default=-1))
+        l2 = dict(G2.nodes(data="label", default=-1))
+        gparams = _GraphParameters(
+            G1,
+            G2,
+            l1,
+            l2,
+            nx.utils.groups(l1),
+            nx.utils.groups(l2),
+            nx.utils.groups(dict(G2.degree())),
+        )
+
+        # No candidate
+        candidates = _find_candidates(u, gparams, sparams, G1_degree)
+        assert candidates == set()
+
+        m.pop(9)
+        m_rev.pop(self.mapped[9])
+
+        T1 = {2, 4, 5, 6}
+        T1_tilde = {0, 3, 7, 8, 9}
+        T2 = {"b", "d", "e", "f"}
+        T2_tilde = {"x", "c", "g", "h", "i"}
+
+        sparams = _StateParameters(
+            m, m_rev, T1, None, T1_tilde, None, T2, None, T2_tilde, None
+        )
+
+        u = 7
+        candidates = _find_candidates(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u]}
+
+        # Give node 8 (both graphs) the same label as node 7: node 8's image
+        # becomes an additional candidate.
+        G1.nodes[8]["label"] = G1.nodes[7]["label"]
+        G2.nodes[self.mapped[8]]["label"] = G1.nodes[7]["label"]
+        l1 = dict(G1.nodes(data="label", default=-1))
+        l2 = dict(G2.nodes(data="label", default=-1))
+        gparams = _GraphParameters(
+            G1,
+            G2,
+            l1,
+            l2,
+            nx.utils.groups(l1),
+            nx.utils.groups(l2),
+            nx.utils.groups(dict(G2.degree())),
+        )
+
+        candidates = _find_candidates(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u], self.mapped[8]}
+
+    def test_covered_neighbors_no_labels(self):
+        # Nodes 5 and 6 both have mapped neighbors; candidates are restricted
+        # to nodes of G2 adjacent to the images of those neighbors.
+        G1 = nx.Graph()
+        G1.add_edges_from(self.G1_edges)
+        G1.add_node(0)
+        G2 = nx.relabel_nodes(G1, self.mapped)
+
+        G1_degree = dict(G1.degree)
+        # data=None makes every node share the same "label" (uniform).
+        l1 = dict(G1.nodes(data=None, default=-1))
+        l2 = dict(G2.nodes(data=None, default=-1))
+        gparams = _GraphParameters(
+            G1,
+            G2,
+            l1,
+            l2,
+            nx.utils.groups(l1),
+            nx.utils.groups(l2),
+            nx.utils.groups(dict(G2.degree())),
+        )
+
+        m = {9: self.mapped[9], 1: self.mapped[1]}
+        m_rev = {self.mapped[9]: 9, self.mapped[1]: 1}
+
+        T1 = {7, 8, 2, 4, 5, 6}
+        T1_tilde = {0, 3}
+        T2 = {"g", "h", "b", "d", "e", "f"}
+        T2_tilde = {"x", "c"}
+
+        sparams = _StateParameters(
+            m, m_rev, T1, None, T1_tilde, None, T2, None, T2_tilde, None
+        )
+
+        u = 5
+        candidates = _find_candidates(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u]}
+
+        u = 6
+        candidates = _find_candidates(u, gparams, sparams, G1_degree)
+        # Node 2's image also neighbors mapped nodes with matching degree.
+        assert candidates == {self.mapped[u], self.mapped[2]}
+
+    def test_covered_neighbors_with_labels(self):
+        # As above but labeled: labels initially disambiguate 6 from 2, until
+        # the test forces their labels to coincide.
+        G1 = nx.Graph()
+        G1.add_edges_from(self.G1_edges)
+        G1.add_node(0)
+        G2 = nx.relabel_nodes(G1, self.mapped)
+
+        G1_degree = dict(G1.degree)
+        nx.set_node_attributes(
+            G1,
+            dict(zip(G1, it.cycle(labels_many))),
+            "label",
+        )
+        nx.set_node_attributes(
+            G2,
+            dict(
+                zip(
+                    [self.mapped[n] for n in G1],
+                    it.cycle(labels_many),
+                )
+            ),
+            "label",
+        )
+        l1 = dict(G1.nodes(data="label", default=-1))
+        l2 = dict(G2.nodes(data="label", default=-1))
+        gparams = _GraphParameters(
+            G1,
+            G2,
+            l1,
+            l2,
+            nx.utils.groups(l1),
+            nx.utils.groups(l2),
+            nx.utils.groups(dict(G2.degree())),
+        )
+
+        m = {9: self.mapped[9], 1: self.mapped[1]}
+        m_rev = {self.mapped[9]: 9, self.mapped[1]: 1}
+
+        T1 = {7, 8, 2, 4, 5, 6}
+        T1_tilde = {0, 3}
+        T2 = {"g", "h", "b", "d", "e", "f"}
+        T2_tilde = {"x", "c"}
+
+        sparams = _StateParameters(
+            m, m_rev, T1, None, T1_tilde, None, T2, None, T2_tilde, None
+        )
+
+        u = 5
+        candidates = _find_candidates(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u]}
+
+        u = 6
+        candidates = _find_candidates(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u]}
+
+        # Assign to 2, the same label as 6
+        G1.nodes[2]["label"] = G1.nodes[u]["label"]
+        G2.nodes[self.mapped[2]]["label"] = G1.nodes[u]["label"]
+        l1 = dict(G1.nodes(data="label", default=-1))
+        l2 = dict(G2.nodes(data="label", default=-1))
+        gparams = _GraphParameters(
+            G1,
+            G2,
+            l1,
+            l2,
+            nx.utils.groups(l1),
+            nx.utils.groups(l2),
+            nx.utils.groups(dict(G2.degree())),
+        )
+
+        candidates = _find_candidates(u, gparams, sparams, G1_degree)
+        # With the labels equalized, node 2's image joins the candidate set.
+        assert candidates == {self.mapped[u], self.mapped[2]}
+
+
+class TestDiGraphCandidateSelection:
+    # Directed counterpart of TestGraphCandidateSelection: tests for
+    # _find_candidates_Di, where candidates must match on the (in_degree,
+    # out_degree) pair and on edge orientation, not just total degree.
+    G1_edges = [
+        (1, 2),
+        (1, 4),
+        (5, 1),
+        (2, 3),
+        (4, 2),
+        (3, 4),
+        (4, 5),
+        (1, 6),
+        (6, 7),
+        (6, 8),
+        (8, 9),
+        (7, 9),
+    ]
+    # G1 -> G2 node renaming applied via nx.relabel_nodes in each test.
+    mapped = {
+        0: "x",
+        1: "a",
+        2: "b",
+        3: "c",
+        4: "d",
+        5: "e",
+        6: "f",
+        7: "g",
+        8: "h",
+        9: "i",
+    }
+
+    def test_no_covered_neighbors_no_labels(self):
+        G1 = nx.DiGraph()
+        G1.add_edges_from(self.G1_edges)
+        G1.add_node(0)
+        G2 = nx.relabel_nodes(G1, self.mapped)
+
+        # Degrees are (in_degree, out_degree) pairs; zip works because
+        # in_degree and out_degree iterate nodes in the same order.
+        G1_degree = {
+            n: (in_degree, out_degree)
+            for (n, in_degree), (_, out_degree) in zip(G1.in_degree, G1.out_degree)
+        }
+
+        l1 = dict(G1.nodes(data="label", default=-1))
+        l2 = dict(G2.nodes(data="label", default=-1))
+        gparams = _GraphParameters(
+            G1,
+            G2,
+            l1,
+            l2,
+            nx.utils.groups(l1),
+            nx.utils.groups(l2),
+            nx.utils.groups(
+                {
+                    node: (in_degree, out_degree)
+                    for (node, in_degree), (_, out_degree) in zip(
+                        G2.in_degree(), G2.out_degree()
+                    )
+                }
+            ),
+        )
+
+        m = {9: self.mapped[9], 1: self.mapped[1]}
+        m_rev = {self.mapped[9]: 9, self.mapped[1]: 1}
+
+        # Separate out-/in-frontiers for the directed state.
+        T1_out = {2, 4, 6}
+        T1_in = {5, 7, 8}
+        T1_tilde = {0, 3}
+        T2_out = {"b", "d", "f"}
+        T2_in = {"e", "g", "h"}
+        T2_tilde = {"x", "c"}
+
+        sparams = _StateParameters(
+            m, m_rev, T1_out, T1_in, T1_tilde, None, T2_out, T2_in, T2_tilde, None
+        )
+
+        u = 3
+        candidates = _find_candidates_Di(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u]}
+
+        u = 0
+        candidates = _find_candidates_Di(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u]}
+
+        # Unmap node 9 and recompute the frontiers accordingly.
+        m.pop(9)
+        m_rev.pop(self.mapped[9])
+
+        T1_out = {2, 4, 6}
+        T1_in = {5}
+        T1_tilde = {0, 3, 7, 8, 9}
+        T2_out = {"b", "d", "f"}
+        T2_in = {"e"}
+        T2_tilde = {"x", "c", "g", "h", "i"}
+
+        sparams = _StateParameters(
+            m, m_rev, T1_out, T1_in, T1_tilde, None, T2_out, T2_in, T2_tilde, None
+        )
+
+        u = 7
+        candidates = _find_candidates_Di(u, gparams, sparams, G1_degree)
+        # Orientation constraints exclude node 9's image here (compare the
+        # undirected version, which also accepts it).
+        assert candidates == {self.mapped[u], self.mapped[8], self.mapped[3]}
+
+    def test_no_covered_neighbors_with_labels(self):
+        G1 = nx.DiGraph()
+        G1.add_edges_from(self.G1_edges)
+        G1.add_node(0)
+        G2 = nx.relabel_nodes(G1, self.mapped)
+
+        G1_degree = {
+            n: (in_degree, out_degree)
+            for (n, in_degree), (_, out_degree) in zip(G1.in_degree, G1.out_degree)
+        }
+        nx.set_node_attributes(
+            G1,
+            dict(zip(G1, it.cycle(labels_many))),
+            "label",
+        )
+        # Same cycling labels on G2, aligned through the node mapping.
+        nx.set_node_attributes(
+            G2,
+            dict(
+                zip(
+                    [self.mapped[n] for n in G1],
+                    it.cycle(labels_many),
+                )
+            ),
+            "label",
+        )
+        l1 = dict(G1.nodes(data="label", default=-1))
+        l2 = dict(G2.nodes(data="label", default=-1))
+        gparams = _GraphParameters(
+            G1,
+            G2,
+            l1,
+            l2,
+            nx.utils.groups(l1),
+            nx.utils.groups(l2),
+            nx.utils.groups(
+                {
+                    node: (in_degree, out_degree)
+                    for (node, in_degree), (_, out_degree) in zip(
+                        G2.in_degree(), G2.out_degree()
+                    )
+                }
+            ),
+        )
+
+        m = {9: self.mapped[9], 1: self.mapped[1]}
+        m_rev = {self.mapped[9]: 9, self.mapped[1]: 1}
+
+        T1_out = {2, 4, 6}
+        T1_in = {5, 7, 8}
+        T1_tilde = {0, 3}
+        T2_out = {"b", "d", "f"}
+        T2_in = {"e", "g", "h"}
+        T2_tilde = {"x", "c"}
+
+        sparams = _StateParameters(
+            m, m_rev, T1_out, T1_in, T1_tilde, None, T2_out, T2_in, T2_tilde, None
+        )
+
+        u = 3
+        candidates = _find_candidates_Di(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u]}
+
+        u = 0
+        candidates = _find_candidates_Di(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u]}
+
+        # Change label of disconnected node
+        G1.nodes[u]["label"] = "blue"
+        l1 = dict(G1.nodes(data="label", default=-1))
+        l2 = dict(G2.nodes(data="label", default=-1))
+        gparams = _GraphParameters(
+            G1,
+            G2,
+            l1,
+            l2,
+            nx.utils.groups(l1),
+            nx.utils.groups(l2),
+            nx.utils.groups(
+                {
+                    node: (in_degree, out_degree)
+                    for (node, in_degree), (_, out_degree) in zip(
+                        G2.in_degree(), G2.out_degree()
+                    )
+                }
+            ),
+        )
+
+        # No candidate
+        candidates = _find_candidates_Di(u, gparams, sparams, G1_degree)
+        assert candidates == set()
+
+        m.pop(9)
+        m_rev.pop(self.mapped[9])
+
+        T1_out = {2, 4, 6}
+        T1_in = {5}
+        T1_tilde = {0, 3, 7, 8, 9}
+        T2_out = {"b", "d", "f"}
+        T2_in = {"e"}
+        T2_tilde = {"x", "c", "g", "h", "i"}
+
+        sparams = _StateParameters(
+            m, m_rev, T1_out, T1_in, T1_tilde, None, T2_out, T2_in, T2_tilde, None
+        )
+
+        u = 7
+        candidates = _find_candidates_Di(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u]}
+
+        # Equalize labels of nodes 7 and 8: node 8's image becomes a
+        # candidate as well.
+        G1.nodes[8]["label"] = G1.nodes[7]["label"]
+        G2.nodes[self.mapped[8]]["label"] = G1.nodes[7]["label"]
+        l1 = dict(G1.nodes(data="label", default=-1))
+        l2 = dict(G2.nodes(data="label", default=-1))
+        gparams = _GraphParameters(
+            G1,
+            G2,
+            l1,
+            l2,
+            nx.utils.groups(l1),
+            nx.utils.groups(l2),
+            nx.utils.groups(
+                {
+                    node: (in_degree, out_degree)
+                    for (node, in_degree), (_, out_degree) in zip(
+                        G2.in_degree(), G2.out_degree()
+                    )
+                }
+            ),
+        )
+
+        candidates = _find_candidates_Di(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u], self.mapped[8]}
+
+    def test_covered_neighbors_no_labels(self):
+        G1 = nx.DiGraph()
+        G1.add_edges_from(self.G1_edges)
+        G1.add_node(0)
+        G2 = nx.relabel_nodes(G1, self.mapped)
+
+        G1_degree = {
+            n: (in_degree, out_degree)
+            for (n, in_degree), (_, out_degree) in zip(G1.in_degree, G1.out_degree)
+        }
+
+        # data=None gives every node the same uniform "label".
+        l1 = dict(G1.nodes(data=None, default=-1))
+        l2 = dict(G2.nodes(data=None, default=-1))
+        gparams = _GraphParameters(
+            G1,
+            G2,
+            l1,
+            l2,
+            nx.utils.groups(l1),
+            nx.utils.groups(l2),
+            nx.utils.groups(
+                {
+                    node: (in_degree, out_degree)
+                    for (node, in_degree), (_, out_degree) in zip(
+                        G2.in_degree(), G2.out_degree()
+                    )
+                }
+            ),
+        )
+
+        m = {9: self.mapped[9], 1: self.mapped[1]}
+        m_rev = {self.mapped[9]: 9, self.mapped[1]: 1}
+
+        T1_out = {2, 4, 6}
+        T1_in = {5, 7, 8}
+        T1_tilde = {0, 3}
+        T2_out = {"b", "d", "f"}
+        T2_in = {"e", "g", "h"}
+        T2_tilde = {"x", "c"}
+
+        sparams = _StateParameters(
+            m, m_rev, T1_out, T1_in, T1_tilde, None, T2_out, T2_in, T2_tilde, None
+        )
+
+        u = 5
+        candidates = _find_candidates_Di(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u]}
+
+        u = 6
+        candidates = _find_candidates_Di(u, gparams, sparams, G1_degree)
+        # Unlike the undirected case, edge orientation rules out node 2's
+        # image here.
+        assert candidates == {self.mapped[u]}
+
+        # Change the direction of an edge to make the degree orientation same as first candidate of u.
+        G1.remove_edge(4, 2)
+        G1.add_edge(2, 4)
+        G2.remove_edge("d", "b")
+        G2.add_edge("b", "d")
+
+        gparams = _GraphParameters(
+            G1,
+            G2,
+            l1,
+            l2,
+            nx.utils.groups(l1),
+            nx.utils.groups(l2),
+            nx.utils.groups(
+                {
+                    node: (in_degree, out_degree)
+                    for (node, in_degree), (_, out_degree) in zip(
+                        G2.in_degree(), G2.out_degree()
+                    )
+                }
+            ),
+        )
+
+        candidates = _find_candidates_Di(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u], self.mapped[2]}
+
+    def test_covered_neighbors_with_labels(self):
+        G1 = nx.DiGraph()
+        G1.add_edges_from(self.G1_edges)
+        G1.add_node(0)
+        G2 = nx.relabel_nodes(G1, self.mapped)
+
+        # Start from the flipped 2->4 orientation used at the end of the
+        # unlabeled covered-neighbors test.
+        G1.remove_edge(4, 2)
+        G1.add_edge(2, 4)
+        G2.remove_edge("d", "b")
+        G2.add_edge("b", "d")
+
+        G1_degree = {
+            n: (in_degree, out_degree)
+            for (n, in_degree), (_, out_degree) in zip(G1.in_degree, G1.out_degree)
+        }
+
+        nx.set_node_attributes(
+            G1,
+            dict(zip(G1, it.cycle(labels_many))),
+            "label",
+        )
+        nx.set_node_attributes(
+            G2,
+            dict(
+                zip(
+                    [self.mapped[n] for n in G1],
+                    it.cycle(labels_many),
+                )
+            ),
+            "label",
+        )
+        l1 = dict(G1.nodes(data="label", default=-1))
+        l2 = dict(G2.nodes(data="label", default=-1))
+        gparams = _GraphParameters(
+            G1,
+            G2,
+            l1,
+            l2,
+            nx.utils.groups(l1),
+            nx.utils.groups(l2),
+            nx.utils.groups(
+                {
+                    node: (in_degree, out_degree)
+                    for (node, in_degree), (_, out_degree) in zip(
+                        G2.in_degree(), G2.out_degree()
+                    )
+                }
+            ),
+        )
+
+        m = {9: self.mapped[9], 1: self.mapped[1]}
+        m_rev = {self.mapped[9]: 9, self.mapped[1]: 1}
+
+        T1_out = {2, 4, 6}
+        T1_in = {5, 7, 8}
+        T1_tilde = {0, 3}
+        T2_out = {"b", "d", "f"}
+        T2_in = {"e", "g", "h"}
+        T2_tilde = {"x", "c"}
+
+        sparams = _StateParameters(
+            m, m_rev, T1_out, T1_in, T1_tilde, None, T2_out, T2_in, T2_tilde, None
+        )
+
+        u = 5
+        candidates = _find_candidates_Di(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u]}
+
+        u = 6
+        candidates = _find_candidates_Di(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u]}
+
+        # Assign to 2, the same label as 6
+        G1.nodes[2]["label"] = G1.nodes[u]["label"]
+        G2.nodes[self.mapped[2]]["label"] = G1.nodes[u]["label"]
+        l1 = dict(G1.nodes(data="label", default=-1))
+        l2 = dict(G2.nodes(data="label", default=-1))
+        gparams = _GraphParameters(
+            G1,
+            G2,
+            l1,
+            l2,
+            nx.utils.groups(l1),
+            nx.utils.groups(l2),
+            nx.utils.groups(
+                {
+                    node: (in_degree, out_degree)
+                    for (node, in_degree), (_, out_degree) in zip(
+                        G2.in_degree(), G2.out_degree()
+                    )
+                }
+            ),
+        )
+
+        candidates = _find_candidates_Di(u, gparams, sparams, G1_degree)
+        assert candidates == {self.mapped[u], self.mapped[2]}
+
+        # Change the direction of an edge to make the degree orientation same as first candidate of u.
+        G1.remove_edge(2, 4)
+        G1.add_edge(4, 2)
+        G2.remove_edge("b", "d")
+        G2.add_edge("d", "b")
+
+        gparams = _GraphParameters(
+            G1,
+            G2,
+            l1,
+            l2,
+            nx.utils.groups(l1),
+            nx.utils.groups(l2),
+            nx.utils.groups(
+                {
+                    node: (in_degree, out_degree)
+                    for (node, in_degree), (_, out_degree) in zip(
+                        G2.in_degree(), G2.out_degree()
+                    )
+                }
+            ),
+        )
+
+        candidates = _find_candidates_Di(u, gparams, sparams, G1_degree)
+        # Restoring the original orientation removes node 2's image again.
+        assert candidates == {self.mapped[u]}
+
+    def test_same_in_out_degrees_no_candidate(self):
+        # Regression-style check: g1 and g2 give node 4 identical (in, out)
+        # degree totals but opposite edge orientations, so the directed
+        # selector must reject the match that the undirected one accepts.
+        g1 = nx.DiGraph([(4, 1), (4, 2), (3, 4), (5, 4), (6, 4)])
+        g2 = nx.DiGraph([(1, 4), (2, 4), (3, 4), (4, 5), (4, 6)])
+
+        l1 = dict(g1.nodes(data=None, default=-1))
+        l2 = dict(g2.nodes(data=None, default=-1))
+        gparams = _GraphParameters(
+            g1,
+            g2,
+            l1,
+            l2,
+            nx.utils.groups(l1),
+            nx.utils.groups(l2),
+            nx.utils.groups(
+                {
+                    node: (in_degree, out_degree)
+                    for (node, in_degree), (_, out_degree) in zip(
+                        g2.in_degree(), g2.out_degree()
+                    )
+                }
+            ),
+        )
+
+        g1_degree = {
+            n: (in_degree, out_degree)
+            for (n, in_degree), (_, out_degree) in zip(g1.in_degree, g1.out_degree)
+        }
+
+        # Identity mapping on 1..3 (works because both graphs share labels).
+        m = {1: 1, 2: 2, 3: 3}
+        m_rev = m.copy()
+
+        T1_out = {4}
+        T1_in = {4}
+        T1_tilde = {5, 6}
+        T2_out = {4}
+        T2_in = {4}
+        T2_tilde = {5, 6}
+
+        sparams = _StateParameters(
+            m, m_rev, T1_out, T1_in, T1_tilde, None, T2_out, T2_in, T2_tilde, None
+        )
+
+        u = 4
+        # despite the same in and out degree, there's no candidate for u=4
+        candidates = _find_candidates_Di(u, gparams, sparams, g1_degree)
+        assert candidates == set()
+        # Notice how the regular candidate selection method returns wrong result.
+        assert _find_candidates(u, gparams, sparams, g1_degree) == {4}
+
+
+class TestGraphISOFeasibility:
+    def test_const_covered_neighbors(self):
+        # _consistent_PT must accept the pair (3, "k") when every covered
+        # neighbor of 3 maps onto a neighbor of "k" (here: 0->"a", 2->"c").
+        G1 = nx.Graph([(0, 1), (1, 2), (3, 0), (3, 2)])
+        G2 = nx.Graph([("a", "b"), ("b", "c"), ("k", "a"), ("k", "c")])
+        gparams = _GraphParameters(G1, G2, None, None, None, None, None)
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c"},
+            {"a": 0, "b": 1, "c": 2},
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+        )
+        u, v = 3, "k"
+        assert _consistent_PT(u, v, gparams, sparams)
+
+    def test_const_no_covered_neighbors(self):
+        # Node 3's neighbors (4, 5) are all unmapped, so consistency holds
+        # trivially: there are no covered neighbors to contradict.
+        G1 = nx.Graph([(0, 1), (1, 2), (3, 4), (3, 5)])
+        G2 = nx.Graph([("a", "b"), ("b", "c"), ("k", "w"), ("k", "z")])
+        gparams = _GraphParameters(G1, G2, None, None, None, None, None)
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c"},
+            {"a": 0, "b": 1, "c": 2},
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+        )
+        u, v = 3, "k"
+        assert _consistent_PT(u, v, gparams, sparams)
+
+    def test_const_mixed_covered_uncovered_neighbors(self):
+        # Node 3 has both covered neighbors (0, 2) and uncovered ones (4, 5);
+        # only the covered ones are checked, and they agree with "k"'s edges.
+        G1 = nx.Graph([(0, 1), (1, 2), (3, 0), (3, 2), (3, 4), (3, 5)])
+        G2 = nx.Graph(
+            [("a", "b"), ("b", "c"), ("k", "a"), ("k", "c"), ("k", "w"), ("k", "z")]
+        )
+        gparams = _GraphParameters(G1, G2, None, None, None, None, None)
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c"},
+            {"a": 0, "b": 1, "c": 2},
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+        )
+        u, v = 3, "k"
+        assert _consistent_PT(u, v, gparams, sparams)
+
+    def test_const_fail_cases(self):
+        # Mutate the graphs step by step and check _consistent_PT flips
+        # between pass/fail exactly when covered-neighbor edges go out of
+        # sync between G1 and G2.
+        G1 = nx.Graph(
+            [
+                (0, 1),
+                (1, 2),
+                (10, 0),
+                (10, 3),
+                (10, 4),
+                (10, 5),
+                (10, 6),
+                (4, 1),
+                (5, 3),
+            ]
+        )
+        G2 = nx.Graph(
+            [
+                ("a", "b"),
+                ("b", "c"),
+                ("k", "a"),
+                ("k", "d"),
+                ("k", "e"),
+                ("k", "f"),
+                ("k", "g"),
+                ("e", "b"),
+                ("f", "d"),
+            ]
+        )
+        gparams = _GraphParameters(G1, G2, None, None, None, None, None)
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c", 3: "d"},
+            {"a": 0, "b": 1, "c": 2, "d": 3},
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+        )
+        u, v = 10, "k"
+        assert _consistent_PT(u, v, gparams, sparams)
+
+        # Delete one uncovered neighbor of u. Notice how it still passes the test.
+        # Two reasons for this:
+        #   1. If u, v had different degrees from the beginning, they wouldn't
+        #      be selected as candidates in the first place.
+        #   2. Even if they are selected, consistency is basically 1-look-ahead,
+        #      meaning that we take into consideration the relation of the
+        #      candidates with their mapped neighbors. The node we deleted is
+        #      not a covered neighbor.
+        #      Such nodes will be checked by the cut_PT function, which is
+        #      basically the 2-look-ahead, checking the relation of the
+        #      candidates with T1, T2 (in which belongs the node we just deleted).
+        G1.remove_node(6)
+        assert _consistent_PT(u, v, gparams, sparams)
+
+        # Add one more covered neighbor of u in G1
+        G1.add_edge(u, 2)
+        assert not _consistent_PT(u, v, gparams, sparams)
+
+        # Compensate in G2
+        G2.add_edge(v, "c")
+        assert _consistent_PT(u, v, gparams, sparams)
+
+        # Add one more covered neighbor of v in G2
+        G2.add_edge(v, "x")
+        G1.add_node(7)
+        sparams.mapping.update({7: "x"})
+        sparams.reverse_mapping.update({"x": 7})
+        assert not _consistent_PT(u, v, gparams, sparams)
+
+        # Compensate in G1
+        G1.add_edge(u, 7)
+        assert _consistent_PT(u, v, gparams, sparams)
+
+    @pytest.mark.parametrize("graph_type", (nx.Graph, nx.DiGraph))
+    def test_cut_inconsistent_labels(self, graph_type):
+        # _cut_PT must prune the pair (10, "k") when one neighbor of u has a
+        # label ("green") with no counterpart among v's neighbors.  Runs for
+        # both undirected and directed graphs via the parametrized type.
+        G1 = graph_type(
+            [
+                (0, 1),
+                (1, 2),
+                (10, 0),
+                (10, 3),
+                (10, 4),
+                (10, 5),
+                (10, 6),
+                (4, 1),
+                (5, 3),
+            ]
+        )
+        G2 = graph_type(
+            [
+                ("a", "b"),
+                ("b", "c"),
+                ("k", "a"),
+                ("k", "d"),
+                ("k", "e"),
+                ("k", "f"),
+                ("k", "g"),
+                ("e", "b"),
+                ("f", "d"),
+            ]
+        )
+
+        l1 = {n: "blue" for n in G1.nodes()}
+        l2 = {n: "blue" for n in G2.nodes()}
+        l1.update({6: "green"})  # Change the label of one neighbor of u
+
+        gparams = _GraphParameters(
+            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
+        )
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c", 3: "d"},
+            {"a": 0, "b": 1, "c": 2, "d": 3},
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+        )
+
+        u, v = 10, "k"
+        assert _cut_PT(u, v, gparams, sparams)
+
+    def test_cut_consistent_labels(self):
+        # With uniform labels and matching T1/T2 and T1_tilde/T2_tilde
+        # intersections, _cut_PT must NOT prune the pair (10, "k").
+        G1 = nx.Graph(
+            [
+                (0, 1),
+                (1, 2),
+                (10, 0),
+                (10, 3),
+                (10, 4),
+                (10, 5),
+                (10, 6),
+                (4, 1),
+                (5, 3),
+            ]
+        )
+        G2 = nx.Graph(
+            [
+                ("a", "b"),
+                ("b", "c"),
+                ("k", "a"),
+                ("k", "d"),
+                ("k", "e"),
+                ("k", "f"),
+                ("k", "g"),
+                ("e", "b"),
+                ("f", "d"),
+            ]
+        )
+
+        l1 = {n: "blue" for n in G1.nodes()}
+        l2 = {n: "blue" for n in G2.nodes()}
+
+        gparams = _GraphParameters(
+            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
+        )
+        # T1={4, 5} / T2={"e", "f"} and T1_tilde={6} / T2_tilde={"g"} mirror
+        # each other under the mapping, so no cut applies.
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c", 3: "d"},
+            {"a": 0, "b": 1, "c": 2, "d": 3},
+            {4, 5},
+            None,
+            {6},
+            None,
+            {"e", "f"},
+            None,
+            {"g"},
+            None,
+        )
+
+        u, v = 10, "k"
+        assert not _cut_PT(u, v, gparams, sparams)
+
+    def test_cut_same_labels(self):
+        G1 = nx.Graph(
+            [
+                (0, 1),
+                (1, 2),
+                (10, 0),
+                (10, 3),
+                (10, 4),
+                (10, 5),
+                (10, 6),
+                (4, 1),
+                (5, 3),
+            ]
+        )
+        mapped = {0: "a", 1: "b", 2: "c", 3: "d", 4: "e", 5: "f", 6: "g", 10: "k"}
+        G2 = nx.relabel_nodes(G1, mapped)
+        l1 = {n: "blue" for n in G1.nodes()}
+        l2 = {n: "blue" for n in G2.nodes()}
+
+        gparams = _GraphParameters(
+            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
+        )
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c", 3: "d"},
+            {"a": 0, "b": 1, "c": 2, "d": 3},
+            {4, 5},
+            None,
+            {6},
+            None,
+            {"e", "f"},
+            None,
+            {"g"},
+            None,
+        )
+
+        u, v = 10, "k"
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Change intersection between G1[u] and T1, so it's not the same as the one between G2[v] and T2
+        G1.remove_edge(u, 4)
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # Compensate in G2
+        G2.remove_edge(v, mapped[4])
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Change intersection between G2[v] and T2_tilde, so it's not the same as the one between G1[u] and T1_tilde
+        G2.remove_edge(v, mapped[6])
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # Compensate in G1
+        G1.remove_edge(u, 6)
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Add disconnected nodes, which will form the new Ti_out
+        G1.add_nodes_from([6, 7, 8])
+        G2.add_nodes_from(["g", "y", "z"])
+        sparams.T1_tilde.update({6, 7, 8})
+        sparams.T2_tilde.update({"g", "y", "z"})
+
+        l1 = {n: "blue" for n in G1.nodes()}
+        l2 = {n: "blue" for n in G2.nodes()}
+        gparams = _GraphParameters(
+            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
+        )
+
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Add some new nodes to the mapping
+        sparams.mapping.update({6: "g", 7: "y"})
+        sparams.reverse_mapping.update({"g": 6, "y": 7})
+
+        # Add more nodes to T1, T2.
+        G1.add_edges_from([(6, 20), (7, 20), (6, 21)])
+        G2.add_edges_from([("g", "i"), ("g", "j"), ("y", "j")])
+
+        sparams.mapping.update({20: "j", 21: "i"})
+        sparams.reverse_mapping.update({"j": 20, "i": 21})
+        sparams.T1.update({20, 21})
+        sparams.T2.update({"i", "j"})
+        sparams.T1_tilde.difference_update({6, 7})
+        sparams.T2_tilde.difference_update({"g", "y"})
+
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Add nodes from the new T1 and T2, as neighbors of u and v respectively
+        G1.add_edges_from([(u, 20), (u, 21)])
+        G2.add_edges_from([(v, "i"), (v, "j")])
+        l1 = {n: "blue" for n in G1.nodes()}
+        l2 = {n: "blue" for n in G2.nodes()}
+        gparams = _GraphParameters(
+            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
+        )
+
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Change the edges, maintaining the G1[u]-T1 intersection
+        G1.remove_edge(u, 20)
+        G1.add_edge(u, 4)
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Connect u to 8 which is still in T1_tilde
+        G1.add_edge(u, 8)
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # Same for v and z, so that inters(G1[u], T1out) == inters(G2[v], T2out)
+        G2.add_edge(v, "z")
+        assert not _cut_PT(u, v, gparams, sparams)
+
+    def test_cut_different_labels(self):
+        G1 = nx.Graph(
+            [
+                (0, 1),
+                (1, 2),
+                (1, 14),
+                (0, 4),
+                (1, 5),
+                (2, 6),
+                (3, 7),
+                (3, 6),
+                (4, 10),
+                (4, 9),
+                (6, 10),
+                (20, 9),
+                (20, 15),
+                (20, 12),
+                (20, 11),
+                (12, 13),
+                (11, 13),
+                (20, 8),
+                (20, 3),
+                (20, 5),
+                (20, 0),
+            ]
+        )
+        mapped = {
+            0: "a",
+            1: "b",
+            2: "c",
+            3: "d",
+            4: "e",
+            5: "f",
+            6: "g",
+            7: "h",
+            8: "i",
+            9: "j",
+            10: "k",
+            11: "l",
+            12: "m",
+            13: "n",
+            14: "o",
+            15: "p",
+            20: "x",
+        }
+        G2 = nx.relabel_nodes(G1, mapped)
+
+        l1 = {n: "none" for n in G1.nodes()}
+        l2 = {}
+
+        l1.update(
+            {
+                9: "blue",
+                15: "blue",
+                12: "blue",
+                11: "green",
+                3: "green",
+                8: "red",
+                0: "red",
+                5: "yellow",
+            }
+        )
+        l2.update({mapped[n]: l for n, l in l1.items()})
+
+        gparams = _GraphParameters(
+            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
+        )
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c", 3: "d"},
+            {"a": 0, "b": 1, "c": 2, "d": 3},
+            {4, 5, 6, 7, 14},
+            None,
+            {9, 10, 15, 12, 11, 13, 8},
+            None,
+            {"e", "f", "g", "h", "o"},
+            None,
+            {"j", "k", "l", "m", "n", "i", "p"},
+            None,
+        )
+
+        u, v = 20, "x"
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Change the orientation of the labels on neighbors of u compared to neighbors of v. Leave the structure intact
+        l1.update({9: "red"})
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # compensate in G2
+        l2.update({mapped[9]: "red"})
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Change the intersection of G1[u] and T1
+        G1.add_edge(u, 4)
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # Same for G2[v] and T2
+        G2.add_edge(v, mapped[4])
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Change the intersection of G2[v] and T2_tilde
+        G2.remove_edge(v, mapped[8])
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # Same for G1[u] and T1_tilde
+        G1.remove_edge(u, 8)
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Place 8 and mapped[8] in T1 and T2 respectively, by connecting it to covered nodes
+        G1.add_edge(8, 3)
+        G2.add_edge(mapped[8], mapped[3])
+        sparams.T1.add(8)
+        sparams.T2.add(mapped[8])
+        sparams.T1_tilde.remove(8)
+        sparams.T2_tilde.remove(mapped[8])
+
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Remove neighbor of u from T1
+        G1.remove_node(5)
+        l1.pop(5)
+        sparams.T1.remove(5)
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # Same in G2
+        G2.remove_node(mapped[5])
+        l2.pop(mapped[5])
+        sparams.T2.remove(mapped[5])
+        assert not _cut_PT(u, v, gparams, sparams)
+
+    def test_feasibility_same_labels(self):
+        G1 = nx.Graph(
+            [
+                (0, 1),
+                (1, 2),
+                (1, 14),
+                (0, 4),
+                (1, 5),
+                (2, 6),
+                (3, 7),
+                (3, 6),
+                (4, 10),
+                (4, 9),
+                (6, 10),
+                (20, 9),
+                (20, 15),
+                (20, 12),
+                (20, 11),
+                (12, 13),
+                (11, 13),
+                (20, 8),
+                (20, 2),
+                (20, 5),
+                (20, 0),
+            ]
+        )
+        mapped = {
+            0: "a",
+            1: "b",
+            2: "c",
+            3: "d",
+            4: "e",
+            5: "f",
+            6: "g",
+            7: "h",
+            8: "i",
+            9: "j",
+            10: "k",
+            11: "l",
+            12: "m",
+            13: "n",
+            14: "o",
+            15: "p",
+            20: "x",
+        }
+        G2 = nx.relabel_nodes(G1, mapped)
+
+        l1 = {n: "blue" for n in G1.nodes()}
+        l2 = {mapped[n]: "blue" for n in G1.nodes()}
+
+        gparams = _GraphParameters(
+            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
+        )
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c", 3: "d"},
+            {"a": 0, "b": 1, "c": 2, "d": 3},
+            {4, 5, 6, 7, 14},
+            None,
+            {9, 10, 15, 12, 11, 13, 8},
+            None,
+            {"e", "f", "g", "h", "o"},
+            None,
+            {"j", "k", "l", "m", "n", "i", "p"},
+            None,
+        )
+
+        u, v = 20, "x"
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Change structure in G2 such that ONLY consistency is harmed
+        G2.remove_edge(mapped[20], mapped[2])
+        G2.add_edge(mapped[20], mapped[3])
+
+        # Consistency check fails, while the cutting rules are satisfied!
+        assert not _cut_PT(u, v, gparams, sparams)
+        assert not _consistent_PT(u, v, gparams, sparams)
+
+        # Compensate in G1 and make it consistent
+        G1.remove_edge(20, 2)
+        G1.add_edge(20, 3)
+        assert not _cut_PT(u, v, gparams, sparams)
+        assert _consistent_PT(u, v, gparams, sparams)
+
+        # ONLY fail the cutting check
+        G2.add_edge(v, mapped[10])
+        assert _cut_PT(u, v, gparams, sparams)
+        assert _consistent_PT(u, v, gparams, sparams)
+
+    def test_feasibility_different_labels(self):
+        G1 = nx.Graph(
+            [
+                (0, 1),
+                (1, 2),
+                (1, 14),
+                (0, 4),
+                (1, 5),
+                (2, 6),
+                (3, 7),
+                (3, 6),
+                (4, 10),
+                (4, 9),
+                (6, 10),
+                (20, 9),
+                (20, 15),
+                (20, 12),
+                (20, 11),
+                (12, 13),
+                (11, 13),
+                (20, 8),
+                (20, 2),
+                (20, 5),
+                (20, 0),
+            ]
+        )
+        mapped = {
+            0: "a",
+            1: "b",
+            2: "c",
+            3: "d",
+            4: "e",
+            5: "f",
+            6: "g",
+            7: "h",
+            8: "i",
+            9: "j",
+            10: "k",
+            11: "l",
+            12: "m",
+            13: "n",
+            14: "o",
+            15: "p",
+            20: "x",
+        }
+        G2 = nx.relabel_nodes(G1, mapped)
+
+        l1 = {n: "none" for n in G1.nodes()}
+        l2 = {}
+
+        l1.update(
+            {
+                9: "blue",
+                15: "blue",
+                12: "blue",
+                11: "green",
+                2: "green",
+                8: "red",
+                0: "red",
+                5: "yellow",
+            }
+        )
+        l2.update({mapped[n]: l for n, l in l1.items()})
+
+        gparams = _GraphParameters(
+            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
+        )
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c", 3: "d"},
+            {"a": 0, "b": 1, "c": 2, "d": 3},
+            {4, 5, 6, 7, 14},
+            None,
+            {9, 10, 15, 12, 11, 13, 8},
+            None,
+            {"e", "f", "g", "h", "o"},
+            None,
+            {"j", "k", "l", "m", "n", "i", "p"},
+            None,
+        )
+
+        u, v = 20, "x"
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Change structure in G2 such that ONLY consistency is harmed
+        G2.remove_edge(mapped[20], mapped[2])
+        G2.add_edge(mapped[20], mapped[3])
+        l2.update({mapped[3]: "green"})
+
+        # Consistency check fails, while the cutting rules are satisfied!
+        assert not _cut_PT(u, v, gparams, sparams)
+        assert not _consistent_PT(u, v, gparams, sparams)
+
+        # Compensate in G1 and make it consistent
+        G1.remove_edge(20, 2)
+        G1.add_edge(20, 3)
+        l1.update({3: "green"})
+        assert not _cut_PT(u, v, gparams, sparams)
+        assert _consistent_PT(u, v, gparams, sparams)
+
+        # ONLY fail the cutting check
+        l1.update({5: "red"})
+        assert _cut_PT(u, v, gparams, sparams)
+        assert _consistent_PT(u, v, gparams, sparams)
+
+
+class TestMultiGraphISOFeasibility:
+    def test_const_covered_neighbors(self):
+        G1 = nx.MultiGraph(
+            [(0, 1), (0, 1), (1, 2), (3, 0), (3, 0), (3, 0), (3, 2), (3, 2)]
+        )
+        G2 = nx.MultiGraph(
+            [
+                ("a", "b"),
+                ("a", "b"),
+                ("b", "c"),
+                ("k", "a"),
+                ("k", "a"),
+                ("k", "a"),
+                ("k", "c"),
+                ("k", "c"),
+            ]
+        )
+        gparams = _GraphParameters(G1, G2, None, None, None, None, None)
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c"},
+            {"a": 0, "b": 1, "c": 2},
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+        )
+        u, v = 3, "k"
+        assert _consistent_PT(u, v, gparams, sparams)
+
+    def test_const_no_covered_neighbors(self):
+        G1 = nx.MultiGraph([(0, 1), (0, 1), (1, 2), (3, 4), (3, 4), (3, 5)])
+        G2 = nx.MultiGraph([("a", "b"), ("b", "c"), ("k", "w"), ("k", "w"), ("k", "z")])
+        gparams = _GraphParameters(G1, G2, None, None, None, None, None)
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c"},
+            {"a": 0, "b": 1, "c": 2},
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+        )
+        u, v = 3, "k"
+        assert _consistent_PT(u, v, gparams, sparams)
+
+    def test_const_mixed_covered_uncovered_neighbors(self):
+        G1 = nx.MultiGraph(
+            [(0, 1), (1, 2), (3, 0), (3, 0), (3, 0), (3, 2), (3, 2), (3, 4), (3, 5)]
+        )
+        G2 = nx.MultiGraph(
+            [
+                ("a", "b"),
+                ("b", "c"),
+                ("k", "a"),
+                ("k", "a"),
+                ("k", "a"),
+                ("k", "c"),
+                ("k", "c"),
+                ("k", "w"),
+                ("k", "z"),
+            ]
+        )
+        gparams = _GraphParameters(G1, G2, None, None, None, None, None)
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c"},
+            {"a": 0, "b": 1, "c": 2},
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+        )
+        u, v = 3, "k"
+        assert _consistent_PT(u, v, gparams, sparams)
+
+    def test_const_fail_cases(self):
+        G1 = nx.MultiGraph(
+            [
+                (0, 1),
+                (1, 2),
+                (10, 0),
+                (10, 0),
+                (10, 0),
+                (10, 3),
+                (10, 3),
+                (10, 4),
+                (10, 5),
+                (10, 6),
+                (10, 6),
+                (4, 1),
+                (5, 3),
+            ]
+        )
+        mapped = {0: "a", 1: "b", 2: "c", 3: "d", 4: "e", 5: "f", 6: "g", 10: "k"}
+        G2 = nx.relabel_nodes(G1, mapped)
+
+        gparams = _GraphParameters(G1, G2, None, None, None, None, None)
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c", 3: "d"},
+            {"a": 0, "b": 1, "c": 2, "d": 3},
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+        )
+        u, v = 10, "k"
+        assert _consistent_PT(u, v, gparams, sparams)
+
+        # Delete one uncovered neighbor of u. Notice how it still passes the test. Two reasons for this:
+        # 1. If u, v had different degrees from the beginning, they wouldn't be selected as candidates in the first
+        #    place.
+        # 2. Even if they are selected, consistency is basically 1-look-ahead, meaning that we take into consideration
+        #    the relation of the candidates with their mapped neighbors. The node we deleted is not a covered neighbor.
+        #    Such nodes will be checked by the cut_PT function, which is basically the 2-look-ahead, checking the
+        #    relation of the candidates with T1, T2 (in which belongs the node we just deleted).
+        G1.remove_node(6)
+        assert _consistent_PT(u, v, gparams, sparams)
+
+        # Add one more covered neighbor of u in G1
+        G1.add_edge(u, 2)
+        assert not _consistent_PT(u, v, gparams, sparams)
+
+        # Compensate in G2
+        G2.add_edge(v, "c")
+        assert _consistent_PT(u, v, gparams, sparams)
+
+        # Add one more covered neighbor of v in G2
+        G2.add_edge(v, "x")
+        G1.add_node(7)
+        sparams.mapping.update({7: "x"})
+        sparams.reverse_mapping.update({"x": 7})
+        assert not _consistent_PT(u, v, gparams, sparams)
+
+        # Compensate in G1
+        G1.add_edge(u, 7)
+        assert _consistent_PT(u, v, gparams, sparams)
+
+        # Delete an edge between u and a covered neighbor
+        G1.remove_edges_from([(u, 0), (u, 0)])
+        assert not _consistent_PT(u, v, gparams, sparams)
+
+        # Compensate in G2
+        G2.remove_edges_from([(v, mapped[0]), (v, mapped[0])])
+        assert _consistent_PT(u, v, gparams, sparams)
+
+        # Remove an edge between v and a covered neighbor
+        G2.remove_edge(v, mapped[3])
+        assert not _consistent_PT(u, v, gparams, sparams)
+
+        # Compensate in G1
+        G1.remove_edge(u, 3)
+        assert _consistent_PT(u, v, gparams, sparams)
+
+    def test_cut_same_labels(self):
+        G1 = nx.MultiGraph(
+            [
+                (0, 1),
+                (1, 2),
+                (10, 0),
+                (10, 0),
+                (10, 0),
+                (10, 3),
+                (10, 3),
+                (10, 4),
+                (10, 4),
+                (10, 5),
+                (10, 5),
+                (10, 5),
+                (10, 5),
+                (10, 6),
+                (10, 6),
+                (4, 1),
+                (5, 3),
+            ]
+        )
+        mapped = {0: "a", 1: "b", 2: "c", 3: "d", 4: "e", 5: "f", 6: "g", 10: "k"}
+        G2 = nx.relabel_nodes(G1, mapped)
+        l1 = {n: "blue" for n in G1.nodes()}
+        l2 = {n: "blue" for n in G2.nodes()}
+
+        gparams = _GraphParameters(
+            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
+        )
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c", 3: "d"},
+            {"a": 0, "b": 1, "c": 2, "d": 3},
+            {4, 5},
+            None,
+            {6},
+            None,
+            {"e", "f"},
+            None,
+            {"g"},
+            None,
+        )
+
+        u, v = 10, "k"
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Remove one of the multiple edges between u and a neighbor
+        G1.remove_edge(u, 4)
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # Compensate in G2
+        G1.remove_edge(u, 4)
+        G2.remove_edges_from([(v, mapped[4]), (v, mapped[4])])
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Change intersection between G2[v] and T2_tilde, so it's not the same as the one between G1[u] and T1_tilde
+        G2.remove_edge(v, mapped[6])
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # Compensate in G1
+        G1.remove_edge(u, 6)
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Add more edges between u and neighbor which belongs in T1_tilde
+        G1.add_edges_from([(u, 5), (u, 5), (u, 5)])
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # Compensate in G2
+        G2.add_edges_from([(v, mapped[5]), (v, mapped[5]), (v, mapped[5])])
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Add disconnected nodes, which will form the new Ti_out
+        G1.add_nodes_from([6, 7, 8])
+        G2.add_nodes_from(["g", "y", "z"])
+        G1.add_edges_from([(u, 6), (u, 6), (u, 6), (u, 8)])
+        G2.add_edges_from([(v, "g"), (v, "g"), (v, "g"), (v, "z")])
+
+        sparams.T1_tilde.update({6, 7, 8})
+        sparams.T2_tilde.update({"g", "y", "z"})
+
+        l1 = {n: "blue" for n in G1.nodes()}
+        l2 = {n: "blue" for n in G2.nodes()}
+        gparams = _GraphParameters(
+            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
+        )
+
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Add some new nodes to the mapping
+        sparams.mapping.update({6: "g", 7: "y"})
+        sparams.reverse_mapping.update({"g": 6, "y": 7})
+
+        # Add more nodes to T1, T2.
+        G1.add_edges_from([(6, 20), (7, 20), (6, 21)])
+        G2.add_edges_from([("g", "i"), ("g", "j"), ("y", "j")])
+
+        sparams.T1.update({20, 21})
+        sparams.T2.update({"i", "j"})
+        sparams.T1_tilde.difference_update({6, 7})
+        sparams.T2_tilde.difference_update({"g", "y"})
+
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Remove some edges
+        G2.remove_edge(v, "g")
+        assert _cut_PT(u, v, gparams, sparams)
+
+        G1.remove_edge(u, 6)
+        G1.add_edge(u, 8)
+        G2.add_edge(v, "z")
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Add nodes from the new T1 and T2, as neighbors of u and v respectively
+        G1.add_edges_from([(u, 20), (u, 20), (u, 20), (u, 21)])
+        G2.add_edges_from([(v, "i"), (v, "i"), (v, "i"), (v, "j")])
+        l1 = {n: "blue" for n in G1.nodes()}
+        l2 = {n: "blue" for n in G2.nodes()}
+        gparams = _GraphParameters(
+            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
+        )
+
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Change the edges
+        G1.remove_edge(u, 20)
+        G1.add_edge(u, 4)
+        assert _cut_PT(u, v, gparams, sparams)
+
+        G2.remove_edge(v, "i")
+        G2.add_edge(v, mapped[4])
+        assert not _cut_PT(u, v, gparams, sparams)
+
+    def test_cut_different_labels(self):
+        G1 = nx.MultiGraph(
+            [
+                (0, 1),
+                (0, 1),
+                (1, 2),
+                (1, 2),
+                (1, 14),
+                (0, 4),
+                (1, 5),
+                (2, 6),
+                (3, 7),
+                (3, 6),
+                (4, 10),
+                (4, 9),
+                (6, 10),
+                (20, 9),
+                (20, 9),
+                (20, 9),
+                (20, 15),
+                (20, 15),
+                (20, 12),
+                (20, 11),
+                (20, 11),
+                (20, 11),
+                (12, 13),
+                (11, 13),
+                (20, 8),
+                (20, 8),
+                (20, 3),
+                (20, 3),
+                (20, 5),
+                (20, 5),
+                (20, 5),
+                (20, 0),
+                (20, 0),
+                (20, 0),
+            ]
+        )
+        mapped = {
+            0: "a",
+            1: "b",
+            2: "c",
+            3: "d",
+            4: "e",
+            5: "f",
+            6: "g",
+            7: "h",
+            8: "i",
+            9: "j",
+            10: "k",
+            11: "l",
+            12: "m",
+            13: "n",
+            14: "o",
+            15: "p",
+            20: "x",
+        }
+        G2 = nx.relabel_nodes(G1, mapped)
+
+        l1 = {n: "none" for n in G1.nodes()}
+        l2 = {}
+
+        l1.update(
+            {
+                9: "blue",
+                15: "blue",
+                12: "blue",
+                11: "green",
+                3: "green",
+                8: "red",
+                0: "red",
+                5: "yellow",
+            }
+        )
+        l2.update({mapped[n]: l for n, l in l1.items()})
+
+        gparams = _GraphParameters(
+            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
+        )
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c", 3: "d"},
+            {"a": 0, "b": 1, "c": 2, "d": 3},
+            {4, 5, 6, 7, 14},
+            None,
+            {9, 10, 15, 12, 11, 13, 8},
+            None,
+            {"e", "f", "g", "h", "o"},
+            None,
+            {"j", "k", "l", "m", "n", "i", "p"},
+            None,
+        )
+
+        u, v = 20, "x"
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Change the orientation of the labels on neighbors of u compared to neighbors of v. Leave the structure intact
+        l1.update({9: "red"})
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # compensate in G2
+        l2.update({mapped[9]: "red"})
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Change the intersection of G1[u] and T1
+        G1.add_edge(u, 4)
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # Same for G2[v] and T2
+        G2.add_edge(v, mapped[4])
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Delete one from the multiple edges
+        G2.remove_edge(v, mapped[8])
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # Same for G1[u] and T1_tilde
+        G1.remove_edge(u, 8)
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Place 8 and mapped[8] in T1 and T2 respectively, by connecting it to covered nodes
+        G1.add_edges_from([(8, 3), (8, 3), (8, u)])
+        G2.add_edges_from([(mapped[8], mapped[3]), (mapped[8], mapped[3])])
+        sparams.T1.add(8)
+        sparams.T2.add(mapped[8])
+        sparams.T1_tilde.remove(8)
+        sparams.T2_tilde.remove(mapped[8])
+
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # Fix uneven edges
+        G1.remove_edge(8, u)
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Remove neighbor of u from T1
+        G1.remove_node(5)
+        l1.pop(5)
+        sparams.T1.remove(5)
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # Same in G2
+        G2.remove_node(mapped[5])
+        l2.pop(mapped[5])
+        sparams.T2.remove(mapped[5])
+        assert not _cut_PT(u, v, gparams, sparams)
+
+    def test_feasibility_same_labels(self):
+        G1 = nx.MultiGraph(
+            [
+                (0, 1),
+                (0, 1),
+                (1, 2),
+                (1, 2),
+                (1, 14),
+                (0, 4),
+                (1, 5),
+                (2, 6),
+                (3, 7),
+                (3, 6),
+                (4, 10),
+                (4, 9),
+                (6, 10),
+                (20, 9),
+                (20, 9),
+                (20, 9),
+                (20, 15),
+                (20, 15),
+                (20, 12),
+                (20, 11),
+                (20, 11),
+                (20, 11),
+                (12, 13),
+                (11, 13),
+                (20, 8),
+                (20, 8),
+                (20, 3),
+                (20, 3),
+                (20, 5),
+                (20, 5),
+                (20, 5),
+                (20, 0),
+                (20, 0),
+                (20, 0),
+            ]
+        )
+        mapped = {
+            0: "a",
+            1: "b",
+            2: "c",
+            3: "d",
+            4: "e",
+            5: "f",
+            6: "g",
+            7: "h",
+            8: "i",
+            9: "j",
+            10: "k",
+            11: "l",
+            12: "m",
+            13: "n",
+            14: "o",
+            15: "p",
+            20: "x",
+        }
+        G2 = nx.relabel_nodes(G1, mapped)
+        l1 = {n: "blue" for n in G1.nodes()}
+        l2 = {mapped[n]: "blue" for n in G1.nodes()}
+
+        gparams = _GraphParameters(
+            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
+        )
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c", 3: "d"},
+            {"a": 0, "b": 1, "c": 2, "d": 3},
+            {4, 5, 6, 7, 14},
+            None,
+            {9, 10, 15, 12, 11, 13, 8},
+            None,
+            {"e", "f", "g", "h", "o"},
+            None,
+            {"j", "k", "l", "m", "n", "i", "p"},
+            None,
+        )
+
+        u, v = 20, "x"
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Change structure in G2 such that ONLY consistency is harmed
+        G2.remove_edges_from([(mapped[20], mapped[3]), (mapped[20], mapped[3])])
+        G2.add_edges_from([(mapped[20], mapped[2]), (mapped[20], mapped[2])])
+
+        # Consistency check fails, while the cutting rules are satisfied!
+        assert not _cut_PT(u, v, gparams, sparams)
+        assert not _consistent_PT(u, v, gparams, sparams)
+
+        # Compensate in G1 and make it consistent
+        G1.remove_edges_from([(20, 3), (20, 3)])
+        G1.add_edges_from([(20, 2), (20, 2)])
+        assert not _cut_PT(u, v, gparams, sparams)
+        assert _consistent_PT(u, v, gparams, sparams)
+
+        # ONLY fail the cutting check
+        G2.add_edges_from([(v, mapped[10])] * 5)
+        assert _cut_PT(u, v, gparams, sparams)
+        assert _consistent_PT(u, v, gparams, sparams)
+
+        # Pass all tests
+        G1.add_edges_from([(u, 10)] * 5)
+        assert not _cut_PT(u, v, gparams, sparams)
+        assert _consistent_PT(u, v, gparams, sparams)
+
+    def test_feasibility_different_labels(self):
+        G1 = nx.MultiGraph(
+            [
+                (0, 1),
+                (0, 1),
+                (1, 2),
+                (1, 2),
+                (1, 14),
+                (0, 4),
+                (1, 5),
+                (2, 6),
+                (3, 7),
+                (3, 6),
+                (4, 10),
+                (4, 9),
+                (6, 10),
+                (20, 9),
+                (20, 9),
+                (20, 9),
+                (20, 15),
+                (20, 15),
+                (20, 12),
+                (20, 11),
+                (20, 11),
+                (20, 11),
+                (12, 13),
+                (11, 13),
+                (20, 8),
+                (20, 8),
+                (20, 2),
+                (20, 2),
+                (20, 5),
+                (20, 5),
+                (20, 5),
+                (20, 0),
+                (20, 0),
+                (20, 0),
+            ]
+        )
+        mapped = {
+            0: "a",
+            1: "b",
+            2: "c",
+            3: "d",
+            4: "e",
+            5: "f",
+            6: "g",
+            7: "h",
+            8: "i",
+            9: "j",
+            10: "k",
+            11: "l",
+            12: "m",
+            13: "n",
+            14: "o",
+            15: "p",
+            20: "x",
+        }
+        G2 = nx.relabel_nodes(G1, mapped)
+        l1 = {n: "none" for n in G1.nodes()}
+        l2 = {}
+
+        l1.update(
+            {
+                9: "blue",
+                15: "blue",
+                12: "blue",
+                11: "green",
+                2: "green",
+                8: "red",
+                0: "red",
+                5: "yellow",
+            }
+        )
+        l2.update({mapped[n]: l for n, l in l1.items()})
+
+        gparams = _GraphParameters(
+            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
+        )
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c", 3: "d"},
+            {"a": 0, "b": 1, "c": 2, "d": 3},
+            {4, 5, 6, 7, 14},
+            None,
+            {9, 10, 15, 12, 11, 13, 8},
+            None,
+            {"e", "f", "g", "h", "o"},
+            None,
+            {"j", "k", "l", "m", "n", "i", "p"},
+            None,
+        )
+
+        u, v = 20, "x"
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Change structure in G2 such that ONLY consistency is harmed
+        G2.remove_edges_from([(mapped[20], mapped[2]), (mapped[20], mapped[2])])
+        G2.add_edges_from([(mapped[20], mapped[3]), (mapped[20], mapped[3])])
+        l2.update({mapped[3]: "green"})
+
+        # Consistency check fails, while the cutting rules are satisfied!
+        assert not _cut_PT(u, v, gparams, sparams)
+        assert not _consistent_PT(u, v, gparams, sparams)
+
+        # Compensate in G1 and make it consistent
+        G1.remove_edges_from([(20, 2), (20, 2)])
+        G1.add_edges_from([(20, 3), (20, 3)])
+        l1.update({3: "green"})
+        assert not _cut_PT(u, v, gparams, sparams)
+        assert _consistent_PT(u, v, gparams, sparams)
+
+        # ONLY fail the cutting check
+        l1.update({5: "red"})
+        assert _cut_PT(u, v, gparams, sparams)
+        assert _consistent_PT(u, v, gparams, sparams)
+
+
+class TestDiGraphISOFeasibility:
+    def test_const_covered_neighbors(self):
+        G1 = nx.DiGraph([(0, 1), (1, 2), (0, 3), (2, 3)])
+        G2 = nx.DiGraph([("a", "b"), ("b", "c"), ("a", "k"), ("c", "k")])
+        gparams = _GraphParameters(G1, G2, None, None, None, None, None)
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c"},
+            {"a": 0, "b": 1, "c": 2},
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+        )
+        u, v = 3, "k"
+        assert _consistent_PT(u, v, gparams, sparams)
+
+    def test_const_no_covered_neighbors(self):
+        G1 = nx.DiGraph([(0, 1), (1, 2), (3, 4), (3, 5)])
+        G2 = nx.DiGraph([("a", "b"), ("b", "c"), ("k", "w"), ("k", "z")])
+        gparams = _GraphParameters(G1, G2, None, None, None, None, None)
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c"},
+            {"a": 0, "b": 1, "c": 2},
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+        )
+        u, v = 3, "k"
+        assert _consistent_PT(u, v, gparams, sparams)
+
+    def test_const_mixed_covered_uncovered_neighbors(self):
+        G1 = nx.DiGraph([(0, 1), (1, 2), (3, 0), (3, 2), (3, 4), (3, 5)])
+        G2 = nx.DiGraph(
+            [("a", "b"), ("b", "c"), ("k", "a"), ("k", "c"), ("k", "w"), ("k", "z")]
+        )
+        gparams = _GraphParameters(G1, G2, None, None, None, None, None)
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c"},
+            {"a": 0, "b": 1, "c": 2},
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+        )
+        u, v = 3, "k"
+        assert _consistent_PT(u, v, gparams, sparams)
+
+    def test_const_fail_cases(self):
+        G1 = nx.DiGraph(
+            [
+                (0, 1),
+                (2, 1),
+                (10, 0),
+                (10, 3),
+                (10, 4),
+                (5, 10),
+                (10, 6),
+                (1, 4),
+                (5, 3),
+            ]
+        )
+        G2 = nx.DiGraph(
+            [
+                ("a", "b"),
+                ("c", "b"),
+                ("k", "a"),
+                ("k", "d"),
+                ("k", "e"),
+                ("f", "k"),
+                ("k", "g"),
+                ("b", "e"),
+                ("f", "d"),
+            ]
+        )
+        gparams = _GraphParameters(G1, G2, None, None, None, None, None)
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c", 3: "d"},
+            {"a": 0, "b": 1, "c": 2, "d": 3},
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+        )
+        u, v = 10, "k"
+        assert _consistent_PT(u, v, gparams, sparams)
+
+        # Delete one uncovered neighbor of u. Notice how it still passes the
+        # test. Two reasons for this:
+        #   1. If u, v had different degrees from the beginning, they wouldn't
+        #      be selected as candidates in the first place.
+        #   2. Even if they are selected, consistency is basically
+        #      1-look-ahead, meaning that we take into consideration the
+        #      relation of the candidates with their mapped neighbors.
+        #      The node we deleted is not a covered neighbor.
+        #      Such nodes will be checked by the cut_PT function, which is
+        #      basically the 2-look-ahead, checking the relation of the
+        #      candidates with T1, T2 (in which belongs the node we just deleted).
+        G1.remove_node(6)
+        assert _consistent_PT(u, v, gparams, sparams)
+
+        # Add one more covered neighbor of u in G1
+        G1.add_edge(u, 2)
+        assert not _consistent_PT(u, v, gparams, sparams)
+
+        # Compensate in G2
+        G2.add_edge(v, "c")
+        assert _consistent_PT(u, v, gparams, sparams)
+
+        # Add one more covered neighbor of v in G2
+        G2.add_edge(v, "x")
+        G1.add_node(7)
+        sparams.mapping.update({7: "x"})
+        sparams.reverse_mapping.update({"x": 7})
+        assert not _consistent_PT(u, v, gparams, sparams)
+
+        # Compensate in G1
+        G1.add_edge(u, 7)
+        assert _consistent_PT(u, v, gparams, sparams)
+
+    def test_cut_inconsistent_labels(self):
+        G1 = nx.DiGraph(
+            [
+                (0, 1),
+                (2, 1),
+                (10, 0),
+                (10, 3),
+                (10, 4),
+                (5, 10),
+                (10, 6),
+                (1, 4),
+                (5, 3),
+            ]
+        )
+        G2 = nx.DiGraph(
+            [
+                ("a", "b"),
+                ("c", "b"),
+                ("k", "a"),
+                ("k", "d"),
+                ("k", "e"),
+                ("f", "k"),
+                ("k", "g"),
+                ("b", "e"),
+                ("f", "d"),
+            ]
+        )
+
+        l1 = {n: "blue" for n in G1.nodes()}
+        l2 = {n: "blue" for n in G2.nodes()}
+        l1.update({5: "green"})  # Change the label of one neighbor of u
+
+        gparams = _GraphParameters(
+            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
+        )
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c", 3: "d"},
+            {"a": 0, "b": 1, "c": 2, "d": 3},
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+            None,
+        )
+
+        u, v = 10, "k"
+        assert _cut_PT(u, v, gparams, sparams)
+
+    def test_cut_consistent_labels(self):
+        G1 = nx.DiGraph(
+            [
+                (0, 1),
+                (2, 1),
+                (10, 0),
+                (10, 3),
+                (10, 4),
+                (5, 10),
+                (10, 6),
+                (1, 4),
+                (5, 3),
+            ]
+        )
+        G2 = nx.DiGraph(
+            [
+                ("a", "b"),
+                ("c", "b"),
+                ("k", "a"),
+                ("k", "d"),
+                ("k", "e"),
+                ("f", "k"),
+                ("k", "g"),
+                ("b", "e"),
+                ("f", "d"),
+            ]
+        )
+
+        l1 = {n: "blue" for n in G1.nodes()}
+        l2 = {n: "blue" for n in G2.nodes()}
+
+        gparams = _GraphParameters(
+            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
+        )
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c", 3: "d"},
+            {"a": 0, "b": 1, "c": 2, "d": 3},
+            {4},
+            {5, 10},
+            {6},
+            None,
+            {"e"},
+            {"f", "k"},
+            {"g"},
+            None,
+        )
+
+        u, v = 10, "k"
+        assert not _cut_PT(u, v, gparams, sparams)
+
+    def test_cut_same_labels(self):
+        G1 = nx.DiGraph(
+            [
+                (0, 1),
+                (2, 1),
+                (10, 0),
+                (10, 3),
+                (10, 4),
+                (5, 10),
+                (10, 6),
+                (1, 4),
+                (5, 3),
+            ]
+        )
+        mapped = {0: "a", 1: "b", 2: "c", 3: "d", 4: "e", 5: "f", 6: "g", 10: "k"}
+        G2 = nx.relabel_nodes(G1, mapped)
+        l1 = {n: "blue" for n in G1.nodes()}
+        l2 = {n: "blue" for n in G2.nodes()}
+
+        gparams = _GraphParameters(
+            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
+        )
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c", 3: "d"},
+            {"a": 0, "b": 1, "c": 2, "d": 3},
+            {4},
+            {5, 10},
+            {6},
+            None,
+            {"e"},
+            {"f", "k"},
+            {"g"},
+            None,
+        )
+
+        u, v = 10, "k"
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Change intersection between G1[u] and T1_out, so it's not the same as the one between G2[v] and T2_out
+        G1.remove_edge(u, 4)
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # Compensate in G2
+        G2.remove_edge(v, mapped[4])
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Change intersection between G1[u] and T1_in, so it's not the same as the one between G2[v] and T2_in
+        G1.remove_edge(5, u)
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # Compensate in G2
+        G2.remove_edge(mapped[5], v)
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Change intersection between G2[v] and T2_tilde, so it's not the same as the one between G1[u] and T1_tilde
+        G2.remove_edge(v, mapped[6])
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # Compensate in G1
+        G1.remove_edge(u, 6)
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Add disconnected nodes, which will form the new Ti_tilde
+        G1.add_nodes_from([6, 7, 8])
+        G2.add_nodes_from(["g", "y", "z"])
+        sparams.T1_tilde.update({6, 7, 8})
+        sparams.T2_tilde.update({"g", "y", "z"})
+
+        l1 = {n: "blue" for n in G1.nodes()}
+        l2 = {n: "blue" for n in G2.nodes()}
+        gparams = _GraphParameters(
+            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
+        )
+
+        assert not _cut_PT(u, v, gparams, sparams)
+
+    def test_cut_different_labels(self):
+        G1 = nx.DiGraph(
+            [
+                (0, 1),
+                (1, 2),
+                (14, 1),
+                (0, 4),
+                (1, 5),
+                (2, 6),
+                (3, 7),
+                (3, 6),
+                (10, 4),
+                (4, 9),
+                (6, 10),
+                (20, 9),
+                (20, 15),
+                (20, 12),
+                (20, 11),
+                (12, 13),
+                (11, 13),
+                (20, 8),
+                (20, 3),
+                (20, 5),
+                (0, 20),
+            ]
+        )
+        mapped = {
+            0: "a",
+            1: "b",
+            2: "c",
+            3: "d",
+            4: "e",
+            5: "f",
+            6: "g",
+            7: "h",
+            8: "i",
+            9: "j",
+            10: "k",
+            11: "l",
+            12: "m",
+            13: "n",
+            14: "o",
+            15: "p",
+            20: "x",
+        }
+        G2 = nx.relabel_nodes(G1, mapped)
+
+        l1 = {n: "none" for n in G1.nodes()}
+        l2 = {}
+
+        l1.update(
+            {
+                9: "blue",
+                15: "blue",
+                12: "blue",
+                11: "green",
+                3: "green",
+                8: "red",
+                0: "red",
+                5: "yellow",
+            }
+        )
+        l2.update({mapped[n]: l for n, l in l1.items()})
+
+        gparams = _GraphParameters(
+            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
+        )
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c", 3: "d"},
+            {"a": 0, "b": 1, "c": 2, "d": 3},
+            {4, 5, 6, 7, 20},
+            {14, 20},
+            {9, 10, 15, 12, 11, 13, 8},
+            None,
+            {"e", "f", "g", "x"},
+            {"o", "x"},
+            {"j", "k", "l", "m", "n", "i", "p"},
+            None,
+        )
+
+        u, v = 20, "x"
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Change the orientation of the labels on neighbors of u compared to neighbors of v. Leave the structure intact
+        l1.update({9: "red"})
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # compensate in G2
+        l2.update({mapped[9]: "red"})
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Change the intersection of G1[u] and T1_out
+        G1.add_edge(u, 4)
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # Same for G2[v] and T2_out
+        G2.add_edge(v, mapped[4])
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Change the intersection of G1[u] and T1_in
+        G1.add_edge(u, 14)
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # Same for G2[v] and T2_in
+        G2.add_edge(v, mapped[14])
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Change the intersection of G2[v] and T2_tilde
+        G2.remove_edge(v, mapped[8])
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # Same for G1[u] and T1_tilde
+        G1.remove_edge(u, 8)
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Place 8 and mapped[8] in T1 and T2 respectively, by connecting it to covered nodes
+        G1.add_edge(8, 3)
+        G2.add_edge(mapped[8], mapped[3])
+        sparams.T1.add(8)
+        sparams.T2.add(mapped[8])
+        sparams.T1_tilde.remove(8)
+        sparams.T2_tilde.remove(mapped[8])
+
+        assert not _cut_PT(u, v, gparams, sparams)
+
+        # Remove neighbor of u from T1
+        G1.remove_node(5)
+        l1.pop(5)
+        sparams.T1.remove(5)
+        assert _cut_PT(u, v, gparams, sparams)
+
+        # Same in G2
+        G2.remove_node(mapped[5])
+        l2.pop(mapped[5])
+        sparams.T2.remove(mapped[5])
+        assert not _cut_PT(u, v, gparams, sparams)
+
+    def test_predecessor_T1_in_fail(self):
+        G1 = nx.DiGraph(
+            [(0, 1), (0, 3), (4, 0), (1, 5), (5, 2), (3, 6), (4, 6), (6, 5)]
+        )
+        mapped = {0: "a", 1: "b", 2: "c", 3: "d", 4: "e", 5: "f", 6: "g"}
+        G2 = nx.relabel_nodes(G1, mapped)
+        l1 = {n: "blue" for n in G1.nodes()}
+        l2 = {n: "blue" for n in G2.nodes()}
+
+        gparams = _GraphParameters(
+            G1, G2, l1, l2, nx.utils.groups(l1), nx.utils.groups(l2), None
+        )
+        sparams = _StateParameters(
+            {0: "a", 1: "b", 2: "c"},
+            {"a": 0, "b": 1, "c": 2},
+            {3, 5},
+            {4, 5},
+            {6},
+            None,
+            {"d", "f"},
+            {"f"},  # mapped[4] is missing from T2_in
+            {"g"},
+            None,
+        )
+
+        u, v = 6, "g"
+        assert _cut_PT(u, v, gparams, sparams)
+
+        sparams.T2_in.add("e")
+        assert not _cut_PT(u, v, gparams, sparams)
+
+
+class TestGraphTinoutUpdating:
+    edges = [
+        (1, 3),
+        (2, 3),
+        (3, 4),
+        (4, 9),
+        (4, 5),
+        (3, 9),
+        (5, 8),
+        (5, 7),
+        (8, 7),
+        (6, 7),
+    ]
+    mapped = {
+        0: "x",
+        1: "a",
+        2: "b",
+        3: "c",
+        4: "d",
+        5: "e",
+        6: "f",
+        7: "g",
+        8: "h",
+        9: "i",
+    }
+    G1 = nx.Graph()
+    G1.add_edges_from(edges)
+    G1.add_node(0)
+    G2 = nx.relabel_nodes(G1, mapping=mapped)
+
+    def test_updating(self):
+        G2_degree = dict(self.G2.degree)
+        gparams, sparams = _initialize_parameters(self.G1, self.G2, G2_degree)
+        m, m_rev, T1, _, T1_tilde, _, T2, _, T2_tilde, _ = sparams
+
+        # Add node to the mapping
+        m[4] = self.mapped[4]
+        m_rev[self.mapped[4]] = 4
+        _update_Tinout(4, self.mapped[4], gparams, sparams)
+
+        assert T1 == {3, 5, 9}
+        assert T2 == {"c", "i", "e"}
+        assert T1_tilde == {0, 1, 2, 6, 7, 8}
+        assert T2_tilde == {"x", "a", "b", "f", "g", "h"}
+
+        # Add node to the mapping
+        m[5] = self.mapped[5]
+        m_rev.update({self.mapped[5]: 5})
+        _update_Tinout(5, self.mapped[5], gparams, sparams)
+
+        assert T1 == {3, 9, 8, 7}
+        assert T2 == {"c", "i", "h", "g"}
+        assert T1_tilde == {0, 1, 2, 6}
+        assert T2_tilde == {"x", "a", "b", "f"}
+
+        # Add node to the mapping
+        m[6] = self.mapped[6]
+        m_rev.update({self.mapped[6]: 6})
+        _update_Tinout(6, self.mapped[6], gparams, sparams)
+
+        assert T1 == {3, 9, 8, 7}
+        assert T2 == {"c", "i", "h", "g"}
+        assert T1_tilde == {0, 1, 2}
+        assert T2_tilde == {"x", "a", "b"}
+
+        # Add node to the mapping
+        m[3] = self.mapped[3]
+        m_rev.update({self.mapped[3]: 3})
+        _update_Tinout(3, self.mapped[3], gparams, sparams)
+
+        assert T1 == {1, 2, 9, 8, 7}
+        assert T2 == {"a", "b", "i", "h", "g"}
+        assert T1_tilde == {0}
+        assert T2_tilde == {"x"}
+
+        # Add node to the mapping
+        m[0] = self.mapped[0]
+        m_rev.update({self.mapped[0]: 0})
+        _update_Tinout(0, self.mapped[0], gparams, sparams)
+
+        assert T1 == {1, 2, 9, 8, 7}
+        assert T2 == {"a", "b", "i", "h", "g"}
+        assert T1_tilde == set()
+        assert T2_tilde == set()
+
+    def test_restoring(self):
+        m = {0: "x", 3: "c", 4: "d", 5: "e", 6: "f"}
+        m_rev = {"x": 0, "c": 3, "d": 4, "e": 5, "f": 6}
+
+        T1 = {1, 2, 7, 9, 8}
+        T2 = {"a", "b", "g", "i", "h"}
+        T1_tilde = set()
+        T2_tilde = set()
+
+        gparams = _GraphParameters(self.G1, self.G2, {}, {}, {}, {}, {})
+        sparams = _StateParameters(
+            m, m_rev, T1, None, T1_tilde, None, T2, None, T2_tilde, None
+        )
+
+        # Remove a node from the mapping
+        m.pop(0)
+        m_rev.pop("x")
+        _restore_Tinout(0, self.mapped[0], gparams, sparams)
+
+        assert T1 == {1, 2, 7, 9, 8}
+        assert T2 == {"a", "b", "g", "i", "h"}
+        assert T1_tilde == {0}
+        assert T2_tilde == {"x"}
+
+        # Remove a node from the mapping
+        m.pop(6)
+        m_rev.pop("f")
+        _restore_Tinout(6, self.mapped[6], gparams, sparams)
+
+        assert T1 == {1, 2, 7, 9, 8}
+        assert T2 == {"a", "b", "g", "i", "h"}
+        assert T1_tilde == {0, 6}
+        assert T2_tilde == {"x", "f"}
+
+        # Remove a node from the mapping
+        m.pop(3)
+        m_rev.pop("c")
+        _restore_Tinout(3, self.mapped[3], gparams, sparams)
+
+        assert T1 == {7, 9, 8, 3}
+        assert T2 == {"g", "i", "h", "c"}
+        assert T1_tilde == {0, 6, 1, 2}
+        assert T2_tilde == {"x", "f", "a", "b"}
+
+        # Remove a node from the mapping
+        m.pop(5)
+        m_rev.pop("e")
+        _restore_Tinout(5, self.mapped[5], gparams, sparams)
+
+        assert T1 == {9, 3, 5}
+        assert T2 == {"i", "c", "e"}
+        assert T1_tilde == {0, 6, 1, 2, 7, 8}
+        assert T2_tilde == {"x", "f", "a", "b", "g", "h"}
+
+        # Remove a node from the mapping
+        m.pop(4)
+        m_rev.pop("d")
+        _restore_Tinout(4, self.mapped[4], gparams, sparams)
+
+        assert T1 == set()
+        assert T2 == set()
+        assert T1_tilde == set(self.G1.nodes())
+        assert T2_tilde == set(self.G2.nodes())
+
+
+class TestDiGraphTinoutUpdating:
+    edges = [
+        (1, 3),
+        (3, 2),
+        (3, 4),
+        (4, 9),
+        (4, 5),
+        (3, 9),
+        (5, 8),
+        (5, 7),
+        (8, 7),
+        (7, 6),
+    ]
+    mapped = {
+        0: "x",
+        1: "a",
+        2: "b",
+        3: "c",
+        4: "d",
+        5: "e",
+        6: "f",
+        7: "g",
+        8: "h",
+        9: "i",
+    }
+    G1 = nx.DiGraph(edges)
+    G1.add_node(0)
+    G2 = nx.relabel_nodes(G1, mapping=mapped)
+
+    def test_updating(self):
+        G2_degree = {
+            n: (in_degree, out_degree)
+            for (n, in_degree), (_, out_degree) in zip(
+                self.G2.in_degree, self.G2.out_degree
+            )
+        }
+        gparams, sparams = _initialize_parameters(self.G1, self.G2, G2_degree)
+        m, m_rev, T1_out, T1_in, T1_tilde, _, T2_out, T2_in, T2_tilde, _ = sparams
+
+        # Add node to the mapping
+        m[4] = self.mapped[4]
+        m_rev[self.mapped[4]] = 4
+        _update_Tinout(4, self.mapped[4], gparams, sparams)
+
+        assert T1_out == {5, 9}
+        assert T1_in == {3}
+        assert T2_out == {"i", "e"}
+        assert T2_in == {"c"}
+        assert T1_tilde == {0, 1, 2, 6, 7, 8}
+        assert T2_tilde == {"x", "a", "b", "f", "g", "h"}
+
+        # Add node to the mapping
+        m[5] = self.mapped[5]
+        m_rev[self.mapped[5]] = 5
+        _update_Tinout(5, self.mapped[5], gparams, sparams)
+
+        assert T1_out == {9, 8, 7}
+        assert T1_in == {3}
+        assert T2_out == {"i", "g", "h"}
+        assert T2_in == {"c"}
+        assert T1_tilde == {0, 1, 2, 6}
+        assert T2_tilde == {"x", "a", "b", "f"}
+
+        # Add node to the mapping
+        m[6] = self.mapped[6]
+        m_rev[self.mapped[6]] = 6
+        _update_Tinout(6, self.mapped[6], gparams, sparams)
+
+        assert T1_out == {9, 8, 7}
+        assert T1_in == {3, 7}
+        assert T2_out == {"i", "g", "h"}
+        assert T2_in == {"c", "g"}
+        assert T1_tilde == {0, 1, 2}
+        assert T2_tilde == {"x", "a", "b"}
+
+        # Add node to the mapping
+        m[3] = self.mapped[3]
+        m_rev[self.mapped[3]] = 3
+        _update_Tinout(3, self.mapped[3], gparams, sparams)
+
+        assert T1_out == {9, 8, 7, 2}
+        assert T1_in == {7, 1}
+        assert T2_out == {"i", "g", "h", "b"}
+        assert T2_in == {"g", "a"}
+        assert T1_tilde == {0}
+        assert T2_tilde == {"x"}
+
+        # Add node to the mapping
+        m[0] = self.mapped[0]
+        m_rev[self.mapped[0]] = 0
+        _update_Tinout(0, self.mapped[0], gparams, sparams)
+
+        assert T1_out == {9, 8, 7, 2}
+        assert T1_in == {7, 1}
+        assert T2_out == {"i", "g", "h", "b"}
+        assert T2_in == {"g", "a"}
+        assert T1_tilde == set()
+        assert T2_tilde == set()
+
+    def test_restoring(self):
+        m = {0: "x", 3: "c", 4: "d", 5: "e", 6: "f"}
+        m_rev = {"x": 0, "c": 3, "d": 4, "e": 5, "f": 6}
+
+        T1_out = {2, 7, 9, 8}
+        T1_in = {1, 7}
+        T2_out = {"b", "g", "i", "h"}
+        T2_in = {"a", "g"}
+        T1_tilde = set()
+        T2_tilde = set()
+
+        gparams = _GraphParameters(self.G1, self.G2, {}, {}, {}, {}, {})
+        sparams = _StateParameters(
+            m, m_rev, T1_out, T1_in, T1_tilde, None, T2_out, T2_in, T2_tilde, None
+        )
+
+        # Remove a node from the mapping
+        m.pop(0)
+        m_rev.pop("x")
+        _restore_Tinout_Di(0, self.mapped[0], gparams, sparams)
+
+        assert T1_out == {2, 7, 9, 8}
+        assert T1_in == {1, 7}
+        assert T2_out == {"b", "g", "i", "h"}
+        assert T2_in == {"a", "g"}
+        assert T1_tilde == {0}
+        assert T2_tilde == {"x"}
+
+        # Remove a node from the mapping
+        m.pop(6)
+        m_rev.pop("f")
+        _restore_Tinout_Di(6, self.mapped[6], gparams, sparams)
+
+        assert T1_out == {2, 9, 8, 7}
+        assert T1_in == {1}
+        assert T2_out == {"b", "i", "h", "g"}
+        assert T2_in == {"a"}
+        assert T1_tilde == {0, 6}
+        assert T2_tilde == {"x", "f"}
+
+        # Remove a node from the mapping
+        m.pop(3)
+        m_rev.pop("c")
+        _restore_Tinout_Di(3, self.mapped[3], gparams, sparams)
+
+        assert T1_out == {9, 8, 7}
+        assert T1_in == {3}
+        assert T2_out == {"i", "h", "g"}
+        assert T2_in == {"c"}
+        assert T1_tilde == {0, 6, 1, 2}
+        assert T2_tilde == {"x", "f", "a", "b"}
+
+        # Remove a node from the mapping
+        m.pop(5)
+        m_rev.pop("e")
+        _restore_Tinout_Di(5, self.mapped[5], gparams, sparams)
+
+        assert T1_out == {9, 5}
+        assert T1_in == {3}
+        assert T2_out == {"i", "e"}
+        assert T2_in == {"c"}
+        assert T1_tilde == {0, 6, 1, 2, 8, 7}
+        assert T2_tilde == {"x", "f", "a", "b", "h", "g"}
+
+        # Remove a node from the mapping
+        m.pop(4)
+        m_rev.pop("d")
+        _restore_Tinout_Di(4, self.mapped[4], gparams, sparams)
+
+        assert T1_out == set()
+        assert T1_in == set()
+        assert T2_out == set()
+        assert T2_in == set()
+        assert T1_tilde == set(self.G1.nodes())
+        assert T2_tilde == set(self.G2.nodes())
diff --git a/networkx/algorithms/isomorphism/tests/test_vf2userfunc.py b/networkx/algorithms/isomorphism/tests/test_vf2userfunc.py
index 5a22d41..66a434e 100644
--- a/networkx/algorithms/isomorphism/tests/test_vf2userfunc.py
+++ b/networkx/algorithms/isomorphism/tests/test_vf2userfunc.py
@@ -14,7 +14,6 @@ def test_simple():
     w = "weight"
     edges = [(0, 0, 1), (0, 0, 1.5), (0, 1, 2), (1, 0, 3)]
     for g1 in [nx.Graph(), nx.DiGraph(), nx.MultiGraph(), nx.MultiDiGraph()]:
-
         g1.add_weighted_edges_from(edges)
         g2 = g1.subgraph(g1.nodes())
         if g1.is_multigraph():
diff --git a/networkx/algorithms/isomorphism/tree_isomorphism.py b/networkx/algorithms/isomorphism/tree_isomorphism.py
index 7e13d02..372224c 100644
--- a/networkx/algorithms/isomorphism/tree_isomorphism.py
+++ b/networkx/algorithms/isomorphism/tree_isomorphism.py
@@ -74,7 +74,7 @@ def root_trees(t1, root1, t2, root2):
 def assign_levels(G, root):
     level = {}
     level[root] = 0
-    for (v1, v2) in nx.bfs_edges(G, root):
+    for v1, v2 in nx.bfs_edges(G, root):
         level[v2] = level[v1] + 1
 
     return level
@@ -83,7 +83,7 @@ def assign_levels(G, root):
 # now group the nodes at each level
 def group_by_levels(levels):
     L = {}
-    for (n, lev) in levels.items():
+    for n, lev in levels.items():
         if lev not in L:
             L[lev] = []
         L[lev].append(n)
@@ -103,7 +103,7 @@ def generate_isomorphism(v, w, M, ordered_children):
 def rooted_tree_isomorphism(t1, root1, t2, root2):
     """
     Given two rooted trees `t1` and `t2`,
-    with roots `root1` and `root2` respectivly
+    with roots `root1` and `root2` respectively
     this routine will determine if they are isomorphic.
 
     These trees may be either directed or undirected,
@@ -168,7 +168,7 @@ def rooted_tree_isomorphism(t1, root1, t2, root2):
     # nothing to do on last level so start on h-1
     # also nothing to do for our fake level 0, so skip that
     for i in range(h - 1, 0, -1):
-        # update the ordered_labels and ordered_childen
+        # update the ordered_labels and ordered_children
         # for any children
         for v in L[i]:
             # nothing to do if no children
@@ -186,7 +186,7 @@ def rooted_tree_isomorphism(t1, root1, t2, root2):
         forlabel = sorted((ordered_labels[v], v) for v in L[i])
 
         # now assign labels to these nodes, according to the sorted order
-        # starting from 0, where idential ordered_labels get the same label
+        # starting from 0, where identical ordered_labels get the same label
         current = 0
         for i, (ol, v) in enumerate(forlabel):
             # advance to next label if not 0, and different from previous
diff --git a/networkx/algorithms/isomorphism/vf2pp.py b/networkx/algorithms/isomorphism/vf2pp.py
new file mode 100644
index 0000000..1dbdca0
--- /dev/null
+++ b/networkx/algorithms/isomorphism/vf2pp.py
@@ -0,0 +1,1065 @@
+"""
+***************
+VF2++ Algorithm
+***************
+
+An implementation of the VF2++ algorithm [1]_ for Graph Isomorphism testing.
+
+The simplest interface to use this module is to call:
+
+`vf2pp_is_isomorphic`: to check whether two graphs are isomorphic.
+`vf2pp_isomorphism`: to obtain the node mapping between two graphs,
+in case they are isomorphic.
+`vf2pp_all_isomorphisms`: to generate all possible mappings between two graphs,
+if isomorphic.
+
+Introduction
+------------
+The VF2++ algorithm, follows a similar logic to that of VF2, while also
+introducing new easy-to-check cutting rules and determining the optimal access
+order of nodes. It is also implemented in a non-recursive manner, which saves
+both time and space, when compared to its previous counterpart.
+
+The optimal node ordering is obtained after taking into consideration both the
+degree but also the label rarity of each node.
+This way we place the nodes that are more likely to match, first in the order,
+thus examining the most promising branches in the beginning.
+The rules also consider node labels, making it easier to prune unfruitful
+branches early in the process.
+
+Examples
+--------
+
+Suppose G1 and G2 are Isomorphic Graphs. Verification is as follows:
+
+Without node labels:
+
+>>> import networkx as nx
+>>> G1 = nx.path_graph(4)
+>>> G2 = nx.path_graph(4)
+>>> nx.vf2pp_is_isomorphic(G1, G2, node_label=None)
+True
+>>> nx.vf2pp_isomorphism(G1, G2, node_label=None)
+{1: 1, 2: 2, 0: 0, 3: 3}
+
+With node labels:
+
+>>> G1 = nx.path_graph(4)
+>>> G2 = nx.path_graph(4)
+>>> mapped = {1: 1, 2: 2, 3: 3, 0: 0}
+>>> nx.set_node_attributes(G1, dict(zip(G1, ["blue", "red", "green", "yellow"])), "label")
+>>> nx.set_node_attributes(G2, dict(zip([mapped[u] for u in G1], ["blue", "red", "green", "yellow"])), "label")
+>>> nx.vf2pp_is_isomorphic(G1, G2, node_label="label")
+True
+>>> nx.vf2pp_isomorphism(G1, G2, node_label="label")
+{1: 1, 2: 2, 0: 0, 3: 3}
+
+References
+----------
+.. [1] Jüttner, Alpár & Madarasi, Péter. (2018). "VF2++—An improved subgraph
+   isomorphism algorithm". Discrete Applied Mathematics. 242.
+   https://doi.org/10.1016/j.dam.2018.02.018
+
+"""
+import collections
+
+import networkx as nx
+
+__all__ = ["vf2pp_isomorphism", "vf2pp_is_isomorphic", "vf2pp_all_isomorphisms"]
+
+_GraphParameters = collections.namedtuple(
+    "_GraphParameters",
+    [
+        "G1",
+        "G2",
+        "G1_labels",
+        "G2_labels",
+        "nodes_of_G1Labels",
+        "nodes_of_G2Labels",
+        "G2_nodes_of_degree",
+    ],
+)
+
+_StateParameters = collections.namedtuple(
+    "_StateParameters",
+    [
+        "mapping",
+        "reverse_mapping",
+        "T1",
+        "T1_in",
+        "T1_tilde",
+        "T1_tilde_in",
+        "T2",
+        "T2_in",
+        "T2_tilde",
+        "T2_tilde_in",
+    ],
+)
+
+
+def vf2pp_isomorphism(G1, G2, node_label=None, default_label=None):
+    """Return an isomorphic mapping between `G1` and `G2` if it exists.
+
+    Parameters
+    ----------
+    G1, G2 : NetworkX Graph or MultiGraph instances.
+        The two graphs to check for isomorphism.
+
+    node_label : str, optional
+        The name of the node attribute to be used when comparing nodes.
+        The default is `None`, meaning node attributes are not considered
+        in the comparison. Any node that doesn't have the `node_label`
+        attribute uses `default_label` instead.
+
+    default_label : scalar
+        Default value to use when a node doesn't have an attribute
+        named `node_label`. Default is `None`.
+
+    Returns
+    -------
+    dict or None
+        Node mapping if the two graphs are isomorphic. None otherwise.
+    """
+    try:
+        mapping = next(vf2pp_all_isomorphisms(G1, G2, node_label, default_label))
+        return mapping
+    except StopIteration:
+        return None
+
+
+def vf2pp_is_isomorphic(G1, G2, node_label=None, default_label=None):
+    """Examines whether G1 and G2 are isomorphic.
+
+    Parameters
+    ----------
+    G1, G2 : NetworkX Graph or MultiGraph instances.
+        The two graphs to check for isomorphism.
+
+    node_label : str, optional
+        The name of the node attribute to be used when comparing nodes.
+        The default is `None`, meaning node attributes are not considered
+        in the comparison. Any node that doesn't have the `node_label`
+        attribute uses `default_label` instead.
+
+    default_label : scalar
+        Default value to use when a node doesn't have an attribute
+        named `node_label`. Default is `None`.
+
+    Returns
+    -------
+    bool
+        True if the two graphs are isomorphic, False otherwise.
+    """
+    if vf2pp_isomorphism(G1, G2, node_label, default_label) is not None:
+        return True
+    return False
+
+
+def vf2pp_all_isomorphisms(G1, G2, node_label=None, default_label=None):
+    """Yields all the possible mappings between G1 and G2.
+
+    Parameters
+    ----------
+    G1, G2 : NetworkX Graph or MultiGraph instances.
+        The two graphs to check for isomorphism.
+
+    node_label : str, optional
+        The name of the node attribute to be used when comparing nodes.
+        The default is `None`, meaning node attributes are not considered
+        in the comparison. Any node that doesn't have the `node_label`
+        attribute uses `default_label` instead.
+
+    default_label : scalar
+        Default value to use when a node doesn't have an attribute
+        named `node_label`. Default is `None`.
+
+    Yields
+    ------
+    dict
+        Isomorphic mapping between the nodes in `G1` and `G2`.
+    """
+    if G1.number_of_nodes() == 0 or G2.number_of_nodes() == 0:
+        return False
+
+    # Create the degree dicts based on graph type
+    if G1.is_directed():
+        G1_degree = {
+            n: (in_degree, out_degree)
+            for (n, in_degree), (_, out_degree) in zip(G1.in_degree, G1.out_degree)
+        }
+        G2_degree = {
+            n: (in_degree, out_degree)
+            for (n, in_degree), (_, out_degree) in zip(G2.in_degree, G2.out_degree)
+        }
+    else:
+        G1_degree = dict(G1.degree)
+        G2_degree = dict(G2.degree)
+
+    if not G1.is_directed():
+        find_candidates = _find_candidates
+        restore_Tinout = _restore_Tinout
+    else:
+        find_candidates = _find_candidates_Di
+        restore_Tinout = _restore_Tinout_Di
+
+    # Check that both graphs have the same number of nodes and degree sequence
+    if G1.order() != G2.order():
+        return False
+    if sorted(G1_degree.values()) != sorted(G2_degree.values()):
+        return False
+
+    # Initialize parameters and cache necessary information about degree and labels
+    graph_params, state_params = _initialize_parameters(
+        G1, G2, G2_degree, node_label, default_label
+    )
+
+    # Check if G1 and G2 have the same labels, and that number of nodes per label is equal between the two graphs
+    if not _precheck_label_properties(graph_params):
+        return False
+
+    # Calculate the optimal node ordering
+    node_order = _matching_order(graph_params)
+
+    # Initialize the stack
+    stack = []
+    candidates = iter(
+        find_candidates(node_order[0], graph_params, state_params, G1_degree)
+    )
+    stack.append((node_order[0], candidates))
+
+    mapping = state_params.mapping
+    reverse_mapping = state_params.reverse_mapping
+
+    # Index of the node from the order, currently being examined
+    matching_node = 1
+
+    while stack:
+        current_node, candidate_nodes = stack[-1]
+
+        try:
+            candidate = next(candidate_nodes)
+        except StopIteration:
+            # If no remaining candidates, return to a previous state, and follow another branch
+            stack.pop()
+            matching_node -= 1
+            if stack:
+                # Pop the previously added u-v pair, and look for a different candidate _v for u
+                popped_node1, _ = stack[-1]
+                popped_node2 = mapping[popped_node1]
+                mapping.pop(popped_node1)
+                reverse_mapping.pop(popped_node2)
+                restore_Tinout(popped_node1, popped_node2, graph_params, state_params)
+            continue
+
+        if _feasibility(current_node, candidate, graph_params, state_params):
+            # Terminate if mapping is extended to its full
+            if len(mapping) == G2.number_of_nodes() - 1:
+                cp_mapping = mapping.copy()
+                cp_mapping[current_node] = candidate
+                yield cp_mapping
+                continue
+
+            # Feasibility rules pass, so extend the mapping and update the parameters
+            mapping[current_node] = candidate
+            reverse_mapping[candidate] = current_node
+            _update_Tinout(current_node, candidate, graph_params, state_params)
+            # Append the next node and its candidates to the stack
+            candidates = iter(
+                find_candidates(
+                    node_order[matching_node], graph_params, state_params, G1_degree
+                )
+            )
+            stack.append((node_order[matching_node], candidates))
+            matching_node += 1
+
+
+def _precheck_label_properties(graph_params):
+    G1, G2, G1_labels, G2_labels, nodes_of_G1Labels, nodes_of_G2Labels, _ = graph_params
+    if any(
+        label not in nodes_of_G1Labels or len(nodes_of_G1Labels[label]) != len(nodes)
+        for label, nodes in nodes_of_G2Labels.items()
+    ):
+        return False
+    return True
+
+
+def _initialize_parameters(G1, G2, G2_degree, node_label=None, default_label=-1):
+    """Initializes all the necessary parameters for VF2++
+
+    Parameters
+    ----------
+    G1,G2: NetworkX Graph or MultiGraph instances.
+        The two graphs to check for isomorphism or monomorphism
+
+    G1_labels,G2_labels: dict
+        The label of every node in G1 and G2 respectively
+
+    Returns
+    -------
+    graph_params: namedtuple
+        Contains all the Graph-related parameters:
+
+        G1,G2
+        G1_labels,G2_labels: dict
+
+    state_params: namedtuple
+        Contains all the State-related parameters:
+
+        mapping: dict
+            The mapping as extended so far. Maps nodes of G1 to nodes of G2
+
+        reverse_mapping: dict
+            The reverse mapping as extended so far. Maps nodes from G2 to nodes of G1. It's basically "mapping" reversed
+
+        T1, T2: set
+            Ti contains uncovered neighbors of covered nodes from Gi, i.e. nodes that are not in the mapping, but are
+            neighbors of nodes that are.
+
+        T1_out, T2_out: set
+            Ti_out contains all the nodes from Gi, that are neither in the mapping nor in Ti
+    """
+    G1_labels = dict(G1.nodes(data=node_label, default=default_label))
+    G2_labels = dict(G2.nodes(data=node_label, default=default_label))
+
+    graph_params = _GraphParameters(
+        G1,
+        G2,
+        G1_labels,
+        G2_labels,
+        nx.utils.groups(G1_labels),
+        nx.utils.groups(G2_labels),
+        nx.utils.groups(G2_degree),
+    )
+
+    T1, T1_in = set(), set()
+    T2, T2_in = set(), set()
+    if G1.is_directed():
+        T1_tilde, T1_tilde_in = (
+            set(G1.nodes()),
+            set(),
+        )  # todo: do we need Ti_tilde_in? What nodes does it have?
+        T2_tilde, T2_tilde_in = set(G2.nodes()), set()
+    else:
+        T1_tilde, T1_tilde_in = set(G1.nodes()), set()
+        T2_tilde, T2_tilde_in = set(G2.nodes()), set()
+
+    state_params = _StateParameters(
+        {},
+        {},
+        T1,
+        T1_in,
+        T1_tilde,
+        T1_tilde_in,
+        T2,
+        T2_in,
+        T2_tilde,
+        T2_tilde_in,
+    )
+
+    return graph_params, state_params
+
+
+def _matching_order(graph_params):
+    """The node ordering as introduced in VF2++.
+
+    Notes
+    -----
+    Taking into account the structure of the Graph and the node labeling, the nodes are placed in an order such that,
+    most of the unfruitful/infeasible branches of the search space can be pruned on high levels, significantly
+    decreasing the number of visited states. The premise is that, the algorithm will be able to recognize
+    inconsistencies early, proceeding to go deep into the search tree only if it's needed.
+
+    Parameters
+    ----------
+    graph_params: namedtuple
+        Contains:
+
+            G1,G2: NetworkX Graph or MultiGraph instances.
+                The two graphs to check for isomorphism or monomorphism.
+
+            G1_labels,G2_labels: dict
+                The label of every node in G1 and G2 respectively.
+
+    Returns
+    -------
+    node_order: list
+        The ordering of the nodes.
+    """
+    G1, G2, G1_labels, _, _, nodes_of_G2Labels, _ = graph_params
+    if not G1 and not G2:
+        return {}
+
+    if G1.is_directed():
+        G1 = G1.to_undirected(as_view=True)
+
+    V1_unordered = set(G1.nodes())
+    label_rarity = {label: len(nodes) for label, nodes in nodes_of_G2Labels.items()}
+    used_degrees = {node: 0 for node in G1}
+    node_order = []
+
+    while V1_unordered:
+        max_rarity = min(label_rarity[G1_labels[x]] for x in V1_unordered)
+        rarest_nodes = [
+            n for n in V1_unordered if label_rarity[G1_labels[n]] == max_rarity
+        ]
+        max_node = max(rarest_nodes, key=G1.degree)
+
+        for dlevel_nodes in nx.bfs_layers(G1, max_node):
+            nodes_to_add = dlevel_nodes.copy()
+            while nodes_to_add:
+                max_used_degree = max(used_degrees[n] for n in nodes_to_add)
+                max_used_degree_nodes = [
+                    n for n in nodes_to_add if used_degrees[n] == max_used_degree
+                ]
+                max_degree = max(G1.degree[n] for n in max_used_degree_nodes)
+                max_degree_nodes = [
+                    n for n in max_used_degree_nodes if G1.degree[n] == max_degree
+                ]
+                next_node = min(
+                    max_degree_nodes, key=lambda x: label_rarity[G1_labels[x]]
+                )
+
+                node_order.append(next_node)
+                for node in G1.neighbors(next_node):
+                    used_degrees[node] += 1
+
+                nodes_to_add.remove(next_node)
+                label_rarity[G1_labels[next_node]] -= 1
+                V1_unordered.discard(next_node)
+
+    return node_order
+
+
+def _find_candidates(
+    u, graph_params, state_params, G1_degree
+):  # todo: make the 4th argument the degree of u
+    """Given node u of G1, finds the candidates of u from G2.
+
+    Parameters
+    ----------
+    u: Graph node
+        The node from G1 for which to find the candidates from G2.
+
+    graph_params: namedtuple
+        Contains all the Graph-related parameters:
+
+        G1,G2: NetworkX Graph or MultiGraph instances.
+            The two graphs to check for isomorphism or monomorphism
+
+        G1_labels,G2_labels: dict
+            The label of every node in G1 and G2 respectively
+
+    state_params: namedtuple
+        Contains all the State-related parameters:
+
+        mapping: dict
+            The mapping as extended so far. Maps nodes of G1 to nodes of G2
+
+        reverse_mapping: dict
+            The reverse mapping as extended so far. Maps nodes from G2 to nodes of G1. It's basically "mapping" reversed
+
+        T1, T2: set
+            Ti contains uncovered neighbors of covered nodes from Gi, i.e. nodes that are not in the mapping, but are
+            neighbors of nodes that are.
+
+        T1_tilde, T2_tilde: set
+            Ti_tilde contains all the nodes from Gi, that are neither in the mapping nor in Ti
+
+    Returns
+    -------
+    candidates: set
+        The nodes from G2 which are candidates for u.
+    """
+    G1, G2, G1_labels, _, _, nodes_of_G2Labels, G2_nodes_of_degree = graph_params
+    mapping, reverse_mapping, _, _, _, _, _, _, T2_tilde, _ = state_params
+
+    covered_neighbors = [nbr for nbr in G1[u] if nbr in mapping]
+    if not covered_neighbors:
+        candidates = set(nodes_of_G2Labels[G1_labels[u]])
+        candidates.intersection_update(G2_nodes_of_degree[G1_degree[u]])
+        candidates.intersection_update(T2_tilde)
+        candidates.difference_update(reverse_mapping)
+        if G1.is_multigraph():
+            candidates.difference_update(
+                {
+                    node
+                    for node in candidates
+                    if G1.number_of_edges(u, u) != G2.number_of_edges(node, node)
+                }
+            )
+        return candidates
+
+    nbr1 = covered_neighbors[0]
+    common_nodes = set(G2[mapping[nbr1]])
+
+    for nbr1 in covered_neighbors[1:]:
+        common_nodes.intersection_update(G2[mapping[nbr1]])
+
+    common_nodes.difference_update(reverse_mapping)
+    common_nodes.intersection_update(G2_nodes_of_degree[G1_degree[u]])
+    common_nodes.intersection_update(nodes_of_G2Labels[G1_labels[u]])
+    if G1.is_multigraph():
+        common_nodes.difference_update(
+            {
+                node
+                for node in common_nodes
+                if G1.number_of_edges(u, u) != G2.number_of_edges(node, node)
+            }
+        )
+    return common_nodes
+
+
+def _find_candidates_Di(u, graph_params, state_params, G1_degree):
+    G1, G2, G1_labels, _, _, nodes_of_G2Labels, G2_nodes_of_degree = graph_params
+    mapping, reverse_mapping, _, _, _, _, _, _, T2_tilde, _ = state_params
+
+    covered_successors = [succ for succ in G1[u] if succ in mapping]
+    covered_predecessors = [pred for pred in G1.pred[u] if pred in mapping]
+
+    if not (covered_successors or covered_predecessors):
+        candidates = set(nodes_of_G2Labels[G1_labels[u]])
+        candidates.intersection_update(G2_nodes_of_degree[G1_degree[u]])
+        candidates.intersection_update(T2_tilde)
+        candidates.difference_update(reverse_mapping)
+        if G1.is_multigraph():
+            candidates.difference_update(
+                {
+                    node
+                    for node in candidates
+                    if G1.number_of_edges(u, u) != G2.number_of_edges(node, node)
+                }
+            )
+        return candidates
+
+    if covered_successors:
+        succ1 = covered_successors[0]
+        common_nodes = set(G2.pred[mapping[succ1]])
+
+        for succ1 in covered_successors[1:]:
+            common_nodes.intersection_update(G2.pred[mapping[succ1]])
+    else:
+        pred1 = covered_predecessors.pop()
+        common_nodes = set(G2[mapping[pred1]])
+
+    for pred1 in covered_predecessors:
+        common_nodes.intersection_update(G2[mapping[pred1]])
+
+    common_nodes.difference_update(reverse_mapping)
+    common_nodes.intersection_update(G2_nodes_of_degree[G1_degree[u]])
+    common_nodes.intersection_update(nodes_of_G2Labels[G1_labels[u]])
+    if G1.is_multigraph():
+        common_nodes.difference_update(
+            {
+                node
+                for node in common_nodes
+                if G1.number_of_edges(u, u) != G2.number_of_edges(node, node)
+            }
+        )
+    return common_nodes
+
+
+def _feasibility(node1, node2, graph_params, state_params):
+    """Given a candidate pair of nodes u and v from G1 and G2 respectively, checks if it's feasible to extend the
+    mapping, i.e. if u and v can be matched.
+
+    Notes
+    -----
+    This function performs all the necessary checking by applying both consistency and cutting rules.
+
+    Parameters
+    ----------
+    node1, node2: Graph node
+        The candidate pair of nodes being checked for matching
+
+    graph_params: namedtuple
+        Contains all the Graph-related parameters:
+
+        G1,G2: NetworkX Graph or MultiGraph instances.
+            The two graphs to check for isomorphism or monomorphism
+
+        G1_labels,G2_labels: dict
+            The label of every node in G1 and G2 respectively
+
+    state_params: namedtuple
+        Contains all the State-related parameters:
+
+        mapping: dict
+            The mapping as extended so far. Maps nodes of G1 to nodes of G2
+
+        reverse_mapping: dict
+            The reverse mapping as extended so far. Maps nodes from G2 to nodes of G1. It's basically "mapping" reversed
+
+        T1, T2: set
+            Ti contains uncovered neighbors of covered nodes from Gi, i.e. nodes that are not in the mapping, but are
+            neighbors of nodes that are.
+
+        T1_out, T2_out: set
+            Ti_out contains all the nodes from Gi, that are neither in the mapping nor in Ti
+
+    Returns
+    -------
+    True if all checks are successful, False otherwise.
+    """
+    G1 = graph_params.G1
+
+    if _cut_PT(node1, node2, graph_params, state_params):
+        return False
+
+    if G1.is_multigraph():
+        if not _consistent_PT(node1, node2, graph_params, state_params):
+            return False
+
+    return True
+
+
+def _cut_PT(u, v, graph_params, state_params):
+    """Implements the cutting rules for the ISO problem.
+
+    Parameters
+    ----------
+    u, v: Graph node
+        The two candidate nodes being examined.
+
+    graph_params: namedtuple
+        Contains all the Graph-related parameters:
+
+        G1,G2: NetworkX Graph or MultiGraph instances.
+            The two graphs to check for isomorphism or monomorphism
+
+        G1_labels,G2_labels: dict
+            The label of every node in G1 and G2 respectively
+
+    state_params: namedtuple
+        Contains all the State-related parameters:
+
+        mapping: dict
+            The mapping as extended so far. Maps nodes of G1 to nodes of G2
+
+        reverse_mapping: dict
+            The reverse mapping as extended so far. Maps nodes from G2 to nodes of G1. It's basically "mapping" reversed
+
+        T1, T2: set
+            Ti contains uncovered neighbors of covered nodes from Gi, i.e. nodes that are not in the mapping, but are
+            neighbors of nodes that are.
+
+        T1_tilde, T2_tilde: set
+            Ti_out contains all the nodes from Gi, that are neither in the mapping nor in Ti
+
+    Returns
+    -------
+    True if we should prune this branch, i.e. the node pair failed the cutting checks. False otherwise.
+    """
+    G1, G2, G1_labels, G2_labels, _, _, _ = graph_params
+    (
+        _,
+        _,
+        T1,
+        T1_in,
+        T1_tilde,
+        _,
+        T2,
+        T2_in,
+        T2_tilde,
+        _,
+    ) = state_params
+
+    u_labels_predecessors, v_labels_predecessors = {}, {}
+    if G1.is_directed():
+        u_labels_predecessors = nx.utils.groups(
+            {n1: G1_labels[n1] for n1 in G1.pred[u]}
+        )
+        v_labels_predecessors = nx.utils.groups(
+            {n2: G2_labels[n2] for n2 in G2.pred[v]}
+        )
+
+        if set(u_labels_predecessors.keys()) != set(v_labels_predecessors.keys()):
+            return True
+
+    u_labels_successors = nx.utils.groups({n1: G1_labels[n1] for n1 in G1[u]})
+    v_labels_successors = nx.utils.groups({n2: G2_labels[n2] for n2 in G2[v]})
+
+    # if the neighbors of u, do not have the same labels as those of v, NOT feasible.
+    if set(u_labels_successors.keys()) != set(v_labels_successors.keys()):
+        return True
+
+    for label, G1_nbh in u_labels_successors.items():
+        G2_nbh = v_labels_successors[label]
+
+        if G1.is_multigraph():
+            # Check for every neighbor in the neighborhood, if u-nbr1 has same edges as v-nbr2
+            u_nbrs_edges = sorted(G1.number_of_edges(u, x) for x in G1_nbh)
+            v_nbrs_edges = sorted(G2.number_of_edges(v, x) for x in G2_nbh)
+            if any(
+                u_nbr_edges != v_nbr_edges
+                for u_nbr_edges, v_nbr_edges in zip(u_nbrs_edges, v_nbrs_edges)
+            ):
+                return True
+
+        if len(T1.intersection(G1_nbh)) != len(T2.intersection(G2_nbh)):
+            return True
+        if len(T1_tilde.intersection(G1_nbh)) != len(T2_tilde.intersection(G2_nbh)):
+            return True
+        if G1.is_directed() and len(T1_in.intersection(G1_nbh)) != len(
+            T2_in.intersection(G2_nbh)
+        ):
+            return True
+
+    if not G1.is_directed():
+        return False
+
+    for label, G1_pred in u_labels_predecessors.items():
+        G2_pred = v_labels_predecessors[label]
+
+        if G1.is_multigraph():
+            # Check for every neighbor in the neighborhood, if u-nbr1 has same edges as v-nbr2
+            u_pred_edges = sorted(G1.number_of_edges(u, x) for x in G1_pred)
+            v_pred_edges = sorted(G2.number_of_edges(v, x) for x in G2_pred)
+            if any(
+                u_nbr_edges != v_nbr_edges
+                for u_nbr_edges, v_nbr_edges in zip(u_pred_edges, v_pred_edges)
+            ):
+                return True
+
+        if len(T1.intersection(G1_pred)) != len(T2.intersection(G2_pred)):
+            return True
+        if len(T1_tilde.intersection(G1_pred)) != len(T2_tilde.intersection(G2_pred)):
+            return True
+        if len(T1_in.intersection(G1_pred)) != len(T2_in.intersection(G2_pred)):
+            return True
+
+    return False
+
+
+def _consistent_PT(u, v, graph_params, state_params):
+    """Checks the consistency of extending the mapping using the current node pair.
+
+    Parameters
+    ----------
+    u, v: Graph node
+        The two candidate nodes being examined.
+
+    graph_params: namedtuple
+        Contains all the Graph-related parameters:
+
+        G1,G2: NetworkX Graph or MultiGraph instances.
+            The two graphs to check for isomorphism or monomorphism
+
+        G1_labels,G2_labels: dict
+            The label of every node in G1 and G2 respectively
+
+    state_params: namedtuple
+        Contains all the State-related parameters:
+
+        mapping: dict
+            The mapping as extended so far. Maps nodes of G1 to nodes of G2
+
+        reverse_mapping: dict
+            The reverse mapping as extended so far. Maps nodes from G2 to nodes of G1. It's basically "mapping" reversed
+
+        T1, T2: set
+            Ti contains uncovered neighbors of covered nodes from Gi, i.e. nodes that are not in the mapping, but are
+            neighbors of nodes that are.
+
+        T1_out, T2_out: set
+            Ti_out contains all the nodes from Gi, that are neither in the mapping nor in Ti
+
+    Returns
+    -------
+    True if the pair passes all the consistency checks successfully. False otherwise.
+    """
+    G1, G2 = graph_params.G1, graph_params.G2
+    mapping, reverse_mapping = state_params.mapping, state_params.reverse_mapping
+
+    for neighbor in G1[u]:
+        if neighbor in mapping:
+            if G1.number_of_edges(u, neighbor) != G2.number_of_edges(
+                v, mapping[neighbor]
+            ):
+                return False
+
+    for neighbor in G2[v]:
+        if neighbor in reverse_mapping:
+            if G1.number_of_edges(u, reverse_mapping[neighbor]) != G2.number_of_edges(
+                v, neighbor
+            ):
+                return False
+
+    if not G1.is_directed():
+        return True
+
+    for predecessor in G1.pred[u]:
+        if predecessor in mapping:
+            if G1.number_of_edges(predecessor, u) != G2.number_of_edges(
+                mapping[predecessor], v
+            ):
+                return False
+
+    for predecessor in G2.pred[v]:
+        if predecessor in reverse_mapping:
+            if G1.number_of_edges(
+                reverse_mapping[predecessor], u
+            ) != G2.number_of_edges(predecessor, v):
+                return False
+
+    return True
+
+
+def _update_Tinout(new_node1, new_node2, graph_params, state_params):
+    """Updates the Ti/Ti_out (i=1,2) when a new node pair u-v is added to the mapping.
+
+    Notes
+    -----
+    This function should be called right after the feasibility checks are passed, and node1 is mapped to node2. The
+    purpose of this function is to avoid brute force computing of Ti/Ti_out by iterating over all nodes of the graph
+    and checking which nodes satisfy the necessary conditions. Instead, in every step of the algorithm we focus
+    exclusively on the two nodes that are being added to the mapping, incrementally updating Ti/Ti_out.
+
+    Parameters
+    ----------
+    new_node1, new_node2: Graph node
+        The two new nodes, added to the mapping.
+
+    graph_params: namedtuple
+        Contains all the Graph-related parameters:
+
+        G1,G2: NetworkX Graph or MultiGraph instances.
+            The two graphs to check for isomorphism or monomorphism
+
+        G1_labels,G2_labels: dict
+            The label of every node in G1 and G2 respectively
+
+    state_params: namedtuple
+        Contains all the State-related parameters:
+
+        mapping: dict
+            The mapping as extended so far. Maps nodes of G1 to nodes of G2
+
+        reverse_mapping: dict
+            The reverse mapping as extended so far. Maps nodes from G2 to nodes of G1. It's basically "mapping" reversed
+
+        T1, T2: set
+            Ti contains uncovered neighbors of covered nodes from Gi, i.e. nodes that are not in the mapping, but are
+            neighbors of nodes that are.
+
+        T1_tilde, T2_tilde: set
+            Ti_out contains all the nodes from Gi, that are neither in the mapping nor in Ti
+    """
+    G1, G2, _, _, _, _, _ = graph_params
+    (
+        mapping,
+        reverse_mapping,
+        T1,
+        T1_in,
+        T1_tilde,
+        T1_tilde_in,
+        T2,
+        T2_in,
+        T2_tilde,
+        T2_tilde_in,
+    ) = state_params
+
+    uncovered_successors_G1 = {succ for succ in G1[new_node1] if succ not in mapping}
+    uncovered_successors_G2 = {
+        succ for succ in G2[new_node2] if succ not in reverse_mapping
+    }
+
+    # Add the uncovered neighbors of node1 and node2 in T1 and T2 respectively
+    T1.update(uncovered_successors_G1)
+    T2.update(uncovered_successors_G2)
+    T1.discard(new_node1)
+    T2.discard(new_node2)
+
+    T1_tilde.difference_update(uncovered_successors_G1)
+    T2_tilde.difference_update(uncovered_successors_G2)
+    T1_tilde.discard(new_node1)
+    T2_tilde.discard(new_node2)
+
+    if not G1.is_directed():
+        return
+
+    uncovered_predecessors_G1 = {
+        pred for pred in G1.pred[new_node1] if pred not in mapping
+    }
+    uncovered_predecessors_G2 = {
+        pred for pred in G2.pred[new_node2] if pred not in reverse_mapping
+    }
+
+    T1_in.update(uncovered_predecessors_G1)
+    T2_in.update(uncovered_predecessors_G2)
+    T1_in.discard(new_node1)
+    T2_in.discard(new_node2)
+
+    T1_tilde.difference_update(uncovered_predecessors_G1)
+    T2_tilde.difference_update(uncovered_predecessors_G2)
+    T1_tilde.discard(new_node1)
+    T2_tilde.discard(new_node2)
+
+
+def _restore_Tinout(popped_node1, popped_node2, graph_params, state_params):
+    """Restores the previous version of Ti/Ti_out when a node pair is deleted from the mapping.
+
+    Parameters
+    ----------
+    popped_node1, popped_node2: Graph node
+        The two nodes deleted from the mapping.
+
+    graph_params: namedtuple
+        Contains all the Graph-related parameters:
+
+        G1,G2: NetworkX Graph or MultiGraph instances.
+            The two graphs to check for isomorphism or monomorphism
+
+        G1_labels,G2_labels: dict
+            The label of every node in G1 and G2 respectively
+
+    state_params: namedtuple
+        Contains all the State-related parameters:
+
+        mapping: dict
+            The mapping as extended so far. Maps nodes of G1 to nodes of G2
+
+        reverse_mapping: dict
+            The reverse mapping as extended so far. Maps nodes from G2 to nodes of G1. It's basically "mapping" reversed
+
+        T1, T2: set
+            Ti contains uncovered neighbors of covered nodes from Gi, i.e. nodes that are not in the mapping, but are
+            neighbors of nodes that are.
+
+        T1_tilde, T2_tilde: set
+            Ti_out contains all the nodes from Gi, that are neither in the mapping nor in Ti
+    """
+    # If the node we want to remove from the mapping, has at least one covered neighbor, add it to T1.
+    G1, G2, _, _, _, _, _ = graph_params
+    (
+        mapping,
+        reverse_mapping,
+        T1,
+        T1_in,
+        T1_tilde,
+        T1_tilde_in,
+        T2,
+        T2_in,
+        T2_tilde,
+        T2_tilde_in,
+    ) = state_params
+
+    is_added = False
+    for neighbor in G1[popped_node1]:
+        if neighbor in mapping:
+            # if a neighbor of the excluded node1 is in the mapping, keep node1 in T1
+            is_added = True
+            T1.add(popped_node1)
+        else:
+            # check if its neighbor has another connection with a covered node. If not, only then exclude it from T1
+            if any(nbr in mapping for nbr in G1[neighbor]):
+                continue
+            T1.discard(neighbor)
+            T1_tilde.add(neighbor)
+
+    # Case where the node is present in neither the mapping nor T1. By definition, it should belong to T1_tilde
+    if not is_added:
+        T1_tilde.add(popped_node1)
+
+    is_added = False
+    for neighbor in G2[popped_node2]:
+        if neighbor in reverse_mapping:
+            is_added = True
+            T2.add(popped_node2)
+        else:
+            if any(nbr in reverse_mapping for nbr in G2[neighbor]):
+                continue
+            T2.discard(neighbor)
+            T2_tilde.add(neighbor)
+
+    if not is_added:
+        T2_tilde.add(popped_node2)
+
+
+def _restore_Tinout_Di(popped_node1, popped_node2, graph_params, state_params):
+    # If the node we want to remove from the mapping, has at least one covered neighbor, add it to T1.
+    G1, G2, _, _, _, _, _ = graph_params
+    (
+        mapping,
+        reverse_mapping,
+        T1,
+        T1_in,
+        T1_tilde,
+        T1_tilde_in,
+        T2,
+        T2_in,
+        T2_tilde,
+        T2_tilde_in,
+    ) = state_params
+
+    is_added = False
+    for successor in G1[popped_node1]:
+        if successor in mapping:
+            # if a neighbor of the excluded node1 is in the mapping, keep node1 in T1
+            is_added = True
+            T1_in.add(popped_node1)
+        else:
+            # check if its neighbor has another connection with a covered node. If not, only then exclude it from T1
+            if not any(pred in mapping for pred in G1.pred[successor]):
+                T1.discard(successor)
+
+            if not any(succ in mapping for succ in G1[successor]):
+                T1_in.discard(successor)
+
+            if successor not in T1:
+                if successor not in T1_in:
+                    T1_tilde.add(successor)
+
+    for predecessor in G1.pred[popped_node1]:
+        if predecessor in mapping:
+            # if a neighbor of the excluded node1 is in the mapping, keep node1 in T1
+            is_added = True
+            T1.add(popped_node1)
+        else:
+            # check if its neighbor has another connection with a covered node. If not, only then exclude it from T1
+            if not any(pred in mapping for pred in G1.pred[predecessor]):
+                T1.discard(predecessor)
+
+            if not any(succ in mapping for succ in G1[predecessor]):
+                T1_in.discard(predecessor)
+
+            if not (predecessor in T1 or predecessor in T1_in):
+                T1_tilde.add(predecessor)
+
+    # Case where the node is present in neither the mapping nor T1. By definition it should belong to T1_tilde
+    if not is_added:
+        T1_tilde.add(popped_node1)
+
+    is_added = False
+    for successor in G2[popped_node2]:
+        if successor in reverse_mapping:
+            is_added = True
+            T2_in.add(popped_node2)
+        else:
+            if not any(pred in reverse_mapping for pred in G2.pred[successor]):
+                T2.discard(successor)
+
+            if not any(succ in reverse_mapping for succ in G2[successor]):
+                T2_in.discard(successor)
+
+            if successor not in T2:
+                if successor not in T2_in:
+                    T2_tilde.add(successor)
+
+    for predecessor in G2.pred[popped_node2]:
+        if predecessor in reverse_mapping:
+            # if a neighbor of the excluded node1 is in the mapping, keep node1 in T1
+            is_added = True
+            T2.add(popped_node2)
+        else:
+            # check if its neighbor has another connection with a covered node. If not, only then exclude it from T1
+            if not any(pred in reverse_mapping for pred in G2.pred[predecessor]):
+                T2.discard(predecessor)
+
+            if not any(succ in reverse_mapping for succ in G2[predecessor]):
+                T2_in.discard(predecessor)
+
+            if not (predecessor in T2 or predecessor in T2_in):
+                T2_tilde.add(predecessor)
+
+    if not is_added:
+        T2_tilde.add(popped_node2)
diff --git a/networkx/algorithms/isomorphism/vf2userfunc.py b/networkx/algorithms/isomorphism/vf2userfunc.py
index 349129e..9484edc 100644
--- a/networkx/algorithms/isomorphism/vf2userfunc.py
+++ b/networkx/algorithms/isomorphism/vf2userfunc.py
@@ -46,7 +46,6 @@ def _semantic_feasibility(self, G1_node, G2_node):
 
     # Make sure the edges match
     if self.edge_match is not None:
-
         # Cached lookups
         G1nbrs = self.G1_adj[G1_node]
         G2nbrs = self.G2_adj[G2_node]
@@ -188,10 +187,6 @@ class DiGraphMatcher(vf2.DiGraphMatcher):
 class MultiGraphMatcher(GraphMatcher):
     """VF2 isomorphism checker for undirected multigraphs."""
 
-    pass
-
 
 class MultiDiGraphMatcher(DiGraphMatcher):
     """VF2 isomorphism checker for directed multigraphs."""
-
-    pass
diff --git a/networkx/algorithms/link_analysis/hits_alg.py b/networkx/algorithms/link_analysis/hits_alg.py
index 2deb3f4..47fcdb4 100644
--- a/networkx/algorithms/link_analysis/hits_alg.py
+++ b/networkx/algorithms/link_analysis/hits_alg.py
@@ -2,9 +2,10 @@
 """
 import networkx as nx
 
-__all__ = ["hits", "hits_numpy", "hits_scipy", "authority_matrix", "hub_matrix"]
+__all__ = ["hits"]
 
 
+@nx._dispatch
 def hits(G, max_iter=100, tol=1.0e-8, nstart=None, normalized=True):
     """Returns HITS hubs and authorities values for nodes.
 
@@ -144,51 +145,9 @@ def _hits_python(G, max_iter=100, tol=1.0e-8, nstart=None, normalized=True):
     return h, a
 
 
-def authority_matrix(G, nodelist=None):
-    """Returns the HITS authority matrix.
-
-    .. deprecated:: 2.6
-    """
-    import warnings
-
-    msg = (
-        "\nauthority_matrix is deprecated as of version 2.6 and will be removed "
-        "in version 3.0.\n"
-        "The authority matrix can be computed by::\n"
-        "    >>> M = nx.to_numpy_array(G, nodelist=nodelist)\n"
-        "    >>> M.T @ M"
-    )
-    warnings.warn(msg, DeprecationWarning)
-    M = nx.to_numpy_array(G, nodelist=nodelist)
-    return M.T @ M
-
-
-def hub_matrix(G, nodelist=None):
-    """Returns the HITS hub matrix.
-
-    .. deprecated:: 2.6
-    """
-    import warnings
-
-    msg = (
-        "\nhub_matrix is deprecated as of version 2.6 and will be removed "
-        "in version 3.0.\n"
-        "The hub matrix can be computed by::\n"
-        "    >>> M = nx.to_numpy_array(G, nodelist=nodelist)\n"
-        "    >>> M @ M.T"
-    )
-    warnings.warn(msg, DeprecationWarning)
-    M = nx.to_numpy_array(G, nodelist=nodelist)
-    return M @ M.T
-
-
-def hits_numpy(G, normalized=True):
+def _hits_numpy(G, normalized=True):
     """Returns HITS hubs and authorities values for nodes.
 
-    .. deprecated:: 2.6
-
-       hits_numpy is deprecated and will be removed in networkx 3.0.
-
     The HITS algorithm computes two numbers for a node.
     Authorities estimates the node value based on the incoming links.
     Hubs estimates the node value based on outgoing links.
@@ -221,10 +180,11 @@ def hits_numpy(G, normalized=True):
     >>> hubs_matrix = adj_ary @ adj_ary.T
     >>> authority_matrix = adj_ary.T @ adj_ary
 
-    `hits_numpy` maps the eigenvector corresponding to the maximum eigenvalue
+    `_hits_numpy` maps the eigenvector corresponding to the maximum eigenvalue
     of the respective matrices to the nodes in `G`:
 
-    >>> hubs, authority = nx.hits_numpy(G)
+    >>> from networkx.algorithms.link_analysis.hits_alg import _hits_numpy
+    >>> hubs, authority = _hits_numpy(G)
 
     Notes
     -----
@@ -245,19 +205,8 @@ def hits_numpy(G, normalized=True):
        doi:10.1145/324133.324140.
        http://www.cs.cornell.edu/home/kleinber/auth.pdf.
     """
-    import warnings
-
     import numpy as np
 
-    warnings.warn(
-        (
-            "networkx.hits_numpy is deprecated and will be removed"
-            "in NetworkX 3.0, use networkx.hits instead."
-        ),
-        DeprecationWarning,
-        stacklevel=2,
-    )
-
     if len(G) == 0:
         return {}, {}
     adj_ary = nx.to_numpy_array(G)
@@ -280,12 +229,9 @@ def hits_numpy(G, normalized=True):
     return hubs, authorities
 
 
-def hits_scipy(G, max_iter=100, tol=1.0e-6, nstart=None, normalized=True):
+def _hits_scipy(G, max_iter=100, tol=1.0e-6, nstart=None, normalized=True):
     """Returns HITS hubs and authorities values for nodes.
 
-    .. deprecated:: 2.6
-
-       hits_scipy is deprecated and will be removed in networkx 3.0
 
     The HITS algorithm computes two numbers for a node.
     Authorities estimates the node value based on the incoming links.
@@ -316,8 +262,9 @@ def hits_scipy(G, max_iter=100, tol=1.0e-6, nstart=None, normalized=True):
 
     Examples
     --------
+    >>> from networkx.algorithms.link_analysis.hits_alg import _hits_scipy
     >>> G = nx.path_graph(4)
-    >>> h, a = nx.hits(G)
+    >>> h, a = _hits_scipy(G)
 
     Notes
     -----
@@ -350,19 +297,8 @@ def hits_scipy(G, max_iter=100, tol=1.0e-6, nstart=None, normalized=True):
        doi:10.1145/324133.324140.
        http://www.cs.cornell.edu/home/kleinber/auth.pdf.
     """
-    import warnings
-
     import numpy as np
 
-    warnings.warn(
-        (
-            "networkx.hits_scipy is deprecated and will be removed"
-            "in NetworkX 3.0, use networkx.hits instead."
-        ),
-        DeprecationWarning,
-        stacklevel=2,
-    )
-
     if len(G) == 0:
         return {}, {}
     A = nx.to_scipy_sparse_array(G, nodelist=list(G))
diff --git a/networkx/algorithms/link_analysis/pagerank_alg.py b/networkx/algorithms/link_analysis/pagerank_alg.py
index ece444c..6aab084 100644
--- a/networkx/algorithms/link_analysis/pagerank_alg.py
+++ b/networkx/algorithms/link_analysis/pagerank_alg.py
@@ -3,9 +3,10 @@ from warnings import warn
 
 import networkx as nx
 
-__all__ = ["pagerank", "pagerank_numpy", "pagerank_scipy", "google_matrix"]
+__all__ = ["pagerank", "google_matrix"]
 
 
+@nx._dispatch
 def pagerank(
     G,
     alpha=0.85,
@@ -35,7 +36,7 @@ def pagerank(
       The "personalization vector" consisting of a dictionary with a
       key some subset of graph nodes and personalization value each of those.
       At least one personalization value must be non-zero.
-      If not specfiied, a nodes personalization value will be zero.
+      If not specified, a nodes personalization value will be zero.
       By default, a uniform distribution is used.
 
     max_iter : integer, optional
@@ -43,6 +44,7 @@ def pagerank(
 
     tol : float, optional
       Error tolerance used to check convergence in power method solver.
+      The iteration will stop after a tolerance of ``len(G) * tol`` is reached.
 
     nstart : dictionary, optional
       Starting value of PageRank iteration for each node.
@@ -86,7 +88,7 @@ def pagerank(
 
     See Also
     --------
-    pagerank_numpy, pagerank_scipy, google_matrix
+    google_matrix
 
     Raises
     ------
@@ -105,7 +107,7 @@ def pagerank(
        http://dbpubs.stanford.edu:8090/pub/showDoc.Fulltext?lang=en&doc=1999-66&format=pdf
 
     """
-    return pagerank_scipy(
+    return _pagerank_scipy(
         G, alpha, personalization, max_iter, tol, nstart, weight, dangling
     )
 
@@ -170,6 +172,7 @@ def _pagerank_python(
     raise nx.PowerIterationFailedConvergence(max_iter)
 
 
+@nx._dispatch
 def google_matrix(
     G, alpha=0.85, personalization=None, nodelist=None, weight="weight", dangling=None
 ):
@@ -188,7 +191,7 @@ def google_matrix(
       The "personalization vector" consisting of a dictionary with a
       key some subset of graph nodes and personalization value each of those.
       At least one personalization value must be non-zero.
-      If not specfiied, a nodes personalization value will be zero.
+      If not specified, a nodes personalization value will be zero.
       By default, a uniform distribution is used.
 
     nodelist : list, optional
@@ -209,12 +212,12 @@ def google_matrix(
 
     Returns
     -------
-    A : NumPy matrix
+    A : 2D NumPy ndarray
        Google matrix of the graph
 
     Notes
     -----
-    The matrix returned represents the transition matrix that describes the
+    The array returned represents the transition matrix that describes the
     Markov chain used in PageRank. For PageRank to converge to a unique
     solution (i.e., a unique stationary distribution in a Markov chain), the
     transition matrix must be irreducible. In other words, it must be that
@@ -227,28 +230,17 @@ def google_matrix(
 
     See Also
     --------
-    pagerank, pagerank_numpy, pagerank_scipy
+    pagerank
     """
-    # TODO: Remove this warning in version 3.0
-    import warnings
-
     import numpy as np
 
-    warnings.warn(
-        "google_matrix will return an np.ndarray instead of a np.matrix in\n"
-        "NetworkX version 3.0.",
-        FutureWarning,
-        stacklevel=2,
-    )
-
     if nodelist is None:
         nodelist = list(G)
 
     A = nx.to_numpy_array(G, nodelist=nodelist, weight=weight)
     N = len(G)
     if N == 0:
-        # TODO: Remove np.asmatrix wrapper in version 3.0
-        return np.asmatrix(A)
+        return A
 
     # Personalization vector
     if personalization is None:
@@ -273,11 +265,12 @@ def google_matrix(
 
     A /= A.sum(axis=1)[:, np.newaxis]  # Normalize rows to sum to 1
 
-    # TODO: Remove np.asmatrix wrapper in version 3.0
-    return np.asmatrix(alpha * A + (1 - alpha) * p)
+    return alpha * A + (1 - alpha) * p
 
 
-def pagerank_numpy(G, alpha=0.85, personalization=None, weight="weight", dangling=None):
+def _pagerank_numpy(
+    G, alpha=0.85, personalization=None, weight="weight", dangling=None
+):
     """Returns the PageRank of the nodes in the graph.
 
     PageRank computes a ranking of the nodes in the graph G based on
@@ -297,7 +290,7 @@ def pagerank_numpy(G, alpha=0.85, personalization=None, weight="weight", danglin
       The "personalization vector" consisting of a dictionary with a
       key some subset of graph nodes and personalization value each of those.
       At least one personalization value must be non-zero.
-      If not specfiied, a nodes personalization value will be zero.
+      If not specified, a nodes personalization value will be zero.
       By default, a uniform distribution is used.
 
     weight : key, optional
@@ -319,8 +312,9 @@ def pagerank_numpy(G, alpha=0.85, personalization=None, weight="weight", danglin
 
     Examples
     --------
+    >>> from networkx.algorithms.link_analysis.pagerank_alg import _pagerank_numpy
     >>> G = nx.DiGraph(nx.path_graph(4))
-    >>> pr = nx.pagerank_numpy(G, alpha=0.9)
+    >>> pr = _pagerank_numpy(G, alpha=0.9)
 
     Notes
     -----
@@ -334,7 +328,7 @@ def pagerank_numpy(G, alpha=0.85, personalization=None, weight="weight", danglin
 
     See Also
     --------
-    pagerank, pagerank_scipy, google_matrix
+    pagerank, google_matrix
 
     References
     ----------
@@ -345,8 +339,6 @@ def pagerank_numpy(G, alpha=0.85, personalization=None, weight="weight", danglin
        The PageRank citation ranking: Bringing order to the Web. 1999
        http://dbpubs.stanford.edu:8090/pub/showDoc.Fulltext?lang=en&doc=1999-66&format=pdf
     """
-    msg = "networkx.pagerank_numpy is deprecated and will be removed in NetworkX 3.0, use networkx.pagerank instead."
-    warn(msg, DeprecationWarning, stacklevel=2)
     import numpy as np
 
     if len(G) == 0:
@@ -363,7 +355,7 @@ def pagerank_numpy(G, alpha=0.85, personalization=None, weight="weight", danglin
     return dict(zip(G, map(float, largest / norm)))
 
 
-def pagerank_scipy(
+def _pagerank_scipy(
     G,
     alpha=0.85,
     personalization=None,
@@ -392,7 +384,7 @@ def pagerank_scipy(
       The "personalization vector" consisting of a dictionary with a
       key some subset of graph nodes and personalization value each of those.
       At least one personalization value must be non-zero.
-      If not specfiied, a nodes personalization value will be zero.
+      If not specified, a nodes personalization value will be zero.
       By default, a uniform distribution is used.
 
     max_iter : integer, optional
@@ -400,6 +392,7 @@ def pagerank_scipy(
 
     tol : float, optional
       Error tolerance used to check convergence in power method solver.
+      The iteration will stop after a tolerance of ``len(G) * tol`` is reached.
 
     nstart : dictionary, optional
       Starting value of PageRank iteration for each node.
@@ -423,8 +416,9 @@ def pagerank_scipy(
 
     Examples
     --------
+    >>> from networkx.algorithms.link_analysis.pagerank_alg import _pagerank_scipy
     >>> G = nx.DiGraph(nx.path_graph(4))
-    >>> pr = nx.pagerank_scipy(G, alpha=0.9)
+    >>> pr = _pagerank_scipy(G, alpha=0.9)
 
     Notes
     -----
@@ -437,7 +431,7 @@ def pagerank_scipy(
 
     See Also
     --------
-    pagerank, pagerank_numpy, google_matrix
+    pagerank
 
     Raises
     ------
@@ -455,8 +449,6 @@ def pagerank_scipy(
        The PageRank citation ranking: Bringing order to the Web. 1999
        http://dbpubs.stanford.edu:8090/pub/showDoc.Fulltext?lang=en&doc=1999-66&format=pdf
     """
-    msg = "networkx.pagerank_scipy is deprecated and will be removed in NetworkX 3.0, use networkx.pagerank instead."
-    warn(msg, DeprecationWarning, stacklevel=2)
     import numpy as np
     import scipy as sp
     import scipy.sparse  # call as sp.sparse
diff --git a/networkx/algorithms/link_analysis/tests/test_hits.py b/networkx/algorithms/link_analysis/tests/test_hits.py
index df5f0da..cb9d647 100644
--- a/networkx/algorithms/link_analysis/tests/test_hits.py
+++ b/networkx/algorithms/link_analysis/tests/test_hits.py
@@ -6,7 +6,11 @@ np = pytest.importorskip("numpy")
 sp = pytest.importorskip("scipy")
 import scipy.sparse  # call as sp.sparse
 
-from networkx.algorithms.link_analysis.hits_alg import _hits_python
+from networkx.algorithms.link_analysis.hits_alg import (
+    _hits_numpy,
+    _hits_python,
+    _hits_scipy,
+)
 
 # Example from
 # A. Langville and C. Meyer, "A survey of eigenvector methods of web
@@ -16,7 +20,6 @@ from networkx.algorithms.link_analysis.hits_alg import _hits_python
 class TestHITS:
     @classmethod
     def setup_class(cls):
-
         G = nx.DiGraph()
 
         edges = [(1, 3), (1, 5), (2, 1), (3, 5), (5, 4), (5, 3), (6, 5)]
@@ -32,13 +35,13 @@ class TestHITS:
 
     def test_hits_numpy(self):
         G = self.G
-        h, a = nx.hits_numpy(G)
+        h, a = _hits_numpy(G)
         for n in G:
             assert h[n] == pytest.approx(G.h[n], abs=1e-4)
         for n in G:
             assert a[n] == pytest.approx(G.a[n], abs=1e-4)
 
-    @pytest.mark.parametrize("hits_alg", (nx.hits, nx.hits_scipy, _hits_python))
+    @pytest.mark.parametrize("hits_alg", (nx.hits, _hits_python, _hits_scipy))
     def test_hits(self, hits_alg):
         G = self.G
         h, a = hits_alg(G, tol=1.0e-08)
@@ -56,34 +59,21 @@ class TestHITS:
     def test_empty(self):
         G = nx.Graph()
         assert nx.hits(G) == ({}, {})
-        assert nx.hits_numpy(G) == ({}, {})
+        assert _hits_numpy(G) == ({}, {})
         assert _hits_python(G) == ({}, {})
-        assert nx.hits_scipy(G) == ({}, {})
-        assert nx.authority_matrix(G).shape == (0, 0)
-        assert nx.hub_matrix(G).shape == (0, 0)
+        assert _hits_scipy(G) == ({}, {})
 
     def test_hits_not_convergent(self):
         G = nx.path_graph(50)
         with pytest.raises(nx.PowerIterationFailedConvergence):
-            nx.hits_scipy(G, max_iter=1)
+            _hits_scipy(G, max_iter=1)
         with pytest.raises(nx.PowerIterationFailedConvergence):
             _hits_python(G, max_iter=1)
         with pytest.raises(nx.PowerIterationFailedConvergence):
-            nx.hits_scipy(G, max_iter=0)
+            _hits_scipy(G, max_iter=0)
         with pytest.raises(nx.PowerIterationFailedConvergence):
             _hits_python(G, max_iter=0)
         with pytest.raises(ValueError):
             nx.hits(G, max_iter=0)
         with pytest.raises(sp.sparse.linalg.ArpackNoConvergence):
             nx.hits(G, max_iter=1)
-
-
-@pytest.mark.parametrize("hits_alg", (nx.hits_numpy, nx.hits_scipy))
-def test_deprecation_warnings(hits_alg):
-    """Make sure deprecation warnings are raised.
-
-    To be removed when deprecations expire.
-    """
-    G = nx.DiGraph(nx.path_graph(4))
-    with pytest.warns(DeprecationWarning):
-        hits_alg(G)
diff --git a/networkx/algorithms/link_analysis/tests/test_pagerank.py b/networkx/algorithms/link_analysis/tests/test_pagerank.py
index 4c9722f..6a30f0c 100644
--- a/networkx/algorithms/link_analysis/tests/test_pagerank.py
+++ b/networkx/algorithms/link_analysis/tests/test_pagerank.py
@@ -3,11 +3,16 @@ import random
 import pytest
 
 import networkx as nx
+from networkx.classes.tests import dispatch_interface
 
 np = pytest.importorskip("numpy")
 pytest.importorskip("scipy")
 
-from networkx.algorithms.link_analysis.pagerank_alg import _pagerank_python
+from networkx.algorithms.link_analysis.pagerank_alg import (
+    _pagerank_numpy,
+    _pagerank_python,
+    _pagerank_scipy,
+)
 
 # Example from
 # A. Langville and C. Meyer, "A survey of eigenvector methods of web
@@ -74,19 +79,22 @@ class TestPageRank:
 
     def test_numpy_pagerank(self):
         G = self.G
-        p = nx.pagerank_numpy(G, alpha=0.9)
+        p = _pagerank_numpy(G, alpha=0.9)
         for n in G:
             assert p[n] == pytest.approx(G.pagerank[n], abs=1e-4)
 
-    def test_google_matrix(self):
-        G = self.G
+    # This additionally tests the @nx._dispatch mechanism, treating
+    # nx.google_matrix as if it were a re-implementation from another package
+    @pytest.mark.parametrize("wrapper", [lambda x: x, dispatch_interface.convert])
+    def test_google_matrix(self, wrapper):
+        G = wrapper(self.G)
         M = nx.google_matrix(G, alpha=0.9, nodelist=sorted(G))
         _, ev = np.linalg.eig(M.T)
-        p = np.array(ev[:, 0] / ev[:, 0].sum())[:, 0]
-        for (a, b) in zip(p, self.G.pagerank.values()):
+        p = ev[:, 0] / ev[:, 0].sum()
+        for a, b in zip(p, self.G.pagerank.values()):
             assert a == pytest.approx(b, abs=1e-7)
 
-    @pytest.mark.parametrize("alg", (nx.pagerank, _pagerank_python, nx.pagerank_numpy))
+    @pytest.mark.parametrize("alg", (nx.pagerank, _pagerank_python, _pagerank_numpy))
     def test_personalization(self, alg):
         G = nx.complete_graph(4)
         personalize = {0: 1, 1: 1, 2: 4, 3: 4}
@@ -153,7 +161,7 @@ class TestPageRank:
                 else:
                     assert M2[i, j] == pytest.approx(M1[i, j], abs=1e-4)
 
-    @pytest.mark.parametrize("alg", (nx.pagerank, _pagerank_python, nx.pagerank_numpy))
+    @pytest.mark.parametrize("alg", (nx.pagerank, _pagerank_python, _pagerank_numpy))
     def test_dangling_pagerank(self, alg):
         pr = alg(self.G, dangling=self.dangling_edges)
         for n in self.G:
@@ -163,7 +171,7 @@ class TestPageRank:
         G = nx.Graph()
         assert nx.pagerank(G) == {}
         assert _pagerank_python(G) == {}
-        assert nx.pagerank_numpy(G) == {}
+        assert _pagerank_numpy(G) == {}
         assert nx.google_matrix(G).shape == (0, 0)
 
     @pytest.mark.parametrize("alg", (nx.pagerank, _pagerank_python))
@@ -184,37 +192,26 @@ class TestPageRank:
 class TestPageRankScipy(TestPageRank):
     def test_scipy_pagerank(self):
         G = self.G
-        p = nx.pagerank_scipy(G, alpha=0.9, tol=1.0e-08)
+        p = _pagerank_scipy(G, alpha=0.9, tol=1.0e-08)
         for n in G:
             assert p[n] == pytest.approx(G.pagerank[n], abs=1e-4)
         personalize = {n: random.random() for n in G}
-        p = nx.pagerank_scipy(G, alpha=0.9, tol=1.0e-08, personalization=personalize)
+        p = _pagerank_scipy(G, alpha=0.9, tol=1.0e-08, personalization=personalize)
 
         nstart = {n: random.random() for n in G}
-        p = nx.pagerank_scipy(G, alpha=0.9, tol=1.0e-08, nstart=nstart)
+        p = _pagerank_scipy(G, alpha=0.9, tol=1.0e-08, nstart=nstart)
         for n in G:
             assert p[n] == pytest.approx(G.pagerank[n], abs=1e-4)
 
     def test_scipy_pagerank_max_iter(self):
         with pytest.raises(nx.PowerIterationFailedConvergence):
-            nx.pagerank_scipy(self.G, max_iter=0)
+            _pagerank_scipy(self.G, max_iter=0)
 
     def test_dangling_scipy_pagerank(self):
-        pr = nx.pagerank_scipy(self.G, dangling=self.dangling_edges)
+        pr = _pagerank_scipy(self.G, dangling=self.dangling_edges)
         for n in self.G:
             assert pr[n] == pytest.approx(self.G.dangling_pagerank[n], abs=1e-4)
 
     def test_empty_scipy(self):
         G = nx.Graph()
-        assert nx.pagerank_scipy(G) == {}
-
-
-@pytest.mark.parametrize("pagerank_alg", (nx.pagerank_numpy, nx.pagerank_scipy))
-def test_deprecation_warnings(pagerank_alg):
-    """Make sure deprecation warnings are raised.
-
-    To be removed when deprecations expire.
-    """
-    G = nx.DiGraph(nx.path_graph(4))
-    with pytest.warns(DeprecationWarning):
-        pagerank_alg(G, alpha=0.9)
+        assert _pagerank_scipy(G) == {}
diff --git a/networkx/algorithms/link_prediction.py b/networkx/algorithms/link_prediction.py
index de29e24..771ce79 100644
--- a/networkx/algorithms/link_prediction.py
+++ b/networkx/algorithms/link_prediction.py
@@ -94,6 +94,7 @@ def resource_allocation_index(G, ebunch=None):
     return _apply_prediction(G, predict, ebunch)
 
 
+@nx._dispatch
 @not_implemented_for("directed")
 @not_implemented_for("multigraph")
 def jaccard_coefficient(G, ebunch=None):
diff --git a/networkx/algorithms/lowest_common_ancestors.py b/networkx/algorithms/lowest_common_ancestors.py
index 68aaf7d..9c40758 100644
--- a/networkx/algorithms/lowest_common_ancestors.py
+++ b/networkx/algorithms/lowest_common_ancestors.py
@@ -14,7 +14,6 @@ __all__ = [
 
 
 @not_implemented_for("undirected")
-@not_implemented_for("multigraph")
 def all_pairs_lowest_common_ancestor(G, pairs=None):
     """Return the lowest common ancestor of all pairs or the provided pairs
 
@@ -112,7 +111,6 @@ def all_pairs_lowest_common_ancestor(G, pairs=None):
 
 
 @not_implemented_for("undirected")
-@not_implemented_for("multigraph")
 def lowest_common_ancestor(G, node1, node2, default=None):
     """Compute the lowest common ancestor of the given pair of nodes.
 
@@ -150,7 +148,6 @@ def lowest_common_ancestor(G, node1, node2, default=None):
 
 
 @not_implemented_for("undirected")
-@not_implemented_for("multigraph")
 def tree_all_pairs_lowest_common_ancestor(G, root=None, pairs=None):
     r"""Yield the lowest common ancestor for sets of pairs in a tree.
 
@@ -237,7 +234,8 @@ def tree_all_pairs_lowest_common_ancestor(G, root=None, pairs=None):
                     msg = "No root specified and tree has multiple sources."
                     raise nx.NetworkXError(msg)
                 root = n
-            elif deg > 1:
+            # checking deg>1 is not sufficient for MultiDiGraphs
+            elif deg > 1 and len(G.pred[n]) > 1:
                 msg = "Tree LCA only defined on trees; use DAG routine."
                 raise nx.NetworkXError(msg)
     if root is None:
diff --git a/networkx/algorithms/matching.py b/networkx/algorithms/matching.py
index a4e7e9c..1a94f20 100644
--- a/networkx/algorithms/matching.py
+++ b/networkx/algorithms/matching.py
@@ -255,7 +255,7 @@ def is_perfect_matching(G, matching):
 
 @not_implemented_for("multigraph")
 @not_implemented_for("directed")
-def min_weight_matching(G, maxcardinality=None, weight="weight"):
+def min_weight_matching(G, weight="weight"):
     """Computing a minimum-weight maximal matching of G.
 
     Use the maximum-weight algorithm with edge weights subtracted
@@ -290,15 +290,6 @@ def min_weight_matching(G, maxcardinality=None, weight="weight"):
     G : NetworkX graph
       Undirected graph
 
-    maxcardinality: bool
-        .. deprecated:: 2.8
-            The `maxcardinality` parameter will be removed in v3.0.
-            It doesn't make sense to set it to False when looking for
-            a min weight matching because then we just return no edges.
-
-        If maxcardinality is True, compute the maximum-cardinality matching
-        with minimum weight among all maximum-cardinality matchings.
-
     weight: string, optional (default='weight')
        Edge data key corresponding to the edge weight.
        If key not found, uses 1 as weight.
@@ -312,12 +303,6 @@ def min_weight_matching(G, maxcardinality=None, weight="weight"):
     --------
     max_weight_matching
     """
-    if maxcardinality not in (True, None):
-        raise nx.NetworkXError(
-            "The argument maxcardinality does not make sense "
-            "in the context of minimum weight matchings."
-            "It is deprecated and will be removed in v3.0."
-        )
     if len(G.edges) == 0:
         return max_weight_matching(G, maxcardinality=True, weight=weight)
     G_edges = G.edges(data=weight, default=1)
@@ -406,8 +391,6 @@ def max_weight_matching(G, maxcardinality=False, weight="weight"):
     class NoNode:
         """Dummy value which is different from any node."""
 
-        pass
-
     class Blossom:
         """Representation of a non-trivial blossom or sub-blossom."""
 
@@ -821,7 +804,7 @@ def max_weight_matching(G, maxcardinality=False, weight="weight"):
     # Swap matched/unmatched edges over an alternating path between two
     # single vertices. The augmenting path runs through S-vertices v and w.
     def augmentMatching(v, w):
-        for (s, j) in ((v, w), (w, v)):
+        for s, j in ((v, w), (w, v)):
             # Match vertex s to vertex j. Then trace back from s
             # until we find a single vertex, swapping matched and unmatched
             # edges as we go.
@@ -878,7 +861,7 @@ def max_weight_matching(G, maxcardinality=False, weight="weight"):
                 jblossoms.append(blossomparent[jblossoms[-1]])
             iblossoms.reverse()
             jblossoms.reverse()
-            for (bi, bj) in zip(iblossoms, jblossoms):
+            for bi, bj in zip(iblossoms, jblossoms):
                 if bi != bj:
                     break
                 s += 2 * blossomdual[bi]
@@ -893,13 +876,12 @@ def max_weight_matching(G, maxcardinality=False, weight="weight"):
         for b in blossomdual:
             if blossomdual[b] > 0:
                 assert len(b.edges) % 2 == 1
-                for (i, j) in b.edges[1::2]:
+                for i, j in b.edges[1::2]:
                     assert mate[i] == j and mate[j] == i
         # Ok.
 
     # Main loop: continue until no further improvement is possible.
     while 1:
-
         # Each iteration of this loop is a "stage".
         # A stage finds an augmenting path and uses that to improve
         # the matching.
@@ -928,7 +910,6 @@ def max_weight_matching(G, maxcardinality=False, weight="weight"):
         # Loop until we succeed in augmenting the matching.
         augmented = 0
         while 1:
-
             # Each iteration of this loop is a "substage".
             # A substage tries to find an augmenting path;
             # if found, the path is used to improve the matching and
@@ -939,7 +920,6 @@ def max_weight_matching(G, maxcardinality=False, weight="weight"):
             # Continue labeling until all vertices which are reachable
             # through an alternating path have got a label.
             while queue and not augmented:
-
                 # Take an S vertex from the queue.
                 v = queue.pop()
                 assert label[inblossom[v]] == 1
@@ -1057,7 +1037,7 @@ def max_weight_matching(G, maxcardinality=False, weight="weight"):
             if deltatype == -1:
                 # No further improvement possible; max-cardinality optimum
                 # reached. Do a final delta update to make the optimum
-                # verifyable.
+                # verifiable.
                 assert maxcardinality
                 deltatype = 1
                 delta = max(0, min(dualvar.values()))
diff --git a/networkx/algorithms/minors/contraction.py b/networkx/algorithms/minors/contraction.py
index c1e9adf..ddaf9b2 100644
--- a/networkx/algorithms/minors/contraction.py
+++ b/networkx/algorithms/minors/contraction.py
@@ -308,7 +308,7 @@ def quotient_graph(
     # If the partition is a dict, it is assumed to be one where the keys are
     # user-defined block labels, and values are block lists, tuples or sets.
     if isinstance(partition, dict):
-        partition = [block for block in partition.values()]
+        partition = list(partition.values())
 
     # If the user provided partition as a collection of sets. Then we
     # need to check if partition covers all of G nodes. If the answer
@@ -344,9 +344,12 @@ def _quotient_graph(
 
         def node_data(b):
             S = G.subgraph(b)
-            return dict(
-                graph=S, nnodes=len(S), nedges=S.number_of_edges(), density=density(S)
-            )
+            return {
+                "graph": S,
+                "nnodes": len(S),
+                "nedges": S.number_of_edges(),
+                "density": density(S),
+            }
 
     # Each block of the partition becomes a node in the quotient graph.
     partition = [frozenset(b) for b in partition]
@@ -505,7 +508,7 @@ def contracted_nodes(G, u, v, self_loops=True, copy=True):
     v_data = H.nodes[v]
     H.remove_node(v)
 
-    for (prev_w, prev_x, d) in edges_to_remap:
+    for prev_w, prev_x, d in edges_to_remap:
         w = prev_w if prev_w != v else u
         x = prev_x if prev_x != v else u
 
diff --git a/networkx/algorithms/minors/tests/test_contraction.py b/networkx/algorithms/minors/tests/test_contraction.py
index 1dc4978..9a0e59f 100644
--- a/networkx/algorithms/minors/tests/test_contraction.py
+++ b/networkx/algorithms/minors/tests/test_contraction.py
@@ -352,7 +352,7 @@ class TestContraction:
         expected = nx.relabel_nodes(expected, {1: 2, 2: 3})
         expected.add_edge(0, 0)
         cdict = {1: {"baz": "xyzzy"}}
-        expected.nodes[0].update(dict(foo="bar", contraction=cdict))
+        expected.nodes[0].update({"foo": "bar", "contraction": cdict})
         assert nx.is_isomorphic(actual, expected)
         assert actual.nodes == expected.nodes
 
diff --git a/networkx/algorithms/node_classification.py b/networkx/algorithms/node_classification.py
new file mode 100644
index 0000000..2875db0
--- /dev/null
+++ b/networkx/algorithms/node_classification.py
@@ -0,0 +1,218 @@
+""" This module provides the functions for node classification problem.
+
+The functions in this module are not imported
+into the top level `networkx` namespace.
+You can access these functions by importing
+the `networkx.algorithms.node_classification` modules,
+then accessing the functions as attributes of `node_classification`.
+For example:
+
+  >>> from networkx.algorithms import node_classification
+  >>> G = nx.path_graph(4)
+  >>> G.edges()
+  EdgeView([(0, 1), (1, 2), (2, 3)])
+  >>> G.nodes[0]["label"] = "A"
+  >>> G.nodes[3]["label"] = "B"
+  >>> node_classification.harmonic_function(G)
+  ['A', 'A', 'B', 'B']
+
+References
+----------
+Zhu, X., Ghahramani, Z., & Lafferty, J. (2003, August).
+Semi-supervised learning using gaussian fields and harmonic functions.
+In ICML (Vol. 3, pp. 912-919).
+"""
+import networkx as nx
+
+__all__ = ["harmonic_function", "local_and_global_consistency"]
+
+
+@nx.utils.not_implemented_for("directed")
+def harmonic_function(G, max_iter=30, label_name="label"):
+    """Node classification by Harmonic function
+
+    Function for computing Harmonic function algorithm by Zhu et al.
+
+    Parameters
+    ----------
+    G : NetworkX Graph
+    max_iter : int
+        maximum number of iterations allowed
+    label_name : string
+        name of target labels to predict
+
+    Returns
+    -------
+    predicted : list
+        List of length ``len(G)`` with the predicted labels for each node.
+
+    Raises
+    ------
+    NetworkXError
+        If no nodes in `G` have attribute `label_name`.
+
+    Examples
+    --------
+    >>> from networkx.algorithms import node_classification
+    >>> G = nx.path_graph(4)
+    >>> G.nodes[0]["label"] = "A"
+    >>> G.nodes[3]["label"] = "B"
+    >>> G.nodes(data=True)
+    NodeDataView({0: {'label': 'A'}, 1: {}, 2: {}, 3: {'label': 'B'}})
+    >>> G.edges()
+    EdgeView([(0, 1), (1, 2), (2, 3)])
+    >>> predicted = node_classification.harmonic_function(G)
+    >>> predicted
+    ['A', 'A', 'B', 'B']
+
+    References
+    ----------
+    Zhu, X., Ghahramani, Z., & Lafferty, J. (2003, August).
+    Semi-supervised learning using gaussian fields and harmonic functions.
+    In ICML (Vol. 3, pp. 912-919).
+    """
+    import numpy as np
+    import scipy as sp
+    import scipy.sparse  # call as sp.sparse
+
+    X = nx.to_scipy_sparse_array(G)  # adjacency matrix
+    labels, label_dict = _get_label_info(G, label_name)
+
+    if labels.shape[0] == 0:
+        raise nx.NetworkXError(
+            f"No node on the input graph is labeled by '{label_name}'."
+        )
+
+    n_samples = X.shape[0]
+    n_classes = label_dict.shape[0]
+    F = np.zeros((n_samples, n_classes))
+
+    # Build propagation matrix
+    degrees = X.sum(axis=0)
+    degrees[degrees == 0] = 1  # Avoid division by 0
+    # TODO: csr_array
+    D = sp.sparse.csr_array(sp.sparse.diags((1.0 / degrees), offsets=0))
+    P = (D @ X).tolil()
+    P[labels[:, 0]] = 0  # labels[:, 0] indicates IDs of labeled nodes
+    # Build base matrix
+    B = np.zeros((n_samples, n_classes))
+    B[labels[:, 0], labels[:, 1]] = 1
+
+    for _ in range(max_iter):
+        F = (P @ F) + B
+
+    return label_dict[np.argmax(F, axis=1)].tolist()
+
+
+@nx.utils.not_implemented_for("directed")
+def local_and_global_consistency(G, alpha=0.99, max_iter=30, label_name="label"):
+    """Node classification by Local and Global Consistency
+
+    Function for computing Local and global consistency algorithm by Zhou et al.
+
+    Parameters
+    ----------
+    G : NetworkX Graph
+    alpha : float
+        Clamping factor
+    max_iter : int
+        Maximum number of iterations allowed
+    label_name : string
+        Name of target labels to predict
+
+    Returns
+    -------
+    predicted : list
+        List of length ``len(G)`` with the predicted labels for each node.
+
+    Raises
+    ------
+    NetworkXError
+        If no nodes in `G` have attribute `label_name`.
+
+    Examples
+    --------
+    >>> from networkx.algorithms import node_classification
+    >>> G = nx.path_graph(4)
+    >>> G.nodes[0]["label"] = "A"
+    >>> G.nodes[3]["label"] = "B"
+    >>> G.nodes(data=True)
+    NodeDataView({0: {'label': 'A'}, 1: {}, 2: {}, 3: {'label': 'B'}})
+    >>> G.edges()
+    EdgeView([(0, 1), (1, 2), (2, 3)])
+    >>> predicted = node_classification.local_and_global_consistency(G)
+    >>> predicted
+    ['A', 'A', 'B', 'B']
+
+    References
+    ----------
+    Zhou, D., Bousquet, O., Lal, T. N., Weston, J., & Schölkopf, B. (2004).
+    Learning with local and global consistency.
+    Advances in neural information processing systems, 16(16), 321-328.
+    """
+    import numpy as np
+    import scipy as sp
+    import scipy.sparse  # call as sp.sparse
+
+    X = nx.to_scipy_sparse_array(G)  # adjacency matrix
+    labels, label_dict = _get_label_info(G, label_name)
+
+    if labels.shape[0] == 0:
+        raise nx.NetworkXError(
+            f"No node on the input graph is labeled by '{label_name}'."
+        )
+
+    n_samples = X.shape[0]
+    n_classes = label_dict.shape[0]
+    F = np.zeros((n_samples, n_classes))
+
+    # Build propagation matrix
+    degrees = X.sum(axis=0)
+    degrees[degrees == 0] = 1  # Avoid division by 0
+    # TODO: csr_array
+    D2 = np.sqrt(sp.sparse.csr_array(sp.sparse.diags((1.0 / degrees), offsets=0)))
+    P = alpha * ((D2 @ X) @ D2)
+    # Build base matrix
+    B = np.zeros((n_samples, n_classes))
+    B[labels[:, 0], labels[:, 1]] = 1 - alpha
+
+    for _ in range(max_iter):
+        F = (P @ F) + B
+
+    return label_dict[np.argmax(F, axis=1)].tolist()
+
+
+def _get_label_info(G, label_name):
+    """Get and return information of labels from the input graph
+
+    Parameters
+    ----------
+    G : Network X graph
+    label_name : string
+        Name of the target label
+
+    Returns
+    ----------
+    labels : numpy array, shape = [n_labeled_samples, 2]
+        Array of pairs of labeled node ID and label ID
+    label_dict : numpy array, shape = [n_classes]
+        Array of labels
+        i-th element contains the label corresponding label ID `i`
+    """
+    import numpy as np
+
+    labels = []
+    label_to_id = {}
+    lid = 0
+    for i, n in enumerate(G.nodes(data=True)):
+        if label_name in n[1]:
+            label = n[1][label_name]
+            if label not in label_to_id:
+                label_to_id[label] = lid
+                lid += 1
+            labels.append([i, label_to_id[label]])
+    labels = np.array(labels)
+    label_dict = np.array(
+        [label for label, _ in sorted(label_to_id.items(), key=lambda x: x[1])]
+    )
+    return (labels, label_dict)
diff --git a/networkx/algorithms/node_classification/__init__.py b/networkx/algorithms/node_classification/__init__.py
deleted file mode 100644
index 23fa264..0000000
--- a/networkx/algorithms/node_classification/__init__.py
+++ /dev/null
@@ -1,52 +0,0 @@
-""" This module provides the functions for node classification problem.
-
-The functions in this module are not imported
-into the top level `networkx` namespace.
-You can access these functions by importing
-the `networkx.algorithms.node_classification` modules,
-then accessing the functions as attributes of `node_classification`.
-For example:
-
-  >>> from networkx.algorithms import node_classification
-  >>> G = nx.path_graph(4)
-  >>> G.edges()
-  EdgeView([(0, 1), (1, 2), (2, 3)])
-  >>> G.nodes[0]["label"] = "A"
-  >>> G.nodes[3]["label"] = "B"
-  >>> node_classification.harmonic_function(G)
-  ['A', 'A', 'B', 'B']
-
-"""
-
-
-def __getattr__(name):
-    if name in ("hmn", "lgc"):
-        import warnings
-        import importlib
-
-        fn_name = (
-            "harmonic_function" if name == "hmn" else "local_and_global_consistency"
-        )
-        msg = (
-            f"The {name}  module is deprecated and will be removed in version 3.0.\n"
-            f"Access `{fn_name}` directly from `node_classification`:\n\n"
-            "    from networkx.algorithms import node_classification\n"
-            f"    node_classification.{fn_name}\n"
-        )
-        warnings.warn(msg, category=DeprecationWarning, stacklevel=2)
-        return importlib.import_module(
-            f".{name}", "networkx.algorithms.node_classification"
-        )
-    if name == "harmonic_function":
-        from .hmn import harmonic_function
-
-        return harmonic_function
-    if name == "local_and_global_consistency":
-        from .lgc import local_and_global_consistency
-
-        return local_and_global_consistency
-    raise AttributeError(f"module {__name__} has no attribute {name}")
-
-
-def __dir__():
-    return ["harmonic_function", "local_and_global_consistency"]
diff --git a/networkx/algorithms/node_classification/hmn.py b/networkx/algorithms/node_classification/hmn.py
deleted file mode 100644
index 727ee36..0000000
--- a/networkx/algorithms/node_classification/hmn.py
+++ /dev/null
@@ -1,88 +0,0 @@
-"""Function for computing Harmonic function algorithm by Zhu et al.
-
-References
-----------
-Zhu, X., Ghahramani, Z., & Lafferty, J. (2003, August).
-Semi-supervised learning using gaussian fields and harmonic functions.
-In ICML (Vol. 3, pp. 912-919).
-"""
-import networkx as nx
-from networkx.algorithms.node_classification.utils import _get_label_info
-from networkx.utils.decorators import not_implemented_for
-
-__all__ = ["harmonic_function"]
-
-
-@not_implemented_for("directed")
-def harmonic_function(G, max_iter=30, label_name="label"):
-    """Node classification by Harmonic function
-
-    Parameters
-    ----------
-    G : NetworkX Graph
-    max_iter : int
-        maximum number of iterations allowed
-    label_name : string
-        name of target labels to predict
-
-    Returns
-    -------
-    predicted : list
-        List of length ``len(G)`` with the predicted labels for each node.
-
-    Raises
-    ------
-    NetworkXError
-        If no nodes in `G` have attribute `label_name`.
-
-    Examples
-    --------
-    >>> from networkx.algorithms import node_classification
-    >>> G = nx.path_graph(4)
-    >>> G.nodes[0]["label"] = "A"
-    >>> G.nodes[3]["label"] = "B"
-    >>> G.nodes(data=True)
-    NodeDataView({0: {'label': 'A'}, 1: {}, 2: {}, 3: {'label': 'B'}})
-    >>> G.edges()
-    EdgeView([(0, 1), (1, 2), (2, 3)])
-    >>> predicted = node_classification.harmonic_function(G)
-    >>> predicted
-    ['A', 'A', 'B', 'B']
-
-    References
-    ----------
-    Zhu, X., Ghahramani, Z., & Lafferty, J. (2003, August).
-    Semi-supervised learning using gaussian fields and harmonic functions.
-    In ICML (Vol. 3, pp. 912-919).
-    """
-    import numpy as np
-    import scipy as sp
-    import scipy.sparse  # call as sp.sparse
-
-    X = nx.to_scipy_sparse_array(G)  # adjacency matrix
-    labels, label_dict = _get_label_info(G, label_name)
-
-    if labels.shape[0] == 0:
-        raise nx.NetworkXError(
-            f"No node on the input graph is labeled by '{label_name}'."
-        )
-
-    n_samples = X.shape[0]
-    n_classes = label_dict.shape[0]
-    F = np.zeros((n_samples, n_classes))
-
-    # Build propagation matrix
-    degrees = X.sum(axis=0)
-    degrees[degrees == 0] = 1  # Avoid division by 0
-    # TODO: csr_array
-    D = sp.sparse.csr_array(sp.sparse.diags((1.0 / degrees), offsets=0))
-    P = (D @ X).tolil()
-    P[labels[:, 0]] = 0  # labels[:, 0] indicates IDs of labeled nodes
-    # Build base matrix
-    B = np.zeros((n_samples, n_classes))
-    B[labels[:, 0], labels[:, 1]] = 1
-
-    for _ in range(max_iter):
-        F = (P @ F) + B
-
-    return label_dict[np.argmax(F, axis=1)].tolist()
diff --git a/networkx/algorithms/node_classification/lgc.py b/networkx/algorithms/node_classification/lgc.py
deleted file mode 100644
index 5324470..0000000
--- a/networkx/algorithms/node_classification/lgc.py
+++ /dev/null
@@ -1,89 +0,0 @@
-"""Function for computing Local and global consistency algorithm by Zhou et al.
-
-References
-----------
-Zhou, D., Bousquet, O., Lal, T. N., Weston, J., & Schölkopf, B. (2004).
-Learning with local and global consistency.
-Advances in neural information processing systems, 16(16), 321-328.
-"""
-import networkx as nx
-from networkx.algorithms.node_classification.utils import _get_label_info
-from networkx.utils.decorators import not_implemented_for
-
-__all__ = ["local_and_global_consistency"]
-
-
-@not_implemented_for("directed")
-def local_and_global_consistency(G, alpha=0.99, max_iter=30, label_name="label"):
-    """Node classification by Local and Global Consistency
-
-    Parameters
-    ----------
-    G : NetworkX Graph
-    alpha : float
-        Clamping factor
-    max_iter : int
-        Maximum number of iterations allowed
-    label_name : string
-        Name of target labels to predict
-
-    Returns
-    -------
-    predicted : list
-        List of length ``len(G)`` with the predicted labels for each node.
-
-    Raises
-    ------
-    NetworkXError
-        If no nodes in `G` have attribute `label_name`.
-
-    Examples
-    --------
-    >>> from networkx.algorithms import node_classification
-    >>> G = nx.path_graph(4)
-    >>> G.nodes[0]["label"] = "A"
-    >>> G.nodes[3]["label"] = "B"
-    >>> G.nodes(data=True)
-    NodeDataView({0: {'label': 'A'}, 1: {}, 2: {}, 3: {'label': 'B'}})
-    >>> G.edges()
-    EdgeView([(0, 1), (1, 2), (2, 3)])
-    >>> predicted = node_classification.local_and_global_consistency(G)
-    >>> predicted
-    ['A', 'A', 'B', 'B']
-
-    References
-    ----------
-    Zhou, D., Bousquet, O., Lal, T. N., Weston, J., & Schölkopf, B. (2004).
-    Learning with local and global consistency.
-    Advances in neural information processing systems, 16(16), 321-328.
-    """
-    import numpy as np
-    import scipy as sp
-    import scipy.sparse  # call as sp.sparse
-
-    X = nx.to_scipy_sparse_array(G)  # adjacency matrix
-    labels, label_dict = _get_label_info(G, label_name)
-
-    if labels.shape[0] == 0:
-        raise nx.NetworkXError(
-            f"No node on the input graph is labeled by '{label_name}'."
-        )
-
-    n_samples = X.shape[0]
-    n_classes = label_dict.shape[0]
-    F = np.zeros((n_samples, n_classes))
-
-    # Build propagation matrix
-    degrees = X.sum(axis=0)
-    degrees[degrees == 0] = 1  # Avoid division by 0
-    # TODO: csr_array
-    D2 = np.sqrt(sp.sparse.csr_array(sp.sparse.diags((1.0 / degrees), offsets=0)))
-    P = alpha * ((D2 @ X) @ D2)
-    # Build base matrix
-    B = np.zeros((n_samples, n_classes))
-    B[labels[:, 0], labels[:, 1]] = 1 - alpha
-
-    for _ in range(max_iter):
-        F = (P @ F) + B
-
-    return label_dict[np.argmax(F, axis=1)].tolist()
diff --git a/networkx/algorithms/node_classification/utils.py b/networkx/algorithms/node_classification/utils.py
deleted file mode 100644
index f7d7ac2..0000000
--- a/networkx/algorithms/node_classification/utils.py
+++ /dev/null
@@ -1,34 +0,0 @@
-def _get_label_info(G, label_name):
-    """Get and return information of labels from the input graph
-
-    Parameters
-    ----------
-    G : Network X graph
-    label_name : string
-        Name of the target label
-
-    Returns
-    ----------
-    labels : numpy array, shape = [n_labeled_samples, 2]
-        Array of pairs of labeled node ID and label ID
-    label_dict : numpy array, shape = [n_classes]
-        Array of labels
-        i-th element contains the label corresponding label ID `i`
-    """
-    import numpy as np
-
-    labels = []
-    label_to_id = {}
-    lid = 0
-    for i, n in enumerate(G.nodes(data=True)):
-        if label_name in n[1]:
-            label = n[1][label_name]
-            if label not in label_to_id:
-                label_to_id[label] = lid
-                lid += 1
-            labels.append([i, label_to_id[label]])
-    labels = np.array(labels)
-    label_dict = np.array(
-        [label for label, _ in sorted(label_to_id.items(), key=lambda x: x[1])]
-    )
-    return (labels, label_dict)
diff --git a/networkx/algorithms/operators/all.py b/networkx/algorithms/operators/all.py
index 7d7c19c..2dd4643 100644
--- a/networkx/algorithms/operators/all.py
+++ b/networkx/algorithms/operators/all.py
@@ -1,26 +1,27 @@
 """Operations on many graphs.
 """
-from itertools import zip_longest
+from itertools import chain, repeat
 
 import networkx as nx
 
 __all__ = ["union_all", "compose_all", "disjoint_union_all", "intersection_all"]
 
 
-def union_all(graphs, rename=(None,)):
+def union_all(graphs, rename=()):
     """Returns the union of all graphs.
 
     The graphs must be disjoint, otherwise an exception is raised.
 
     Parameters
     ----------
-    graphs : list of graphs
-       List of NetworkX graphs
+    graphs : iterable
+       Iterable of NetworkX graphs
 
-    rename : bool , default=(None, None)
-       Node names of G and H can be changed by specifying the tuple
+    rename : iterable , optional
+       Node names of graphs can be changed by specifying the tuple
        rename=('G-','H-') (for example).  Node "u" in G is then renamed
-       "G-u" and "v" in H is renamed "H-v".
+       "G-u" and "v" in H is renamed "H-v". Infinite generators (like itertools.count)
+       are also supported.
 
     Returns
     -------
@@ -45,16 +46,8 @@ def union_all(graphs, rename=(None,)):
     union
     disjoint_union_all
     """
-    # collect the graphs in case an iterator was passed
-    graphs = list(graphs)
-
-    if not graphs:
-        raise ValueError("cannot apply union_all to an empty list")
-
-    U = graphs[0]
-
-    if any(G.is_multigraph() != U.is_multigraph() for G in graphs):
-        raise nx.NetworkXError("All graphs must be graphs or multigraphs.")
+    R = None
+    seen_nodes = set()
 
     # rename graph to obtain disjoint node labels
     def add_prefix(graph, prefix):
@@ -62,41 +55,37 @@ def union_all(graphs, rename=(None,)):
             return graph
 
         def label(x):
-            if isinstance(x, str):
-                name = prefix + x
-            else:
-                name = prefix + repr(x)
-            return name
+            return f"{prefix}{x}"
 
         return nx.relabel_nodes(graph, label)
 
-    graphs = [add_prefix(G, name) for G, name in zip_longest(graphs, rename)]
-
-    if sum(len(G) for G in graphs) != len(set().union(*graphs)):
-        raise nx.NetworkXError(
-            "The node sets of the graphs are not disjoint.",
-            "Use appropriate rename"
-            "=(G1prefix,G2prefix,...,GNprefix)"
-            "or use disjoint_union(G1,G2,...,GN).",
-        )
-
-    # Union is the same type as first graph
-    R = U.__class__()
-
-    # add graph attributes, later attributes take precedent over earlier ones
-    for G in graphs:
+    rename = chain(rename, repeat(None))
+    graphs = (add_prefix(G, name) for G, name in zip(graphs, rename))
+
+    for i, G in enumerate(graphs):
+        G_nodes_set = set(G.nodes)
+        if i == 0:
+            # Union is the same type as first graph
+            R = G.__class__()
+        elif G.is_multigraph() != R.is_multigraph():
+            raise nx.NetworkXError("All graphs must be graphs or multigraphs.")
+        elif not seen_nodes.isdisjoint(G_nodes_set):
+            raise nx.NetworkXError(
+                "The node sets of the graphs are not disjoint.",
+                "Use appropriate rename"
+                "=(G1prefix,G2prefix,...,GNprefix)"
+                "or use disjoint_union(G1,G2,...,GN).",
+            )
+
+        seen_nodes |= G_nodes_set
         R.graph.update(G.graph)
-
-    # add nodes and attributes
-    for G in graphs:
         R.add_nodes_from(G.nodes(data=True))
+        R.add_edges_from(
+            G.edges(keys=True, data=True) if G.is_multigraph() else G.edges(data=True)
+        )
 
-    if U.is_multigraph():
-        for G in graphs:
-            R.add_edges_from(G.edges(keys=True, data=True))
-    else:
-        for G in graphs:
-            R.add_edges_from(G.edges(data=True))
+    if R is None:
+        raise ValueError("cannot apply union_all to an empty list")
 
     return R
 
@@ -109,8 +98,8 @@ def disjoint_union_all(graphs):
 
     Parameters
     ----------
-    graphs : list
-       List of NetworkX graphs
+    graphs : iterable
+       Iterable of NetworkX graphs
 
     Returns
     -------
@@ -129,22 +118,15 @@ def disjoint_union_all(graphs):
     If a graph attribute is present in multiple graphs, then the value
     from the last graph in the list with that attribute is used.
     """
-    graphs = list(graphs)
 
-    if not graphs:
-        raise ValueError("cannot apply disjoint_union_all to an empty list")
+    def yield_relabeled(graphs):
+        first_label = 0
+        for G in graphs:
+            yield nx.convert_node_labels_to_integers(G, first_label=first_label)
+            first_label += len(G)
 
-    first_labels = [0]
-    for G in graphs[:-1]:
-        first_labels.append(len(G) + first_labels[-1])
+    R = union_all(yield_relabeled(graphs))
 
-    relabeled = [
-        nx.convert_node_labels_to_integers(G, first_label=first_label)
-        for G, first_label in zip(graphs, first_labels)
-    ]
-    R = union_all(relabeled)
-    for G in graphs:
-        R.graph.update(G.graph)
     return R
 
 
@@ -156,8 +138,8 @@ def compose_all(graphs):
 
     Parameters
     ----------
-    graphs : list
-       List of NetworkX graphs
+    graphs : iterable
+       Iterable of NetworkX graphs
 
     Returns
     -------
@@ -177,30 +159,25 @@ def compose_all(graphs):
     If a graph attribute is present in multiple graphs, then the value
     from the last graph in the list with that attribute is used.
     """
-    graphs = list(graphs)
-
-    if not graphs:
-        raise ValueError("cannot apply compose_all to an empty list")
-
-    U = graphs[0]
+    R = None
 
-    if any(G.is_multigraph() != U.is_multigraph() for G in graphs):
-        raise nx.NetworkXError("All graphs must be graphs or multigraphs.")
-
-    R = U.__class__()
     # add graph attributes, H attributes take precedent over G attributes
-    for G in graphs:
-        R.graph.update(G.graph)
+    for i, G in enumerate(graphs):
+        if i == 0:
+            # create new graph
+            R = G.__class__()
+        elif G.is_multigraph() != R.is_multigraph():
+            raise nx.NetworkXError("All graphs must be graphs or multigraphs.")
 
-    for G in graphs:
+        R.graph.update(G.graph)
         R.add_nodes_from(G.nodes(data=True))
+        R.add_edges_from(
+            G.edges(keys=True, data=True) if G.is_multigraph() else G.edges(data=True)
+        )
+
+    if R is None:
+        raise ValueError("cannot apply compose_all to an empty list")
 
-    if U.is_multigraph():
-        for G in graphs:
-            R.add_edges_from(G.edges(keys=True, data=True))
-    else:
-        for G in graphs:
-            R.add_edges_from(G.edges(data=True))
     return R
 
 
@@ -210,8 +187,8 @@ def intersection_all(graphs):
 
     Parameters
     ----------
-    graphs : list
-       List of NetworkX graphs
+    graphs : iterable
+       Iterable of NetworkX graphs
 
     Returns
     -------
@@ -227,27 +204,28 @@ def intersection_all(graphs):
     Attributes from the graph, nodes, and edges are not copied to the new
     graph.
     """
-    graphs = list(graphs)
-
-    if not graphs:
-        raise ValueError("cannot apply intersection_all to an empty list")
+    R = None
+
+    for i, G in enumerate(graphs):
+        G_nodes_set = set(G.nodes)
+        G_edges_set = set(G.edges(keys=True) if G.is_multigraph() else G.edges())
+        if i == 0:
+            # create new graph
+            R = G.__class__()
+            node_intersection = G_nodes_set
+            edge_intersection = G_edges_set
+        elif G.is_multigraph() != R.is_multigraph():
+            raise nx.NetworkXError("All graphs must be graphs or multigraphs.")
+        else:
+            node_intersection &= G_nodes_set
+            edge_intersection &= G_edges_set
 
-    U = graphs[0]
+        R.graph.update(G.graph)
 
-    if any(G.is_multigraph() != U.is_multigraph() for G in graphs):
-        raise nx.NetworkXError("All graphs must be graphs or multigraphs.")
+    if R is None:
+        raise ValueError("cannot apply intersection_all to an empty list")
 
-    # create new graph
-    node_intersection = set.intersection(*[set(G.nodes) for G in graphs])
-    R = U.__class__()
     R.add_nodes_from(node_intersection)
-
-    if U.is_multigraph():
-        edge_sets = [set(G.edges(keys=True)) for G in graphs]
-    else:
-        edge_sets = [set(G.edges()) for G in graphs]
-
-    edge_intersection = set.intersection(*edge_sets)
     R.add_edges_from(edge_intersection)
 
     return R
diff --git a/networkx/algorithms/operators/binary.py b/networkx/algorithms/operators/binary.py
index 7ea3d77..09f59d1 100644
--- a/networkx/algorithms/operators/binary.py
+++ b/networkx/algorithms/operators/binary.py
@@ -14,7 +14,7 @@ __all__ = [
 ]
 
 
-def union(G, H, rename=(None, None), name=None):
+def union(G, H, rename=()):
     """Combine graphs G and H. The names of nodes must be unique.
 
     A name collision between the graphs will raise an exception.
@@ -27,17 +27,11 @@ def union(G, H, rename=(None, None), name=None):
     G, H : graph
        A NetworkX graph
 
-    rename : tuple , default=(None, None)
+    rename : iterable , optional
        Node names of G and H can be changed by specifying the tuple
        rename=('G-','H-') (for example).  Node "u" in G is then renamed
        "G-u" and "v" in H is renamed "H-v".
 
-    name : string
-       Specify the name for the union graph
-
-       .. deprecated:: 2.7
-           This is deprecated and will be removed in version v3.0.
-
     Returns
     -------
     U : A union graph with the same type as G.
@@ -72,15 +66,6 @@ def union(G, H, rename=(None, None), name=None):
 
 
     """
-    if name is not None:
-        import warnings
-
-        warnings.warn(
-            "name parameter is deprecated and will be removed in version 3.0",
-            DeprecationWarning,
-            stacklevel=2,
-        )
-
     return nx.union_all([G, H], rename)
 
 
@@ -433,11 +418,7 @@ def full_join(G, H, rename=(None, None)):
             return graph
 
         def label(x):
-            if isinstance(x, str):
-                name = prefix + x
-            else:
-                name = prefix + repr(x)
-            return name
+            return f"{prefix}{x}"
 
         return nx.relabel_nodes(graph, label)
 
diff --git a/networkx/algorithms/operators/product.py b/networkx/algorithms/operators/product.py
index 4c56bbe..e89300f 100644
--- a/networkx/algorithms/operators/product.py
+++ b/networkx/algorithms/operators/product.py
@@ -13,6 +13,7 @@ __all__ = [
     "strong_product",
     "power",
     "rooted_product",
+    "corona_product",
 ]
 
 
@@ -20,7 +21,7 @@ def _dict_product(d1, d2):
     return {k: (d1.get(k), d2.get(k)) for k in set(d1) | set(d2)}
 
 
-# Generators for producting graph products
+# Generators for producing graph products
 def _node_product(G, H):
     for u, v in product(G, H):
         yield ((u, v), _dict_product(G.nodes[u], H.nodes[v]))
@@ -109,7 +110,7 @@ def _edges_cross_nodes_and_nodes(G, H):
 
 
 def _init_product_graph(G, H):
-    if not G.is_directed() == H.is_directed():
+    if G.is_directed() != H.is_directed():
         msg = "G and H must be both directed or both undirected"
         raise nx.NetworkXError(msg)
     if G.is_multigraph() or H.is_multigraph():
@@ -459,3 +460,67 @@ def rooted_product(G, H, root):
     R.add_edges_from(((g, e[0]), (g, e[1])) for g in G for e in H.edges())
 
     return R
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+def corona_product(G, H):
+    r"""Returns the Corona product of G and H.
+
+    The corona product of $G$ and $H$ is the graph $C = G \circ H$ obtained by
+    taking one copy of $G$, called the center graph, $|V(G)|$ copies of $H$,
+    called the outer graph, and making the $i$-th vertex of $G$ adjacent to
+    every vertex of the $i$-th copy of $H$, where $1 \leq i \leq |V(G)|$.
+
+    Parameters
+    ----------
+    G, H: NetworkX graphs
+        The graphs to take the corona product of.
+        `G` is the center graph and `H` is the outer graph
+
+    Returns
+    -------
+    C: NetworkX graph
+        The Corona product of G and H.
+
+    Raises
+    ------
+    NetworkXError
+        If G and H are not both directed or both undirected.
+
+    Examples
+    --------
+    >>> G = nx.cycle_graph(4)
+    >>> H = nx.path_graph(2)
+    >>> C = nx.corona_product(G, H)
+    >>> list(C)
+    [0, 1, 2, 3, (0, 0), (0, 1), (1, 0), (1, 1), (2, 0), (2, 1), (3, 0), (3, 1)]
+    >>> print(C)
+    Graph with 12 nodes and 16 edges
+
+    References
+    ----------
+    [1] M. Tavakoli, F. Rahbarnia, and A. R. Ashrafi,
+        "Studying the corona product of graphs under some graph invariants,"
+        Transactions on Combinatorics, vol. 3, no. 3, pp. 43–49, Sep. 2014,
+        doi: 10.22108/toc.2014.5542.
+    [2] A. Faraji, "Corona Product in Graph Theory," Ali Faraji, May 11, 2021.
+        https://blog.alifaraji.ir/math/graph-theory/corona-product.html (accessed Dec. 07, 2021).
+    """
+    GH = _init_product_graph(G, H)
+    GH.add_nodes_from(G)
+    GH.add_edges_from(G.edges)
+
+    for G_node in G:
+        # copy nodes of H in GH, call it H_i
+        GH.add_nodes_from((G_node, v) for v in H)
+
+        # copy edges of H_i based on H
+        GH.add_edges_from(
+            ((G_node, e0), (G_node, e1), d) for e0, e1, d in H.edges.data()
+        )
+
+        # creating new edges between H_i and a G's node
+        GH.add_edges_from((G_node, (G_node, H_node)) for H_node in H)
+
+    return GH
diff --git a/networkx/algorithms/operators/tests/test_all.py b/networkx/algorithms/operators/tests/test_all.py
index e09791b..2454851 100644
--- a/networkx/algorithms/operators/tests/test_all.py
+++ b/networkx/algorithms/operators/tests/test_all.py
@@ -28,13 +28,13 @@ def test_union_all_attributes():
         assert ghj.nodes[n] == eval(graph).nodes[int(node)]
 
     assert ghj.graph["attr"] == "attr"
-    assert ghj.graph["name"] == "j"  # j graph attributes take precendent
+    assert ghj.graph["name"] == "j"  # j graph attributes take precedence
 
 
 def test_intersection_all():
     G = nx.Graph()
     H = nx.Graph()
-    R = nx.Graph()
+    R = nx.Graph(awesome=True)
     G.add_nodes_from([1, 2, 3, 4])
     G.add_edge(1, 2)
     G.add_edge(2, 3)
@@ -47,6 +47,7 @@ def test_intersection_all():
     I = nx.intersection_all([G, H, R])
     assert set(I.nodes()) == {1, 2, 3, 4}
     assert sorted(I.edges()) == [(2, 3)]
+    assert I.graph["awesome"]
 
 
 def test_intersection_all_different_node_sets():
@@ -238,9 +239,10 @@ def test_union_all_multigraph():
 
 
 def test_input_output():
-    l = [nx.Graph([(1, 2)]), nx.Graph([(3, 4)])]
+    l = [nx.Graph([(1, 2)]), nx.Graph([(3, 4)], awesome=True)]
     U = nx.disjoint_union_all(l)
     assert len(l) == 2
+    assert U.graph["awesome"]
     C = nx.compose_all(l)
     assert len(l) == 2
     l = [nx.Graph([(1, 2)]), nx.Graph([(1, 2)])]
diff --git a/networkx/algorithms/operators/tests/test_binary.py b/networkx/algorithms/operators/tests/test_binary.py
index f11e159..b4e64f8 100644
--- a/networkx/algorithms/operators/tests/test_binary.py
+++ b/networkx/algorithms/operators/tests/test_binary.py
@@ -23,7 +23,7 @@ def test_union_attributes():
         assert gh.nodes[n] == eval(graph).nodes[int(node)]
 
     assert gh.graph["attr"] == "attr"
-    assert gh.graph["name"] == "h"  # h graph attributes take precendent
+    assert gh.graph["name"] == "h"  # h graph attributes take precedence
 
 
 def test_intersection():
diff --git a/networkx/algorithms/operators/tests/test_product.py b/networkx/algorithms/operators/tests/test_product.py
index fb97756..50bc7b7 100644
--- a/networkx/algorithms/operators/tests/test_product.py
+++ b/networkx/algorithms/operators/tests/test_product.py
@@ -90,8 +90,8 @@ def test_tensor_product_random():
     H = nx.erdos_renyi_graph(10, 2 / 10.0)
     GH = nx.tensor_product(G, H)
 
-    for (u_G, u_H) in GH.nodes():
-        for (v_G, v_H) in GH.nodes():
+    for u_G, u_H in GH.nodes():
+        for v_G, v_H in GH.nodes():
             if H.has_edge(u_H, v_H) and G.has_edge(u_G, v_G):
                 assert GH.has_edge((u_G, u_H), (v_G, v_H))
             else:
@@ -196,8 +196,8 @@ def test_cartesian_product_random():
     H = nx.erdos_renyi_graph(10, 2 / 10.0)
     GH = nx.cartesian_product(G, H)
 
-    for (u_G, u_H) in GH.nodes():
-        for (v_G, v_H) in GH.nodes():
+    for u_G, u_H in GH.nodes():
+        for v_G, v_H in GH.nodes():
             if (u_G == v_G and H.has_edge(u_H, v_H)) or (
                 u_H == v_H and G.has_edge(u_G, v_G)
             ):
@@ -274,8 +274,8 @@ def test_lexicographic_product_random():
     H = nx.erdos_renyi_graph(10, 2 / 10.0)
     GH = nx.lexicographic_product(G, H)
 
-    for (u_G, u_H) in GH.nodes():
-        for (v_G, v_H) in GH.nodes():
+    for u_G, u_H in GH.nodes():
+        for v_G, v_H in GH.nodes():
             if G.has_edge(u_G, v_G) or (u_G == v_G and H.has_edge(u_H, v_H)):
                 assert GH.has_edge((u_G, u_H), (v_G, v_H))
             else:
@@ -350,8 +350,8 @@ def test_strong_product_random():
     H = nx.erdos_renyi_graph(10, 2 / 10.0)
     GH = nx.strong_product(G, H)
 
-    for (u_G, u_H) in GH.nodes():
-        for (v_G, v_H) in GH.nodes():
+    for u_G, u_H in GH.nodes():
+        for v_G, v_H in GH.nodes():
             if (
                 (u_G == v_G and H.has_edge(u_H, v_H))
                 or (u_H == v_H and G.has_edge(u_G, v_G))
@@ -425,3 +425,11 @@ def test_rooted_product():
     R = nx.rooted_product(G, H, "a")
     assert len(R) == len(G) * len(H)
     assert R.size() == G.size() + len(G) * H.size()
+
+
+def test_corona_product():
+    G = nx.cycle_graph(3)
+    H = nx.path_graph(2)
+    C = nx.corona_product(G, H)
+    assert len(C) == (len(G) * len(H)) + len(G)
+    assert C.size() == G.size() + len(G) * H.size() + len(G) * len(H)
diff --git a/networkx/algorithms/planar_drawing.py b/networkx/algorithms/planar_drawing.py
index acbef23..47f94f1 100644
--- a/networkx/algorithms/planar_drawing.py
+++ b/networkx/algorithms/planar_drawing.py
@@ -108,7 +108,7 @@ def combinatorial_embedding_to_pos(embedding, fully_triangulate=False):
             left_t_child[vk] = None
 
     # 2. Phase: Set absolute positions
-    pos = dict()
+    pos = {}
     pos[v1] = (0, y_coordinate[v1])
     remaining_nodes = [v1]
     while remaining_nodes:
@@ -213,7 +213,7 @@ def get_canonical_ordering(embedding, outer_face):
         return outer_face_ccw_nbr[x] == y or outer_face_cw_nbr[x] == y
 
     def is_on_outer_face(x):
-        return x not in marked_nodes and (x in outer_face_ccw_nbr.keys() or x == v1)
+        return x not in marked_nodes and (x in outer_face_ccw_nbr or x == v1)
 
     # Initialize number of chords
     for v in outer_face:
@@ -223,7 +223,7 @@ def get_canonical_ordering(embedding, outer_face):
                 ready_to_pick.discard(v)
 
     # Initialize canonical_ordering
-    canonical_ordering = [None] * len(embedding.nodes())  # type: list
+    canonical_ordering = [None] * len(embedding.nodes())
     canonical_ordering[0] = (v1, [])
     canonical_ordering[1] = (v2, [])
     ready_to_pick.discard(v1)
@@ -316,7 +316,7 @@ def triangulate_face(embedding, v1, v2):
     """
     _, v3 = embedding.next_face_half_edge(v1, v2)
     _, v4 = embedding.next_face_half_edge(v2, v3)
-    if v1 == v2 or v1 == v3:
+    if v1 in (v2, v3):
         # The component has less than 3 nodes
         return
     while v1 != v4:
diff --git a/networkx/algorithms/planarity.py b/networkx/algorithms/planarity.py
index bcde0f3..9a5e4af 100644
--- a/networkx/algorithms/planarity.py
+++ b/networkx/algorithms/planarity.py
@@ -867,7 +867,7 @@ class PlanarEmbedding(nx.DiGraph):
         set_data
 
         """
-        embedding = dict()
+        embedding = {}
         for v in self:
             embedding[v] = list(self.neighbors_cw_order(v))
         return embedding
diff --git a/networkx/algorithms/polynomials.py b/networkx/algorithms/polynomials.py
index 35c0166..27dc580 100644
--- a/networkx/algorithms/polynomials.py
+++ b/networkx/algorithms/polynomials.py
@@ -5,6 +5,19 @@ variety of structural information. Examples include the Tutte polynomial,
 chromatic polynomial, characteristic polynomial, and matching polynomial. An
 extensive treatment is provided in [1]_.
 
+For a simple example, the `~sympy.matrices.matrices.MatrixDeterminant.charpoly`
+method can be used to compute the characteristic polynomial from the adjacency
+matrix of a graph. Consider the complete graph ``K_4``:
+
+>>> import sympy
+>>> x = sympy.Symbol("x")
+>>> G = nx.complete_graph(4)
+>>> A = nx.adjacency_matrix(G)
+>>> M = sympy.SparseMatrix(A.todense())
+>>> M.charpoly(x).as_expr()
+x**4 - 6*x**2 - 8*x - 3
+
+
 .. [1] Y. Shi, M. Dehmer, X. Li, I. Gutman,
    "Graph Polynomials"
 """
diff --git a/networkx/algorithms/reciprocity.py b/networkx/algorithms/reciprocity.py
index 1b7761b..d58b607 100644
--- a/networkx/algorithms/reciprocity.py
+++ b/networkx/algorithms/reciprocity.py
@@ -1,4 +1,5 @@
 """Algorithms to calculate reciprocity in a directed graph."""
+import networkx as nx
 from networkx import NetworkXError
 
 from ..utils import not_implemented_for
@@ -6,6 +7,7 @@ from ..utils import not_implemented_for
 __all__ = ["reciprocity", "overall_reciprocity"]
 
 
+@nx._dispatch
 @not_implemented_for("undirected", "multigraph")
 def reciprocity(G, nodes=None):
     r"""Compute the reciprocity in a directed graph.
@@ -73,6 +75,7 @@ def _reciprocity_iter(G, nodes):
             yield (node, reciprocity)
 
 
+@nx._dispatch
 @not_implemented_for("undirected", "multigraph")
 def overall_reciprocity(G):
     """Compute the reciprocity for the whole graph.
diff --git a/networkx/algorithms/regular.py b/networkx/algorithms/regular.py
index 3f76d40..da6e4b5 100644
--- a/networkx/algorithms/regular.py
+++ b/networkx/algorithms/regular.py
@@ -5,6 +5,7 @@ from networkx.utils import not_implemented_for
 __all__ = ["is_regular", "is_k_regular", "k_factor"]
 
 
+@nx._dispatch
 def is_regular(G):
     """Determines whether the graph ``G`` is a regular graph.
 
@@ -21,6 +22,12 @@ def is_regular(G):
     bool
         Whether the given graph or digraph is regular.
 
+    Examples
+    --------
+    >>> G = nx.DiGraph([(1, 2), (2, 3), (3, 4), (4, 1)])
+    >>> nx.is_regular(G)
+    True
+
     """
     n1 = nx.utils.arbitrary_element(G)
     if not G.is_directed():
@@ -34,6 +41,7 @@ def is_regular(G):
         return in_regular and out_regular
 
 
+@nx._dispatch
 @not_implemented_for("directed")
 def is_k_regular(G, k):
     """Determines whether the graph ``G`` is a k-regular graph.
@@ -49,6 +57,12 @@ def is_k_regular(G, k):
     bool
         Whether the given graph is k-regular.
 
+    Examples
+    --------
+    >>> G = nx.Graph([(1, 2), (2, 3), (3, 4), (4, 1)])
+    >>> nx.is_k_regular(G, k=3)
+    False
+
     """
     return all(d == k for n, d in G.degree)
 
@@ -78,6 +92,13 @@ def k_factor(G, k, matching_weight="weight"):
     G2 : NetworkX graph
         A k-factor of G
 
+    Examples
+    --------
+    >>> G = nx.Graph([(1, 2), (2, 3), (3, 4), (4, 1)])
+    >>> G2 = nx.k_factor(G, k=1)
+    >>> G2.edges()
+    EdgeView([(1, 2), (3, 4)])
+
     References
     ----------
     .. [1] "An algorithm for computing simple k-factors.",
@@ -101,7 +122,7 @@ def k_factor(G, k, matching_weight="weight"):
             adj_view = self.g[self.original]
             neighbors = list(adj_view.keys())
             edge_attrs = list(adj_view.values())
-            for (outer, neighbor, edge_attrs) in zip(
+            for outer, neighbor, edge_attrs in zip(
                 self.outer_vertices, neighbors, edge_attrs
             ):
                 self.g.add_edge(outer, neighbor, **edge_attrs)
@@ -134,7 +155,7 @@ def k_factor(G, k, matching_weight="weight"):
 
         def replace_node(self):
             adj_view = self.g[self.original]
-            for (outer, inner, (neighbor, edge_attrs)) in zip(
+            for outer, inner, (neighbor, edge_attrs) in zip(
                 self.outer_vertices, self.inner_vertices, list(adj_view.items())
             ):
                 self.g.add_edge(outer, inner)
diff --git a/networkx/algorithms/shortest_paths/astar.py b/networkx/algorithms/shortest_paths/astar.py
index 5d5a847..6f0e442 100644
--- a/networkx/algorithms/shortest_paths/astar.py
+++ b/networkx/algorithms/shortest_paths/astar.py
@@ -46,7 +46,7 @@ def astar_path(G, source, target, heuristic=None, weight="weight"):
        returned by the function. The function must accept exactly three
        positional arguments: the two endpoints of an edge and the
        dictionary of edge attributes for that edge. The function must
-       return a number.
+       return a number or None to indicate a hidden edge.
 
     Raises
     ------
@@ -67,6 +67,14 @@ def astar_path(G, source, target, heuristic=None, weight="weight"):
     >>> print(nx.astar_path(G, (0, 0), (2, 2), heuristic=dist, weight="cost"))
     [(0, 0), (0, 1), (0, 2), (1, 2), (2, 2)]
 
+    Notes
+    -----
+    Edge weight attributes must be numerical.
+    Distances are calculated as sums of weighted edges traversed.
+
+    The weight function can be used to hide edges by returning None.
+    So ``weight = lambda u, v, d: 1 if d['color']=="red" else None``
+    will find the shortest red path.
 
     See Also
     --------
@@ -127,7 +135,10 @@ def astar_path(G, source, target, heuristic=None, weight="weight"):
         explored[curnode] = parent
 
         for neighbor, w in G[curnode].items():
-            ncost = dist + weight(curnode, neighbor, w)
+            cost = weight(curnode, neighbor, w)
+            if cost is None:
+                continue
+            ncost = dist + cost
             if neighbor in enqueued:
                 qcost, h = enqueued[neighbor]
                 # if qcost <= ncost, a less costly path from the
@@ -179,7 +190,7 @@ def astar_path_length(G, source, target, heuristic=None, weight="weight"):
        returned by the function. The function must accept exactly three
        positional arguments: the two endpoints of an edge and the
        dictionary of edge attributes for that edge. The function must
-       return a number.
+       return a number or None to indicate a hidden edge.
     Raises
     ------
     NetworkXNoPath
diff --git a/networkx/algorithms/shortest_paths/dense.py b/networkx/algorithms/shortest_paths/dense.py
index 8965171..f578796 100644
--- a/networkx/algorithms/shortest_paths/dense.py
+++ b/networkx/algorithms/shortest_paths/dense.py
@@ -10,6 +10,7 @@ __all__ = [
 ]
 
 
+@nx._dispatch
 def floyd_warshall_numpy(G, nodelist=None, weight="weight"):
     """Find all-pairs shortest path lengths using Floyd's algorithm.
 
@@ -73,6 +74,7 @@ def floyd_warshall_numpy(G, nodelist=None, weight="weight"):
     return A
 
 
+@nx._dispatch
 def floyd_warshall_predecessor_and_distance(G, weight="weight"):
     """Find all-pairs shortest path lengths using Floyd's algorithm.
 
@@ -198,6 +200,7 @@ def reconstruct_path(source, target, predecessors):
     return list(reversed(path))
 
 
+@nx._dispatch
 def floyd_warshall(G, weight="weight"):
     """Find all-pairs shortest path lengths using Floyd's algorithm.
 
diff --git a/networkx/algorithms/shortest_paths/generic.py b/networkx/algorithms/shortest_paths/generic.py
index 129f741..ff09fbd 100644
--- a/networkx/algorithms/shortest_paths/generic.py
+++ b/networkx/algorithms/shortest_paths/generic.py
@@ -4,6 +4,7 @@ Compute the shortest paths and path lengths between nodes in the graph.
 These algorithms work with undirected and directed graphs.
 
 """
+import warnings
 
 import networkx as nx
 
@@ -16,6 +17,7 @@ __all__ = [
 ]
 
 
+@nx._dispatch
 def has_path(G, source, target):
     """Returns *True* if *G* has a path from *source* to *target*.
 
@@ -36,6 +38,7 @@ def has_path(G, source, target):
     return True
 
 
+@nx._dispatch
 def shortest_path(G, source=None, target=None, weight=None, method="dijkstra"):
     """Compute shortest paths in the graph.
 
@@ -130,6 +133,9 @@ def shortest_path(G, source=None, target=None, weight=None, method="dijkstra"):
     method = "unweighted" if weight is None else method
     if source is None:
         if target is None:
+            msg = "shortest_path for all_pairs will return an iterator in v3.3"
+            warnings.warn(msg, DeprecationWarning)
+
             # Find paths between all pairs.
             if method == "unweighted":
                 paths = dict(nx.all_pairs_shortest_path(G))
@@ -170,6 +176,7 @@ def shortest_path(G, source=None, target=None, weight=None, method="dijkstra"):
     return paths
 
 
+@nx._dispatch
 def shortest_path_length(G, source=None, target=None, weight=None, method="dijkstra"):
     """Compute shortest path lengths in the graph.
 
@@ -320,12 +327,16 @@ def average_shortest_path_length(G, weight=None, method=None):
 
     .. math::
 
-       a =\sum_{s,t \in V} \frac{d(s, t)}{n(n-1)}
+       a =\sum_{\substack{s,t \in V \\ s\neq t}} \frac{d(s, t)}{n(n-1)}
 
     where `V` is the set of nodes in `G`,
     `d(s, t)` is the shortest path from `s` to `t`,
     and `n` is the number of nodes in `G`.
 
+    .. versionchanged:: 3.0
+       An exception is raised for directed graphs that are not strongly
+       connected.
+
     Parameters
     ----------
     G : NetworkX graph
@@ -354,7 +365,7 @@ def average_shortest_path_length(G, weight=None, method=None):
         If `G` is the null graph (that is, the graph on zero nodes).
 
     NetworkXError
-        If `G` is not connected (or not weakly connected, in the case
+        If `G` is not connected (or not strongly connected, in the case
         of a directed graph).
 
     ValueError
@@ -397,9 +408,10 @@ def average_shortest_path_length(G, weight=None, method=None):
     # For the special case of the trivial graph, return zero immediately.
     if n == 1:
         return 0
-    # Shortest path length is undefined if the graph is disconnected.
-    if G.is_directed() and not nx.is_weakly_connected(G):
-        raise nx.NetworkXError("Graph is not weakly connected.")
+    # Shortest path length is undefined if the graph is not strongly connected.
+    if G.is_directed() and not nx.is_strongly_connected(G):
+        raise nx.NetworkXError("Graph is not strongly connected.")
+    # Shortest path length is undefined if the graph is not connected.
     if not G.is_directed() and not nx.is_connected(G):
         raise nx.NetworkXError("Graph is not connected.")
 
diff --git a/networkx/algorithms/shortest_paths/tests/test_astar.py b/networkx/algorithms/shortest_paths/tests/test_astar.py
index e622502..680f76e 100644
--- a/networkx/algorithms/shortest_paths/tests/test_astar.py
+++ b/networkx/algorithms/shortest_paths/tests/test_astar.py
@@ -44,6 +44,25 @@ class TestAStar:
         assert nx.astar_path(self.XG, "s", "v") == ["s", "x", "u", "v"]
         assert nx.astar_path_length(self.XG, "s", "v") == 9
 
+    def test_astar_directed_weight_function(self):
+        w1 = lambda u, v, d: d["weight"]
+        assert nx.astar_path(self.XG, "x", "u", weight=w1) == ["x", "u"]
+        assert nx.astar_path_length(self.XG, "x", "u", weight=w1) == 3
+        assert nx.astar_path(self.XG, "s", "v", weight=w1) == ["s", "x", "u", "v"]
+        assert nx.astar_path_length(self.XG, "s", "v", weight=w1) == 9
+
+        w2 = lambda u, v, d: None if (u, v) == ("x", "u") else d["weight"]
+        assert nx.astar_path(self.XG, "x", "u", weight=w2) == ["x", "y", "s", "u"]
+        assert nx.astar_path_length(self.XG, "x", "u", weight=w2) == 19
+        assert nx.astar_path(self.XG, "s", "v", weight=w2) == ["s", "x", "v"]
+        assert nx.astar_path_length(self.XG, "s", "v", weight=w2) == 10
+
+        w3 = lambda u, v, d: d["weight"] + 10
+        assert nx.astar_path(self.XG, "x", "u", weight=w3) == ["x", "u"]
+        assert nx.astar_path_length(self.XG, "x", "u", weight=w3) == 13
+        assert nx.astar_path(self.XG, "s", "v", weight=w3) == ["s", "x", "v"]
+        assert nx.astar_path_length(self.XG, "s", "v", weight=w3) == 30
+
     def test_astar_multigraph(self):
         G = nx.MultiDiGraph(self.XG)
         G.add_weighted_edges_from((u, v, 1000) for (u, v) in list(G.edges()))
@@ -175,3 +194,17 @@ class TestAStar:
         G.add_edges_from(pairwise(nodes, cyclic=True))
         path = nx.astar_path(G, nodes[0], nodes[2])
         assert len(path) == 3
+
+    def test_astar_NetworkXNoPath(self):
+        """Tests that exception is raised when there exists no
+        path between source and target"""
+        G = nx.gnp_random_graph(10, 0.2, seed=10)
+        with pytest.raises(nx.NetworkXNoPath):
+            nx.astar_path(G, 4, 9)
+
+    def test_astar_NodeNotFound(self):
+        """Tests that exception is raised when either
+        source or target is not in graph"""
+        G = nx.gnp_random_graph(10, 0.2, seed=10)
+        with pytest.raises(nx.NodeNotFound):
+            nx.astar_path_length(G, 11, 9)
diff --git a/networkx/algorithms/shortest_paths/tests/test_generic.py b/networkx/algorithms/shortest_paths/tests/test_generic.py
index 093fd9c..91b0e30 100644
--- a/networkx/algorithms/shortest_paths/tests/test_generic.py
+++ b/networkx/algorithms/shortest_paths/tests/test_generic.py
@@ -324,13 +324,16 @@ class TestAverageShortestPathLength:
         )
         assert ans == pytest.approx(4, abs=1e-7)
 
-    def test_disconnected(self):
+    def test_directed_not_strongly_connected(self):
+        G = nx.DiGraph([(0, 1)])
+        with pytest.raises(nx.NetworkXError, match="Graph is not strongly connected"):
+            nx.average_shortest_path_length(G)
+
+    def test_undirected_not_connected(self):
         g = nx.Graph()
         g.add_nodes_from(range(3))
         g.add_edge(0, 1)
         pytest.raises(nx.NetworkXError, nx.average_shortest_path_length, g)
-        g = g.to_directed()
-        pytest.raises(nx.NetworkXError, nx.average_shortest_path_length, g)
 
     def test_trivial_graph(self):
         """Tests that the trivial graph has average path length zero,
diff --git a/networkx/algorithms/shortest_paths/tests/test_weighted.py b/networkx/algorithms/shortest_paths/tests/test_weighted.py
index 7d5dae4..d1bfea2 100644
--- a/networkx/algorithms/shortest_paths/tests/test_weighted.py
+++ b/networkx/algorithms/shortest_paths/tests/test_weighted.py
@@ -342,6 +342,10 @@ class TestWeightedPath(WeightedTestBase):
         G.add_edge(9, 10)
         pytest.raises(ValueError, nx.bidirectional_dijkstra, G, 8, 10)
 
+    def test_negative_edge_cycle_empty(self):
+        G = nx.DiGraph()
+        assert not nx.negative_edge_cycle(G)
+
     def test_negative_edge_cycle_custom_weight_key(self):
         d = nx.DiGraph()
         d.add_edge("a", "b", w=-2)
diff --git a/networkx/algorithms/shortest_paths/unweighted.py b/networkx/algorithms/shortest_paths/unweighted.py
index 9d1dff5..7a964f0 100644
--- a/networkx/algorithms/shortest_paths/unweighted.py
+++ b/networkx/algorithms/shortest_paths/unweighted.py
@@ -1,6 +1,8 @@
 """
 Shortest path algorithms for unweighted graphs.
 """
+import warnings
+
 import networkx as nx
 
 __all__ = [
@@ -15,6 +17,7 @@ __all__ = [
 ]
 
 
+@nx._dispatch
 def single_source_shortest_path_length(G, source, cutoff=None):
     """Compute the shortest path lengths from source to all reachable nodes.
 
@@ -55,8 +58,8 @@ def single_source_shortest_path_length(G, source, cutoff=None):
         raise nx.NodeNotFound(f"Source {source} is not in G")
     if cutoff is None:
         cutoff = float("inf")
-    nextlevel = {source: 1}
-    return dict(_single_shortest_path_length(G.adj, nextlevel, cutoff))
+    nextlevel = [source]
+    return dict(_single_shortest_path_length(G._adj, nextlevel, cutoff))
 
 
 def _single_shortest_path_length(adj, firstlevel, cutoff):
@@ -67,32 +70,32 @@ def _single_shortest_path_length(adj, firstlevel, cutoff):
     ----------
         adj : dict
             Adjacency dict or view
-        firstlevel : dict
-            starting nodes, e.g. {source: 1} or {target: 1}
+        firstlevel : list
+            starting nodes, e.g. [source] or [target]
         cutoff : int or float
             level at which we stop the process
     """
-    seen = {}  # level (number of hops) when seen in BFS
-    level = 0  # the current level
-    nextlevel = set(firstlevel)  # set of nodes to check at next level
+    seen = set(firstlevel)
+    nextlevel = firstlevel
+    level = 0
     n = len(adj)
-    while nextlevel and cutoff >= level:
-        thislevel = nextlevel  # advance to next level
-        nextlevel = set()  # and start a new set (fringe)
-        found = []
-        for v in thislevel:
-            if v not in seen:
-                seen[v] = level  # set the level of vertex v
-                found.append(v)
-                yield (v, level)
-        if len(seen) == n:
-            return
-        for v in found:
-            nextlevel.update(adj[v])
+    for v in nextlevel:
+        yield (v, level)
+    while nextlevel and cutoff > level:
         level += 1
-    del seen
+        thislevel = nextlevel
+        nextlevel = []
+        for v in thislevel:
+            for w in adj[v]:
+                if w not in seen:
+                    seen.add(w)
+                    nextlevel.append(w)
+                    yield (w, level)
+            if len(seen) == n:
+                return
 
 
+@nx._dispatch
 def single_target_shortest_path_length(G, target, cutoff=None):
     """Compute the shortest path lengths to target from all reachable nodes.
 
@@ -132,14 +135,20 @@ def single_target_shortest_path_length(G, target, cutoff=None):
     if target not in G:
         raise nx.NodeNotFound(f"Target {target} is not in G")
 
+    msg = "single_target_shortest_path_length will return a dict starting in v3.3"
+    warnings.warn(msg, DeprecationWarning)
+
     if cutoff is None:
         cutoff = float("inf")
     # handle either directed or undirected
-    adj = G.pred if G.is_directed() else G.adj
-    nextlevel = {target: 1}
+    adj = G._pred if G.is_directed() else G._adj
+    nextlevel = [target]
+    # for version 3.3 we will return a dict like this:
+    # return dict(_single_shortest_path_length(adj, nextlevel, cutoff))
     return _single_shortest_path_length(adj, nextlevel, cutoff)
 
 
+@nx._dispatch
 def all_pairs_shortest_path_length(G, cutoff=None):
     """Computes the shortest path lengths between all nodes in `G`.
 
@@ -207,6 +216,13 @@ def bidirectional_shortest_path(G, source, target):
     NetworkXNoPath
        If no path exists between source and target.
 
+    Examples
+    --------
+    >>> G = nx.Graph()
+    >>> nx.add_path(G, [0, 1, 2, 3, 0, 4, 5, 6, 7, 4])
+    >>> nx.bidirectional_shortest_path(G, 2, 6)
+    [2, 1, 0, 4, 5, 6]
+
     See Also
     --------
     shortest_path
@@ -292,6 +308,7 @@ def _bidirectional_pred_succ(G, source, target):
     raise nx.NetworkXNoPath(f"No path between {source} and {target}.")
 
 
+@nx._dispatch
 def single_source_shortest_path(G, source, cutoff=None):
     """Compute shortest path between source
     and all other nodes reachable from source.
@@ -308,7 +325,7 @@ def single_source_shortest_path(G, source, cutoff=None):
 
     Returns
     -------
-    lengths : dictionary
+    paths : dictionary
         Dictionary, keyed by target, of shortest paths.
 
     Examples
@@ -375,6 +392,7 @@ def _single_shortest_path(adj, firstlevel, paths, cutoff, join):
     return paths
 
 
+@nx._dispatch
 def single_target_shortest_path(G, target, cutoff=None):
     """Compute shortest path to target from all nodes that reach target.
 
@@ -390,7 +408,7 @@ def single_target_shortest_path(G, target, cutoff=None):
 
     Returns
     -------
-    lengths : dictionary
+    paths : dictionary
         Dictionary, keyed by target, of shortest paths.
 
     Examples
@@ -426,6 +444,7 @@ def single_target_shortest_path(G, target, cutoff=None):
     return dict(_single_shortest_path(adj, nextlevel, paths, cutoff, join))
 
 
+@nx._dispatch
 def all_pairs_shortest_path(G, cutoff=None):
     """Compute shortest paths between all nodes.
 
@@ -439,7 +458,7 @@ def all_pairs_shortest_path(G, cutoff=None):
 
     Returns
     -------
-    lengths : dictionary
+    paths : iterator
         Dictionary, keyed by source and target, of shortest paths.
 
     Examples
diff --git a/networkx/algorithms/shortest_paths/weighted.py b/networkx/algorithms/shortest_paths/weighted.py
index ef0ee63..9d87f8e 100644
--- a/networkx/algorithms/shortest_paths/weighted.py
+++ b/networkx/algorithms/shortest_paths/weighted.py
@@ -105,7 +105,7 @@ def dijkstra_path(G, source, target, weight="weight"):
         returned by the function. The function must accept exactly three
         positional arguments: the two endpoints of an edge and the
         dictionary of edge attributes for that edge. The function must
-        return a number.
+        return a number or None to indicate a hidden edge.
 
     Returns
     -------
@@ -126,6 +126,15 @@ def dijkstra_path(G, source, target, weight="weight"):
     >>> print(nx.dijkstra_path(G, 0, 4))
     [0, 1, 2, 3, 4]
 
+    Find edges of shortest path in Multigraph
+
+    >>> G = nx.MultiDiGraph()
+    >>> G.add_weighted_edges_from([(1, 2, 0.75), (1, 2, 0.5), (2, 3, 0.5), (1, 3, 1.5)])
+    >>> nodes = nx.dijkstra_path(G, 1, 3)
+    >>> edges = nx.utils.pairwise(nodes)
+    >>> list((u, v, min(G[u][v], key=lambda k: G[u][v][k].get('weight', 1))) for u, v in edges)
+    [(1, 2, 1), (2, 3, 0)]
+
     Notes
     -----
     Edge weight attributes must be numerical.
@@ -186,7 +195,7 @@ def dijkstra_path_length(G, source, target, weight="weight"):
         returned by the function. The function must accept exactly three
         positional arguments: the two endpoints of an edge and the
         dictionary of edge attributes for that edge. The function must
-        return a number.
+        return a number or None to indicate a hidden edge.
 
     Returns
     -------
@@ -266,7 +275,7 @@ def single_source_dijkstra_path(G, source, cutoff=None, weight="weight"):
         returned by the function. The function must accept exactly three
         positional arguments: the two endpoints of an edge and the
         dictionary of edge attributes for that edge. The function must
-        return a number.
+        return a number or None to indicate a hidden edge.
 
     Returns
     -------
@@ -330,7 +339,7 @@ def single_source_dijkstra_path_length(G, source, cutoff=None, weight="weight"):
         returned by the function. The function must accept exactly three
         positional arguments: the two endpoints of an edge and the
         dictionary of edge attributes for that edge. The function must
-        return a number.
+        return a number or None to indicate a hidden edge.
 
     Returns
     -------
@@ -408,7 +417,7 @@ def single_source_dijkstra(G, source, target=None, cutoff=None, weight="weight")
         returned by the function. The function must accept exactly three
         positional arguments: the two endpoints of an edge and the
         dictionary of edge attributes for that edge. The function must
-        return a number.
+        return a number or None to indicate a hidden edge.
 
     Returns
     -------
@@ -506,7 +515,7 @@ def multi_source_dijkstra_path(G, sources, cutoff=None, weight="weight"):
         returned by the function. The function must accept exactly three
         positional arguments: the two endpoints of an edge and the
         dictionary of edge attributes for that edge. The function must
-        return a number.
+        return a number or None to indicate a hidden edge.
 
     Returns
     -------
@@ -579,7 +588,7 @@ def multi_source_dijkstra_path_length(G, sources, cutoff=None, weight="weight"):
         returned by the function. The function must accept exactly three
         positional arguments: the two endpoints of an edge and the
         dictionary of edge attributes for that edge. The function must
-        return a number.
+        return a number or None to indicate a hidden edge.
 
     Returns
     -------
@@ -664,7 +673,7 @@ def multi_source_dijkstra(G, sources, target=None, cutoff=None, weight="weight")
         returned by the function. The function must accept exactly three
         positional arguments: the two endpoints of an edge and the
         dictionary of edge attributes for that edge. The function must
-        return a number.
+        return a number or None to indicate a hidden edge.
 
     Returns
     -------
@@ -778,7 +787,8 @@ def _dijkstra_multisource(
         nodes.
 
     weight: function
-        Function with (u, v, data) input that returns that edges weight
+        Function with (u, v, data) input that returns that edge's weight
+        or None to indicate a hidden edge
 
     pred: dict of lists, optional(default=None)
         dict to store a list of predecessors keyed by that node
@@ -892,7 +902,7 @@ def dijkstra_predecessor_and_distance(G, source, cutoff=None, weight="weight"):
         returned by the function. The function must accept exactly three
         positional arguments: the two endpoints of an edge and the
         dictionary of edge attributes for that edge. The function must
-        return a number.
+        return a number or None to indicate a hidden edge.
 
     Returns
     -------
@@ -957,7 +967,7 @@ def all_pairs_dijkstra(G, cutoff=None, weight="weight"):
         returned by the function. The function must accept exactly three
         positional arguments: the two endpoints of an edge and the
         dictionary of edge attributes for that edge. The function must
-        return a number.
+        return a number or None to indicate a hidden edge.
 
     Yields
     ------
@@ -1025,7 +1035,7 @@ def all_pairs_dijkstra_path_length(G, cutoff=None, weight="weight"):
         returned by the function. The function must accept exactly three
         positional arguments: the two endpoints of an edge and the
         dictionary of edge attributes for that edge. The function must
-        return a number.
+        return a number or None to indicate a hidden edge.
 
     Returns
     -------
@@ -1083,12 +1093,13 @@ def all_pairs_dijkstra_path(G, cutoff=None, weight="weight"):
         returned by the function. The function must accept exactly three
         positional arguments: the two endpoints of an edge and the
         dictionary of edge attributes for that edge. The function must
-        return a number.
+        return a number or None to indicate a hidden edge.
 
     Returns
     -------
-    distance : dictionary
-        Dictionary, keyed by source and target, of shortest paths.
+    paths : iterator
+        (source, dictionary) iterator with dictionary keyed by target and
+        shortest path as the key value.
 
     Examples
     --------
@@ -1113,6 +1124,7 @@ def all_pairs_dijkstra_path(G, cutoff=None, weight="weight"):
         yield (n, path(G, n, cutoff=cutoff, weight=weight))
 
 
+@nx._dispatch
 def bellman_ford_predecessor_and_distance(
     G, source, target=None, weight="weight", heuristic=False
 ):
@@ -1453,6 +1465,7 @@ def _inner_bellman_ford(
     return None
 
 
+@nx._dispatch
 def bellman_ford_path(G, source, target, weight="weight"):
     """Returns the shortest path from source to target in a weighted graph G.
 
@@ -1511,6 +1524,7 @@ def bellman_ford_path(G, source, target, weight="weight"):
     return path
 
 
+@nx._dispatch
 def bellman_ford_path_length(G, source, target, weight="weight"):
     """Returns the shortest path length from source to target
     in a weighted graph.
@@ -1581,6 +1595,7 @@ def bellman_ford_path_length(G, source, target, weight="weight"):
         raise nx.NetworkXNoPath(f"node {target} not reachable from {source}") from err
 
 
+@nx._dispatch
 def single_source_bellman_ford_path(G, source, weight="weight"):
     """Compute shortest path between source and all other reachable
     nodes for a weighted graph.
@@ -1636,6 +1651,7 @@ def single_source_bellman_ford_path(G, source, weight="weight"):
     return path
 
 
+@nx._dispatch
 def single_source_bellman_ford_path_length(G, source, weight="weight"):
     """Compute the shortest path length between source and all other
     reachable nodes for a weighted graph.
@@ -1662,8 +1678,8 @@ def single_source_bellman_ford_path_length(G, source, weight="weight"):
 
     Returns
     -------
-    length : iterator
-        (target, shortest path length) iterator
+    length : dictionary
+        Dictionary of shortest path length keyed by target
 
     Raises
     ------
@@ -1673,7 +1689,7 @@ def single_source_bellman_ford_path_length(G, source, weight="weight"):
     Examples
     --------
     >>> G = nx.path_graph(5)
-    >>> length = dict(nx.single_source_bellman_ford_path_length(G, 0))
+    >>> length = nx.single_source_bellman_ford_path_length(G, 0)
     >>> length[4]
     4
     >>> for node in [0, 1, 2, 3, 4]:
@@ -1698,6 +1714,7 @@ def single_source_bellman_ford_path_length(G, source, weight="weight"):
     return _bellman_ford(G, [source], weight)
 
 
+@nx._dispatch
 def single_source_bellman_ford(G, source, target=None, weight="weight"):
     """Compute shortest paths and lengths in a weighted graph G.
 
@@ -1791,6 +1808,7 @@ def single_source_bellman_ford(G, source, target=None, weight="weight"):
         raise nx.NetworkXNoPath(msg) from err
 
 
+@nx._dispatch
 def all_pairs_bellman_ford_path_length(G, weight="weight"):
     """Compute shortest path lengths between all nodes in a weighted graph.
 
@@ -1845,6 +1863,7 @@ def all_pairs_bellman_ford_path_length(G, weight="weight"):
         yield (n, dict(length(G, n, weight=weight)))
 
 
+@nx._dispatch
 def all_pairs_bellman_ford_path(G, weight="weight"):
     """Compute shortest paths between all nodes in a weighted graph.
 
@@ -1867,8 +1886,9 @@ def all_pairs_bellman_ford_path(G, weight="weight"):
 
     Returns
     -------
-    distance : dictionary
-        Dictionary, keyed by source and target, of shortest paths.
+    paths : iterator
+        (source, dictionary) iterator with dictionary keyed by target and
+        shortest path as the key value.
 
     Examples
     --------
@@ -2068,6 +2088,7 @@ def goldberg_radzik(G, source, weight="weight"):
     return pred, d
 
 
+@nx._dispatch
 def negative_edge_cycle(G, weight="weight", heuristic=True):
     """Returns True if there exists a negative edge cycle anywhere in G.
 
@@ -2117,6 +2138,9 @@ def negative_edge_cycle(G, weight="weight", heuristic=True):
     every node, and starting bellman_ford_predecessor_and_distance on that
     node.  It then removes that extra node.
     """
+    if G.size() == 0:
+        return False
+
     # find unused node to use temporarily
     newnode = -1
     while newnode in G:
@@ -2251,7 +2275,7 @@ def bidirectional_dijkstra(G, source, target, weight="weight"):
         returned by the function. The function must accept exactly three
         positional arguments: the two endpoints of an edge and the
         dictionary of edge attributes for that edge. The function must
-        return a number.
+        return a number or None to indicate a hidden edge.
 
     Returns
     -------
diff --git a/networkx/algorithms/similarity.py b/networkx/algorithms/similarity.py
index fe6e0f2..cb2994d 100644
--- a/networkx/algorithms/similarity.py
+++ b/networkx/algorithms/similarity.py
@@ -16,9 +16,8 @@ alternative GED algorithms, in order to improve the choices available.
 import math
 import time
 import warnings
-from functools import reduce
+from dataclasses import dataclass
 from itertools import product
-from operator import mul
 
 import networkx as nx
 
@@ -28,7 +27,6 @@ __all__ = [
     "optimize_graph_edit_distance",
     "optimize_edit_paths",
     "simrank_similarity",
-    "simrank_similarity_numpy",
     "panther_similarity",
     "generate_random_paths",
 ]
@@ -188,7 +186,7 @@ def graph_edit_distance(
 
     """
     bestcost = None
-    for vertex_path, edge_path, cost in optimize_edit_paths(
+    for _, _, cost in optimize_edit_paths(
         G1,
         G2,
         node_match,
@@ -347,7 +345,7 @@ def optimal_edit_paths(
        https://hal.archives-ouvertes.fr/hal-01168816
 
     """
-    paths = list()
+    paths = []
     bestcost = None
     for vertex_path, edge_path, cost in optimize_edit_paths(
         G1,
@@ -365,7 +363,7 @@ def optimal_edit_paths(
     ):
         # assert bestcost is None or cost <= bestcost
         if bestcost is not None and cost < bestcost:
-            paths = list()
+            paths = []
         paths.append((vertex_path, edge_path))
         bestcost = cost
     return paths, bestcost
@@ -504,7 +502,7 @@ def optimize_graph_edit_distance(
        <10.5220/0005209202710278>. <hal-01168816>
        https://hal.archives-ouvertes.fr/hal-01168816
     """
-    for vertex_path, edge_path, cost in optimize_edit_paths(
+    for _, _, cost in optimize_edit_paths(
         G1,
         G2,
         node_match,
@@ -673,18 +671,12 @@ def optimize_edit_paths(
     import scipy as sp
     import scipy.optimize  # call as sp.optimize
 
+    @dataclass
     class CostMatrix:
-        def __init__(self, C, lsa_row_ind, lsa_col_ind, ls):
-            # assert C.shape[0] == len(lsa_row_ind)
-            # assert C.shape[1] == len(lsa_col_ind)
-            # assert len(lsa_row_ind) == len(lsa_col_ind)
-            # assert set(lsa_row_ind) == set(range(len(lsa_row_ind)))
-            # assert set(lsa_col_ind) == set(range(len(lsa_col_ind)))
-            # assert ls == C[lsa_row_ind, lsa_col_ind].sum()
-            self.C = C
-            self.lsa_row_ind = lsa_row_ind
-            self.lsa_col_ind = lsa_col_ind
-            self.ls = ls
+        C: ...
+        lsa_row_ind: ...
+        lsa_col_ind: ...
+        ls: ...
 
     def make_CostMatrix(C, m, n):
         # assert(C.shape == (m + n, m + n))
@@ -695,9 +687,9 @@ def optimize_edit_paths(
         # NOTE: fast reduce of Cv relies on it
         # assert len(lsa_row_ind) == len(lsa_col_ind)
         indexes = zip(range(len(lsa_row_ind)), lsa_row_ind, lsa_col_ind)
-        subst_ind = list(k for k, i, j in indexes if i < m and j < n)
+        subst_ind = [k for k, i, j in indexes if i < m and j < n]
         indexes = zip(range(len(lsa_row_ind)), lsa_row_ind, lsa_col_ind)
-        dummy_ind = list(k for k, i, j in indexes if i >= m and j >= n)
+        dummy_ind = [k for k, i, j in indexes if i >= m and j >= n]
         # assert len(subst_ind) == len(dummy_ind)
         lsa_row_ind[dummy_ind] = lsa_col_ind[subst_ind] + m
         lsa_col_ind[dummy_ind] = lsa_row_ind[subst_ind] + n
@@ -725,7 +717,7 @@ def optimize_edit_paths(
             rind[rind >= k] -= 1
         return rind
 
-    def match_edges(u, v, pending_g, pending_h, Ce, matched_uv=[]):
+    def match_edges(u, v, pending_g, pending_h, Ce, matched_uv=None):
         """
         Parameters:
             u, v: matched vertices, u=None or v=None for
@@ -749,7 +741,10 @@ def optimize_edit_paths(
         # only attempt to match edges after one node match has been made
         # this will stop self-edges on the first node being automatically deleted
         # even when a substitution is the better option
-        if matched_uv:
+        if matched_uv is None or len(matched_uv) == 0:
+            g_ind = []
+            h_ind = []
+        else:
             g_ind = [
                 i
                 for i in range(M)
@@ -766,9 +761,6 @@ def optimize_edit_paths(
                     pending_h[j][:2] in ((q, v), (v, q), (q, q)) for p, q in matched_uv
                 )
             ]
-        else:
-            g_ind = []
-            h_ind = []
 
         m = len(g_ind)
         n = len(h_ind)
@@ -779,9 +771,9 @@ def optimize_edit_paths(
 
             # Forbid structurally invalid matches
             # NOTE: inf remembered from Ce construction
-            for k, i in zip(range(m), g_ind):
+            for k, i in enumerate(g_ind):
                 g = pending_g[i][:2]
-                for l, j in zip(range(n), h_ind):
+                for l, j in enumerate(h_ind):
                     h = pending_h[j][:2]
                     if nx.is_directed(G1) or nx.is_directed(G2):
                         if any(
@@ -802,14 +794,14 @@ def optimize_edit_paths(
                     C[k, l] = inf
 
             localCe = make_CostMatrix(C, m, n)
-            ij = list(
+            ij = [
                 (
                     g_ind[k] if k < m else M + h_ind[l],
                     h_ind[l] if l < n else N + g_ind[k],
                 )
                 for k, l in zip(localCe.lsa_row_ind, localCe.lsa_col_ind)
                 if k < m or l < n
-            )
+            ]
 
         else:
             ij = []
@@ -823,8 +815,7 @@ def optimize_edit_paths(
             m_i = m - sum(1 for t in i if t < m)
             n_j = n - sum(1 for t in j if t < n)
             return make_CostMatrix(reduce_C(Ce.C, i, j, m, n), m_i, n_j)
-        else:
-            return Ce
+        return Ce
 
     def get_edit_ops(
         matched_uv, pending_u, pending_v, Cv, pending_g, pending_h, Ce, matched_cost
@@ -882,7 +873,7 @@ def optimize_edit_paths(
             yield (i, j), Cv_ij, xy, Ce_xy, Cv.C[i, j] + localCe.ls
 
         # 2) other candidates, sorted by lower-bound cost estimate
-        other = list()
+        other = []
         fixed_i, fixed_j = i, j
         if m <= n:
             candidates = (
@@ -983,8 +974,9 @@ def optimize_edit_paths(
             # assert not len(pending_g)
             # assert not len(pending_h)
             # path completed!
-            # assert matched_cost <= maxcost.value
-            maxcost.value = min(maxcost.value, matched_cost)
+            # assert matched_cost <= maxcost_value
+            nonlocal maxcost_value
+            maxcost_value = min(maxcost_value, matched_cost)
             yield matched_uv, matched_gh, matched_cost
 
         else:
@@ -1017,16 +1009,16 @@ def optimize_edit_paths(
                             pending_h[y] if y < len_h else None,
                         )
                     )
-                sortedx = list(sorted(x for x, y in xy))
-                sortedy = list(sorted(y for x, y in xy))
-                G = list(
+                sortedx = sorted(x for x, y in xy)
+                sortedy = sorted(y for x, y in xy)
+                G = [
                     (pending_g.pop(x) if x < len(pending_g) else None)
                     for x in reversed(sortedx)
-                )
-                H = list(
+                ]
+                H = [
                     (pending_h.pop(y) if y < len(pending_h) else None)
                     for y in reversed(sortedy)
-                )
+                ]
 
                 yield from get_edit_paths(
                     matched_uv,
@@ -1052,7 +1044,7 @@ def optimize_edit_paths(
                 for y, h in zip(sortedy, reversed(H)):
                     if h is not None:
                         pending_h.insert(y, h)
-                for t in xy:
+                for _ in xy:
                     matched_gh.pop()
 
     # Initialization
@@ -1168,13 +1160,7 @@ def optimize_edit_paths(
     # debug_print(Ce.C)
     # debug_print()
 
-    class MaxCost:
-        def __init__(self):
-            # initial upper-bound estimate
-            # NOTE: should work for empty graph
-            self.value = Cv.C.sum() + Ce.C.sum() + 1
-
-    maxcost = MaxCost()
+    maxcost_value = Cv.C.sum() + Ce.C.sum() + 1
 
     if timeout is not None:
         if timeout <= 0:
@@ -1188,10 +1174,11 @@ def optimize_edit_paths(
         if upper_bound is not None:
             if cost > upper_bound:
                 return True
-        if cost > maxcost.value:
+        if cost > maxcost_value:
             return True
-        elif strictly_decreasing and cost >= maxcost.value:
+        if strictly_decreasing and cost >= maxcost_value:
             return True
+        return False
 
     # Now go!
 
@@ -1205,7 +1192,7 @@ def optimize_edit_paths(
         # assert sorted(G1.edges) == sorted(g for g, h in edge_path if g is not None)
         # assert sorted(G2.edges) == sorted(h for g, h in edge_path if h is not None)
         # print(vertex_path, edge_path, cost, file = sys.stderr)
-        # assert cost == maxcost.value
+        # assert cost == maxcost_value
         yield list(vertex_path), list(edge_path), cost
 
 
@@ -1325,9 +1312,9 @@ def simrank_similarity(
 
     if isinstance(x, np.ndarray):
         if x.ndim == 1:
-            return {node: val for node, val in zip(G, x)}
-        else:  # x.ndim == 2:
-            return {u: dict(zip(G, row)) for u, row in zip(G, x)}
+            return dict(zip(G, x))
+        # else x.ndim == 2
+        return {u: dict(zip(G, row)) for u, row in zip(G, x)}
     return x
 
 
@@ -1500,34 +1487,6 @@ def _simrank_similarity_numpy(
     return newsim
 
 
-def simrank_similarity_numpy(
-    G,
-    source=None,
-    target=None,
-    importance_factor=0.9,
-    max_iterations=100,
-    tolerance=1e-4,
-):
-    """Calculate SimRank of nodes in ``G`` using matrices with ``numpy``.
-
-    .. deprecated:: 2.6
-        simrank_similarity_numpy is deprecated and will be removed in networkx 3.0.
-        Use simrank_similarity
-
-    """
-    warnings.warn(
-        (
-            "networkx.simrank_similarity_numpy is deprecated and will be removed"
-            "in NetworkX 3.0, use networkx.simrank_similarity instead."
-        ),
-        DeprecationWarning,
-        stacklevel=2,
-    )
-    return _simrank_similarity_numpy(
-        G, source, target, importance_factor, max_iterations, tolerance
-    )
-
-
 def panther_similarity(G, source, k=5, path_length=5, c=0.5, delta=0.1, eps=None):
     r"""Returns the Panther similarity of nodes in the graph `G` to node ``v``.
 
@@ -1625,7 +1584,7 @@ def panther_similarity(G, source, k=5, path_length=5, c=0.5, delta=0.1, eps=None
     top_k_sorted = top_k_unsorted[np.argsort(S[top_k_unsorted])][::-1]
 
     # Add back the similarity scores
-    top_k_sorted_names = map(lambda n: node_map[n], top_k_sorted)
+    top_k_sorted_names = (node_map[n] for n in top_k_sorted)
     top_k_with_val = dict(zip(top_k_sorted_names, S[top_k_sorted]))
 
     # Remove the self-similarity
diff --git a/networkx/algorithms/simple_paths.py b/networkx/algorithms/simple_paths.py
index e19e4e4..f88b03a 100644
--- a/networkx/algorithms/simple_paths.py
+++ b/networkx/algorithms/simple_paths.py
@@ -13,6 +13,7 @@ __all__ = [
 ]
 
 
+@nx._dispatch
 def is_simple_path(G, nodes):
     """Returns True if and only if `nodes` form a simple path in `G`.
 
@@ -71,13 +72,23 @@ def is_simple_path(G, nodes):
     # NetworkXPointlessConcept here.
     if len(nodes) == 0:
         return False
+
     # If the list is a single node, just check that the node is actually
     # in the graph.
     if len(nodes) == 1:
         return nodes[0] in G
-    # Test that no node appears more than once, and that each
-    # adjacent pair of nodes is adjacent.
-    return len(set(nodes)) == len(nodes) and all(v in G[u] for u, v in pairwise(nodes))
+
+    # check that all nodes in the list are in the graph, if at least one
+    # is not in the graph, then this is not a simple path
+    if not all(n in G for n in nodes):
+        return False
+
+    # If the list contains repeated nodes, then it's not a simple path
+    if len(set(nodes)) != len(nodes):
+        return False
+
+    # Test that each adjacent pair of nodes is adjacent.
+    return all(v in G[u] for u, v in pairwise(nodes))
 
 
 def all_simple_paths(G, source, target, cutoff=None):
@@ -256,7 +267,7 @@ def _empty_generator():
 
 
 def _all_simple_paths_graph(G, source, targets, cutoff):
-    visited = dict.fromkeys([source])
+    visited = {source: True}
     stack = [iter(G[source])]
     while stack:
         children = stack[-1]
@@ -269,7 +280,7 @@ def _all_simple_paths_graph(G, source, targets, cutoff):
                 continue
             if child in targets:
                 yield list(visited) + [child]
-            visited[child] = None
+            visited[child] = True
             if targets - set(visited.keys()):  # expand stack until find all targets
                 stack.append(iter(G[child]))
             else:
@@ -282,7 +293,7 @@ def _all_simple_paths_graph(G, source, targets, cutoff):
 
 
 def _all_simple_paths_multigraph(G, source, targets, cutoff):
-    visited = dict.fromkeys([source])
+    visited = {source: True}
     stack = [(v for u, v in G.edges(source))]
     while stack:
         children = stack[-1]
@@ -295,7 +306,7 @@ def _all_simple_paths_multigraph(G, source, targets, cutoff):
                 continue
             if child in targets:
                 yield list(visited) + [child]
-            visited[child] = None
+            visited[child] = True
             if targets - set(visited.keys()):
                 stack.append((v for u, v in G.edges(child)))
             else:
@@ -422,7 +433,7 @@ def _all_simple_edge_paths_multigraph(G, source, targets, cutoff):
                 visited.append(child)
                 stack.append(iter(G.edges(child[1], keys=True)))
         else:  # len(visited) == cutoff:
-            for (u, v, k) in [child] + list(children):
+            for u, v, k in [child] + list(children):
                 if v in targets:
                     yield visited[1:] + [(u, v, k)]
             stack.pop()
@@ -536,7 +547,7 @@ def shortest_simple_paths(G, source, target, weight=None):
 
         shortest_path_func = _bidirectional_dijkstra
 
-    listA = list()
+    listA = []
     listB = PathBuffer()
     prev_path = None
     while True:
@@ -579,7 +590,7 @@ def shortest_simple_paths(G, source, target, weight=None):
 class PathBuffer:
     def __init__(self):
         self.paths = set()
-        self.sortedpaths = list()
+        self.sortedpaths = []
         self.counter = count()
 
     def __len__(self):
diff --git a/networkx/algorithms/smallworld.py b/networkx/algorithms/smallworld.py
index fc64d13..9b039e3 100644
--- a/networkx/algorithms/smallworld.py
+++ b/networkx/algorithms/smallworld.py
@@ -46,6 +46,11 @@ def random_reference(G, niter=1, connectivity=True, seed=None):
     G : graph
         The randomized graph.
 
+    Raises
+    ------
+    NetworkXError
+        If there are fewer than 4 nodes or 2 edges in `G`
+
     Notes
     -----
     The implementation is adapted from the algorithm by Maslov and Sneppen
@@ -58,7 +63,9 @@ def random_reference(G, niter=1, connectivity=True, seed=None):
            Science 296.5569 (2002): 910-913.
     """
     if len(G) < 4:
-        raise nx.NetworkXError("Graph has less than four nodes.")
+        raise nx.NetworkXError("Graph has fewer than four nodes.")
+    if len(G.edges) < 2:
+        raise nx.NetworkXError("Graph has fewer that 2 edges")
 
     from networkx.utils import cumulative_distribution, discrete_sequence
 
@@ -119,10 +126,10 @@ def lattice_reference(G, niter=5, D=None, connectivity=True, seed=None):
     Parameters
     ----------
     G : graph
-        An undirected graph with 4 or more nodes.
+        An undirected graph.
 
     niter : integer (optional, default=1)
-        An edge is rewired approximatively niter times.
+        An edge is rewired approximately niter times.
 
     D : numpy.array (optional, default=None)
         Distance to the diagonal matrix.
@@ -139,6 +146,11 @@ def lattice_reference(G, niter=5, D=None, connectivity=True, seed=None):
     G : graph
         The latticized graph.
 
+    Raises
+    ------
+    NetworkXError
+        If there are fewer than 4 nodes or 2 edges in `G`
+
     Notes
     -----
     The implementation is adapted from the algorithm by Sporns et al. [1]_.
@@ -160,7 +172,9 @@ def lattice_reference(G, niter=5, D=None, connectivity=True, seed=None):
     local_conn = nx.connectivity.local_edge_connectivity
 
     if len(G) < 4:
-        raise nx.NetworkXError("Graph has less than four nodes.")
+        raise nx.NetworkXError("Graph has fewer than four nodes.")
+    if len(G.edges) < 2:
+        raise nx.NetworkXError("Graph has fewer that 2 edges")
     # Instead of choosing uniformly at random from a generated edge list,
     # this algorithm chooses nonuniformly from the set of nodes with
     # probability weighted by degree.
diff --git a/networkx/algorithms/smetric.py b/networkx/algorithms/smetric.py
index b851e1e..785b2da 100644
--- a/networkx/algorithms/smetric.py
+++ b/networkx/algorithms/smetric.py
@@ -3,6 +3,7 @@ import networkx as nx
 __all__ = ["s_metric"]
 
 
+@nx._dispatch
 def s_metric(G, normalized=True):
     """Returns the s-metric of graph.
 
diff --git a/networkx/algorithms/structuralholes.py b/networkx/algorithms/structuralholes.py
index 55cdfe4..9f67f59 100644
--- a/networkx/algorithms/structuralholes.py
+++ b/networkx/algorithms/structuralholes.py
@@ -5,6 +5,7 @@ import networkx as nx
 __all__ = ["constraint", "local_constraint", "effective_size"]
 
 
+@nx._dispatch
 def mutual_weight(G, u, v, weight=None):
     """Returns the sum of the weights of the edge from `u` to `v` and
     the edge from `v` to `u` in `G`.
diff --git a/networkx/algorithms/summarization.py b/networkx/algorithms/summarization.py
index 16c7e62..73c5423 100644
--- a/networkx/algorithms/summarization.py
+++ b/networkx/algorithms/summarization.py
@@ -20,7 +20,7 @@ nodes called compressor or virtual nodes to reduce the total number of edges in
 a graph. Edge-grouping techniques can be lossless, meaning that they can be
 used to re-create the original graph, or techniques can be lossy, requiring
 less space to store the summary graph, but at the expense of lower
-recontruction accuracy of the original graph.
+reconstruction accuracy of the original graph.
 
 Bit-compression techniques minimize the amount of information needed to
 describe the original graph, while revealing structural patterns in the
@@ -177,20 +177,20 @@ def dedensify(G, threshold, prefix=None, copy=True):
     high_degree_nodes = {n for n, d in degrees if d > threshold}
     low_degree_nodes = G.nodes() - high_degree_nodes
 
-    auxillary = {}
+    auxiliary = {}
     for node in G:
         high_degree_neighbors = frozenset(high_degree_nodes & set(G[node]))
         if high_degree_neighbors:
-            if high_degree_neighbors in auxillary:
-                auxillary[high_degree_neighbors].add(node)
+            if high_degree_neighbors in auxiliary:
+                auxiliary[high_degree_neighbors].add(node)
             else:
-                auxillary[high_degree_neighbors] = {node}
+                auxiliary[high_degree_neighbors] = {node}
 
     if copy:
         G = G.copy()
 
     compressor_nodes = set()
-    for index, (high_degree_nodes, low_degree_nodes) in enumerate(auxillary.items()):
+    for index, (high_degree_nodes, low_degree_nodes) in enumerate(auxiliary.items()):
         low_degree_node_count = len(low_degree_nodes)
         high_degree_node_count = len(high_degree_nodes)
         old_edges = high_degree_node_count * low_degree_node_count
@@ -259,7 +259,7 @@ def _snap_build_graph(
     summary graph: Networkx graph
     """
     output = G.__class__()
-    node_label_lookup = dict()
+    node_label_lookup = {}
     for index, group_id in enumerate(groups):
         group_set = groups[group_id]
         supernode = f"{prefix}{index}"
diff --git a/networkx/algorithms/swap.py b/networkx/algorithms/swap.py
index 26a1f31..4c74838 100644
--- a/networkx/algorithms/swap.py
+++ b/networkx/algorithms/swap.py
@@ -6,7 +6,127 @@ import math
 import networkx as nx
 from networkx.utils import py_random_state
 
-__all__ = ["double_edge_swap", "connected_double_edge_swap"]
+__all__ = ["double_edge_swap", "connected_double_edge_swap", "directed_edge_swap"]
+
+
+@py_random_state(3)
+@nx.utils.not_implemented_for("undirected")
+def directed_edge_swap(G, *, nswap=1, max_tries=100, seed=None):
+    """Swap three edges in a directed graph while keeping the node degrees fixed.
+
+    A directed edge swap swaps three edges such that a -> b -> c -> d becomes
+    a -> c -> b -> d. This pattern of swapping allows all possible states with the
+    same in- and out-degree distribution in a directed graph to be reached.
+
+    If the swap would create parallel edges (e.g. if a -> c already existed in the
+    previous example), another attempt is made to find a suitable trio of edges.
+
+    Parameters
+    ----------
+    G : DiGraph
+       A directed graph
+
+    nswap : integer (optional, default=1)
+       Number of three-edge (directed) swaps to perform
+
+    max_tries : integer (optional, default=100)
+       Maximum number of attempts to swap edges
+
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness<randomness>`.
+
+    Returns
+    -------
+    G : DiGraph
+       The graph after the edges are swapped.
+
+    Raises
+    ------
+    NetworkXError
+        If `G` is not directed, or
+        If nswap > max_tries, or
+        If there are fewer than 4 nodes or 3 edges in `G`.
+    NetworkXAlgorithmError
+        If the number of swap attempts exceeds `max_tries` before `nswap` swaps are made
+
+    Notes
+    -----
+    Does not enforce any connectivity constraints.
+
+    The graph G is modified in place.
+
+    References
+    ----------
+    .. [1] Erdős, Péter L., et al. “A Simple Havel-Hakimi Type Algorithm to Realize
+           Graphical Degree Sequences of Directed Graphs.” ArXiv:0905.4913 [Math],
+           Jan. 2010. https://doi.org/10.48550/arXiv.0905.4913.
+           Published  2010 in Elec. J. Combinatorics (17(1)). R66.
+           http://www.combinatorics.org/Volume_17/PDF/v17i1r66.pdf
+    .. [2] “Combinatorics - Reaching All Possible Simple Directed Graphs with a given
+           Degree Sequence with 2-Edge Swaps.” Mathematics Stack Exchange,
+           https://math.stackexchange.com/questions/22272/. Accessed 30 May 2022.
+    """
+    if nswap > max_tries:
+        raise nx.NetworkXError("Number of swaps > number of tries allowed.")
+    if len(G) < 4:
+        raise nx.NetworkXError("DiGraph has fewer than four nodes.")
+    if len(G.edges) < 3:
+        raise nx.NetworkXError("DiGraph has fewer than 3 edges")
+
+    # Instead of choosing uniformly at random from a generated edge list,
+    # this algorithm chooses nonuniformly from the set of nodes with
+    # probability weighted by degree.
+    tries = 0
+    swapcount = 0
+    keys, degrees = zip(*G.degree())  # keys, degree
+    cdf = nx.utils.cumulative_distribution(degrees)  # cdf of degree
+    discrete_sequence = nx.utils.discrete_sequence
+
+    while swapcount < nswap:
+        # choose source node index from discrete distribution
+        start_index = discrete_sequence(1, cdistribution=cdf, seed=seed)[0]
+        start = keys[start_index]
+        tries += 1
+
+        if tries > max_tries:
+            msg = f"Maximum number of swap attempts ({tries}) exceeded before desired swaps achieved ({nswap})."
+            raise nx.NetworkXAlgorithmError(msg)
+
+        # If the given node doesn't have any out edges, then there isn't anything to swap
+        if G.out_degree(start) == 0:
+            continue
+        second = seed.choice(list(G.succ[start]))
+        if start == second:
+            continue
+
+        if G.out_degree(second) == 0:
+            continue
+        third = seed.choice(list(G.succ[second]))
+        if second == third:
+            continue
+
+        if G.out_degree(third) == 0:
+            continue
+        fourth = seed.choice(list(G.succ[third]))
+        if third == fourth:
+            continue
+
+        if (
+            third not in G.succ[start]
+            and fourth not in G.succ[second]
+            and second not in G.succ[third]
+        ):
+            # Swap nodes
+            G.add_edge(start, third)
+            G.add_edge(third, second)
+            G.add_edge(second, fourth)
+            G.remove_edge(start, second)
+            G.remove_edge(second, third)
+            G.remove_edge(third, fourth)
+            swapcount += 1
+
+    return G
 
 
 @py_random_state(3)
@@ -43,6 +163,15 @@ def double_edge_swap(G, nswap=1, max_tries=100, seed=None):
     G : graph
        The graph after double edge swaps.
 
+    Raises
+    ------
+    NetworkXError
+        If `G` is directed, or
+        If `nswap` > `max_tries`, or
+        If there are fewer than 4 nodes or 2 edges in `G`.
+    NetworkXAlgorithmError
+        If the number of swap attempts exceeds `max_tries` before `nswap` swaps are made
+
     Notes
     -----
     Does not enforce any connectivity constraints.
@@ -50,11 +179,15 @@ def double_edge_swap(G, nswap=1, max_tries=100, seed=None):
     The graph G is modified in place.
     """
     if G.is_directed():
-        raise nx.NetworkXError("double_edge_swap() not defined for directed graphs.")
+        raise nx.NetworkXError(
+            "double_edge_swap() not defined for directed graphs. Use directed_edge_swap instead."
+        )
     if nswap > max_tries:
         raise nx.NetworkXError("Number of swaps > number of tries allowed.")
     if len(G) < 4:
-        raise nx.NetworkXError("Graph has less than four nodes.")
+        raise nx.NetworkXError("Graph has fewer than four nodes.")
+    if len(G.edges) < 2:
+        raise nx.NetworkXError("Graph has fewer than 2 edges")
     # Instead of choosing uniformly at random from a generated edge list,
     # this algorithm chooses nonuniformly from the set of nodes with
     # probability weighted by degree.
@@ -165,13 +298,13 @@ def connected_double_edge_swap(G, nswap=1, _window_threshold=3, seed=None):
     if not nx.is_connected(G):
         raise nx.NetworkXError("Graph not connected")
     if len(G) < 4:
-        raise nx.NetworkXError("Graph has less than four nodes.")
+        raise nx.NetworkXError("Graph has fewer than four nodes.")
     n = 0
     swapcount = 0
     deg = G.degree()
     # Label key for nodes
-    dk = list(n for n, d in G.degree())
-    cdf = nx.utils.cumulative_distribution(list(d for n, d in G.degree()))
+    dk = [n for n, d in G.degree()]
+    cdf = nx.utils.cumulative_distribution([d for n, d in G.degree()])
     discrete_sequence = nx.utils.discrete_sequence
     window = 1
     while n < nswap:
@@ -230,7 +363,7 @@ def connected_double_edge_swap(G, nswap=1, _window_threshold=3, seed=None):
             while wcount < window and n < nswap:
                 # Pick two random edges without creating the edge list. Choose
                 # source nodes from the discrete degree distribution.
-                (ui, xi) = nx.utils.discrete_sequence(2, cdistribution=cdf)
+                (ui, xi) = discrete_sequence(2, cdistribution=cdf, seed=seed)
                 # If the source nodes are the same, skip this pair.
                 if ui == xi:
                     continue
diff --git a/networkx/algorithms/tests/test_asteroidal.py b/networkx/algorithms/tests/test_asteroidal.py
index b0487af..67131b2 100644
--- a/networkx/algorithms/tests/test_asteroidal.py
+++ b/networkx/algorithms/tests/test_asteroidal.py
@@ -2,7 +2,6 @@ import networkx as nx
 
 
 def test_is_at_free():
-
     is_at_free = nx.asteroidal.is_at_free
 
     cycle = nx.cycle_graph(6)
diff --git a/networkx/algorithms/tests/test_chordal.py b/networkx/algorithms/tests/test_chordal.py
index c72699c..132698a 100644
--- a/networkx/algorithms/tests/test_chordal.py
+++ b/networkx/algorithms/tests/test_chordal.py
@@ -89,22 +89,22 @@ class TestMCS:
             frozenset([2, 3, 4]),
             frozenset([3, 4, 5, 6]),
         }
-        assert nx.chordal_graph_cliques(self.chordal_G) == cliques
+        assert set(nx.chordal_graph_cliques(self.chordal_G)) == cliques
         with pytest.raises(nx.NetworkXError, match="Input graph is not chordal"):
-            nx.chordal_graph_cliques(self.non_chordal_G)
+            set(nx.chordal_graph_cliques(self.non_chordal_G))
         with pytest.raises(nx.NetworkXError, match="Input graph is not chordal"):
-            nx.chordal_graph_cliques(self.self_loop_G)
+            set(nx.chordal_graph_cliques(self.self_loop_G))
 
     def test_chordal_find_cliques_path(self):
         G = nx.path_graph(10)
         cliqueset = nx.chordal_graph_cliques(G)
-        for (u, v) in G.edges():
+        for u, v in G.edges():
             assert frozenset([u, v]) in cliqueset or frozenset([v, u]) in cliqueset
 
     def test_chordal_find_cliquesCC(self):
         cliques = {frozenset([1, 2, 3]), frozenset([2, 3, 4]), frozenset([3, 4, 5, 6])}
         cgc = nx.chordal_graph_cliques
-        assert cgc(self.connected_chordal_G) == cliques
+        assert set(cgc(self.connected_chordal_G)) == cliques
 
     def test_complete_to_chordal_graph(self):
         fgrg = nx.fast_gnp_random_graph
diff --git a/networkx/algorithms/tests/test_clique.py b/networkx/algorithms/tests/test_clique.py
index f6d5335..1bcbd4f 100644
--- a/networkx/algorithms/tests/test_clique.py
+++ b/networkx/algorithms/tests/test_clique.py
@@ -69,80 +69,96 @@ class TestCliques:
 
     def test_clique_number(self):
         G = self.G
-        assert nx.graph_clique_number(G) == 4
-        assert nx.graph_clique_number(G, cliques=self.cl) == 4
+        with pytest.deprecated_call():
+            assert nx.graph_clique_number(G) == 4
+        with pytest.deprecated_call():
+            assert nx.graph_clique_number(G, cliques=self.cl) == 4
 
     def test_clique_number2(self):
         G = nx.Graph()
         G.add_nodes_from([1, 2, 3])
-        assert nx.graph_clique_number(G) == 1
+        with pytest.deprecated_call():
+            assert nx.graph_clique_number(G) == 1
 
     def test_clique_number3(self):
         G = nx.Graph()
-        assert nx.graph_clique_number(G) == 0
+        with pytest.deprecated_call():
+            assert nx.graph_clique_number(G) == 0
 
     def test_number_of_cliques(self):
         G = self.G
-        assert nx.graph_number_of_cliques(G) == 5
-        assert nx.graph_number_of_cliques(G, cliques=self.cl) == 5
-        assert nx.number_of_cliques(G, 1) == 1
-        assert list(nx.number_of_cliques(G, [1]).values()) == [1]
-        assert list(nx.number_of_cliques(G, [1, 2]).values()) == [1, 2]
-        assert nx.number_of_cliques(G, [1, 2]) == {1: 1, 2: 2}
-        assert nx.number_of_cliques(G, 2) == 2
-        assert nx.number_of_cliques(G) == {
-            1: 1,
-            2: 2,
-            3: 1,
-            4: 2,
-            5: 1,
-            6: 2,
-            7: 1,
-            8: 1,
-            9: 1,
-            10: 1,
-            11: 1,
-        }
-        assert nx.number_of_cliques(G, nodes=list(G)) == {
-            1: 1,
-            2: 2,
-            3: 1,
-            4: 2,
-            5: 1,
-            6: 2,
-            7: 1,
-            8: 1,
-            9: 1,
-            10: 1,
-            11: 1,
-        }
-        assert nx.number_of_cliques(G, nodes=[2, 3, 4]) == {2: 2, 3: 1, 4: 2}
-        assert nx.number_of_cliques(G, cliques=self.cl) == {
-            1: 1,
-            2: 2,
-            3: 1,
-            4: 2,
-            5: 1,
-            6: 2,
-            7: 1,
-            8: 1,
-            9: 1,
-            10: 1,
-            11: 1,
-        }
-        assert nx.number_of_cliques(G, list(G), cliques=self.cl) == {
-            1: 1,
-            2: 2,
-            3: 1,
-            4: 2,
-            5: 1,
-            6: 2,
-            7: 1,
-            8: 1,
-            9: 1,
-            10: 1,
-            11: 1,
-        }
+        with pytest.deprecated_call():
+            assert nx.graph_number_of_cliques(G) == 5
+        with pytest.deprecated_call():
+            assert nx.graph_number_of_cliques(G, cliques=self.cl) == 5
+        with pytest.deprecated_call():
+            assert nx.number_of_cliques(G, 1) == 1
+        with pytest.deprecated_call():
+            assert list(nx.number_of_cliques(G, [1]).values()) == [1]
+        with pytest.deprecated_call():
+            assert list(nx.number_of_cliques(G, [1, 2]).values()) == [1, 2]
+        with pytest.deprecated_call():
+            assert nx.number_of_cliques(G, [1, 2]) == {1: 1, 2: 2}
+        with pytest.deprecated_call():
+            assert nx.number_of_cliques(G, 2) == 2
+        with pytest.deprecated_call():
+            assert nx.number_of_cliques(G) == {
+                1: 1,
+                2: 2,
+                3: 1,
+                4: 2,
+                5: 1,
+                6: 2,
+                7: 1,
+                8: 1,
+                9: 1,
+                10: 1,
+                11: 1,
+            }
+        with pytest.deprecated_call():
+            assert nx.number_of_cliques(G, nodes=list(G)) == {
+                1: 1,
+                2: 2,
+                3: 1,
+                4: 2,
+                5: 1,
+                6: 2,
+                7: 1,
+                8: 1,
+                9: 1,
+                10: 1,
+                11: 1,
+            }
+        with pytest.deprecated_call():
+            assert nx.number_of_cliques(G, nodes=[2, 3, 4]) == {2: 2, 3: 1, 4: 2}
+        with pytest.deprecated_call():
+            assert nx.number_of_cliques(G, cliques=self.cl) == {
+                1: 1,
+                2: 2,
+                3: 1,
+                4: 2,
+                5: 1,
+                6: 2,
+                7: 1,
+                8: 1,
+                9: 1,
+                10: 1,
+                11: 1,
+            }
+        with pytest.deprecated_call():
+            assert nx.number_of_cliques(G, list(G), cliques=self.cl) == {
+                1: 1,
+                2: 2,
+                3: 1,
+                4: 2,
+                5: 1,
+                6: 2,
+                7: 1,
+                8: 1,
+                9: 1,
+                10: 1,
+                11: 1,
+            }
 
     def test_node_clique_number(self):
         G = self.G
@@ -182,23 +198,31 @@ class TestCliques:
 
     def test_cliques_containing_node(self):
         G = self.G
-        assert nx.cliques_containing_node(G, 1) == [[2, 6, 1, 3]]
-        assert list(nx.cliques_containing_node(G, [1]).values()) == [[[2, 6, 1, 3]]]
-        assert [
-            sorted(c) for c in list(nx.cliques_containing_node(G, [1, 2]).values())
-        ] == [[[2, 6, 1, 3]], [[2, 6, 1, 3], [2, 6, 4]]]
-        result = nx.cliques_containing_node(G, [1, 2])
+        with pytest.deprecated_call():
+            assert nx.cliques_containing_node(G, 1) == [[2, 6, 1, 3]]
+        with pytest.deprecated_call():
+            assert list(nx.cliques_containing_node(G, [1]).values()) == [[[2, 6, 1, 3]]]
+        with pytest.deprecated_call():
+            assert [
+                sorted(c) for c in list(nx.cliques_containing_node(G, [1, 2]).values())
+            ] == [[[2, 6, 1, 3]], [[2, 6, 1, 3], [2, 6, 4]]]
+        with pytest.deprecated_call():
+            result = nx.cliques_containing_node(G, [1, 2])
         for k, v in result.items():
             result[k] = sorted(v)
         assert result == {1: [[2, 6, 1, 3]], 2: [[2, 6, 1, 3], [2, 6, 4]]}
-        assert nx.cliques_containing_node(G, 1) == [[2, 6, 1, 3]]
+        with pytest.deprecated_call():
+            assert nx.cliques_containing_node(G, 1) == [[2, 6, 1, 3]]
         expected = [{2, 6, 1, 3}, {2, 6, 4}]
-        answer = [set(c) for c in nx.cliques_containing_node(G, 2)]
+        with pytest.deprecated_call():
+            answer = [set(c) for c in nx.cliques_containing_node(G, 2)]
         assert answer in (expected, list(reversed(expected)))
 
-        answer = [set(c) for c in nx.cliques_containing_node(G, 2, cliques=self.cl)]
+        with pytest.deprecated_call():
+            answer = [set(c) for c in nx.cliques_containing_node(G, 2, cliques=self.cl)]
         assert answer in (expected, list(reversed(expected)))
-        assert len(nx.cliques_containing_node(G)) == 11
+        with pytest.deprecated_call():
+            assert len(nx.cliques_containing_node(G)) == 11
 
     def test_make_clique_bipartite(self):
         G = self.G
@@ -233,6 +257,17 @@ class TestCliques:
         with pytest.raises(nx.NetworkXNotImplemented):
             next(nx.find_cliques(nx.DiGraph()))
 
+    def test_find_cliques_trivial(self):
+        G = nx.Graph()
+        assert sorted(nx.find_cliques(G)) == []
+        assert sorted(nx.find_cliques_recursive(G)) == []
+
+    def test_make_max_clique_graph_create_using(self):
+        G = nx.Graph([(1, 2), (3, 1), (4, 1), (5, 6)])
+        E = nx.Graph([(0, 1), (0, 2), (1, 2)])
+        E.add_node(3)
+        assert nx.is_isomorphic(nx.make_max_clique_graph(G, create_using=nx.Graph), E)
+
 
 class TestEnumerateAllCliques:
     def test_paper_figure_4(self):
diff --git a/networkx/algorithms/tests/test_communicability.py b/networkx/algorithms/tests/test_communicability.py
index bf21988..0f44709 100644
--- a/networkx/algorithms/tests/test_communicability.py
+++ b/networkx/algorithms/tests/test_communicability.py
@@ -26,7 +26,6 @@ class TestCommunicability:
                 assert answer[k1][k2] == pytest.approx(result[k1][k2], abs=1e-7)
 
     def test_communicability2(self):
-
         answer_orig = {
             ("1", "1"): 1.6445956054135658,
             ("1", "Albert"): 0.7430186221096251,
diff --git a/networkx/algorithms/tests/test_core.py b/networkx/algorithms/tests/test_core.py
index db2d277..535af31 100644
--- a/networkx/algorithms/tests/test_core.py
+++ b/networkx/algorithms/tests/test_core.py
@@ -1,3 +1,5 @@
+import pytest
+
 import networkx as nx
 from networkx.utils import nodes_equal
 
@@ -67,6 +69,12 @@ class TestCore:
         assert nodes_equal(nodes_by_core[1], [1, 3])
         assert nodes_equal(nodes_by_core[2], [2, 4, 5, 6])
 
+    def test_core_number_self_loop(self):
+        G = nx.cycle_graph(3)
+        G.add_edge(0, 0)
+        with pytest.raises(nx.NetworkXError, match="Input graph has self loops"):
+            nx.core_number(G)
+
     def test_directed_core_number(self):
         """core number had a bug for directed graphs found in issue #1959"""
         # small example where too timid edge removal can make cn[2] = 3
@@ -169,3 +177,9 @@ class TestCore:
         assert nodes_equal(nodes_by_layer[3], [9, 11])
         assert nodes_equal(nodes_by_layer[4], [1, 2, 4, 5, 6, 8])
         assert nodes_equal(nodes_by_layer[5], [3, 7])
+
+    def test_onion_self_loop(self):
+        G = nx.cycle_graph(3)
+        G.add_edge(0, 0)
+        with pytest.raises(nx.NetworkXError, match="Input graph contains self loops"):
+            nx.onion_layers(G)
diff --git a/networkx/algorithms/tests/test_cycles.py b/networkx/algorithms/tests/test_cycles.py
index db62b28..3c4108f 100644
--- a/networkx/algorithms/tests/test_cycles.py
+++ b/networkx/algorithms/tests/test_cycles.py
@@ -1,3 +1,6 @@
+from itertools import chain, islice, tee
+from random import shuffle
+
 import pytest
 
 import networkx
@@ -68,16 +71,13 @@ class TestCycles:
         for c in cc:
             assert any(self.is_cyclic_permutation(c, rc) for rc in ca)
 
-    def test_simple_cycles_graph(self):
-        with pytest.raises(nx.NetworkXNotImplemented):
-            G = nx.Graph()
-            c = sorted(nx.simple_cycles(G))
-
     def test_unsortable(self):
-        #  TODO What does this test do?  das 6/2013
+        # this test ensures that graphs whose nodes without an intrinsic
+        # ordering do not cause issues
         G = nx.DiGraph()
         nx.add_cycle(G, ["a", 1])
         c = list(nx.simple_cycles(G))
+        assert len(c) == 1
 
     def test_simple_cycles_small(self):
         G = nx.DiGraph()
@@ -96,13 +96,6 @@ class TestCycles:
         G = nx.DiGraph()
         assert list(nx.simple_cycles(G)) == []
 
-    def test_complete_directed_graph(self):
-        # see table 2 in Johnson's paper
-        ncircuits = [1, 5, 20, 84, 409, 2365, 16064]
-        for n, c in zip(range(2, 9), ncircuits):
-            G = nx.DiGraph(nx.complete_graph(n))
-            assert len(list(nx.simple_cycles(G))) == c
-
     def worst_case_graph(self, k):
         # see figure 1 in Johnson's paper
         # this graph has exactly 3k simple cycles
@@ -169,6 +162,522 @@ class TestCycles:
             assert any(self.is_cyclic_permutation(rc, c) for c in cc)
 
 
+def pairwise(iterable):
+    a, b = tee(iterable)
+    next(b, None)
+    return zip(a, b)
+
+
+def cycle_edges(c):
+    return pairwise(chain(c, islice(c, 1)))
+
+
+def directed_cycle_edgeset(c):
+    return frozenset(cycle_edges(c))
+
+
+def undirected_cycle_edgeset(c):
+    if len(c) == 1:
+        return frozenset(cycle_edges(c))
+    return frozenset(map(frozenset, cycle_edges(c)))
+
+
+def multigraph_cycle_edgeset(c):
+    if len(c) <= 2:
+        return frozenset(cycle_edges(c))
+    else:
+        return frozenset(map(frozenset, cycle_edges(c)))
+
+
+class TestCycleEnumeration:
+    @staticmethod
+    def K(n):
+        return nx.complete_graph(n)
+
+    @staticmethod
+    def D(n):
+        return nx.complete_graph(n).to_directed()
+
+    @staticmethod
+    def edgeset_function(g):
+        if g.is_directed():
+            return directed_cycle_edgeset
+        elif g.is_multigraph():
+            return multigraph_cycle_edgeset
+        else:
+            return undirected_cycle_edgeset
+
+    def check_cycle(self, g, c, es, cache, source, original_c, length_bound, chordless):
+        if length_bound is not None and len(c) > length_bound:
+            raise RuntimeError(
+                f"computed cycle {original_c} exceeds length bound {length_bound}"
+            )
+        if source == "computed":
+            if es in cache:
+                raise RuntimeError(
+                    f"computed cycle {original_c} has already been found!"
+                )
+            else:
+                cache[es] = tuple(original_c)
+        else:
+            if es in cache:
+                cache.pop(es)
+            else:
+                raise RuntimeError(f"expected cycle {original_c} was not computed")
+
+        if not all(g.has_edge(*e) for e in es):
+            raise RuntimeError(
+                f"{source} claimed cycle {original_c} is not a cycle of g"
+            )
+        if chordless and len(g.subgraph(c).edges) > len(c):
+            raise RuntimeError(f"{source} cycle {original_c} is not chordless")
+
+    def check_cycle_algorithm(
+        self,
+        g,
+        expected_cycles,
+        length_bound=None,
+        chordless=False,
+        algorithm=None,
+    ):
+        if algorithm is None:
+            algorithm = nx.chordless_cycles if chordless else nx.simple_cycles
+
+        # note: we shuffle the labels of g to rule out accidentally-correct
+        # behavior which occurred during the development of chordless cycle
+        # enumeration algorithms
+
+        relabel = list(range(len(g)))
+        shuffle(relabel)
+        label = dict(zip(g, relabel))
+        unlabel = dict(zip(relabel, g))
+        h = nx.relabel_nodes(g, label, copy=True)
+
+        edgeset = self.edgeset_function(h)
+
+        params = {}
+        if length_bound is not None:
+            params["length_bound"] = length_bound
+
+        cycle_cache = {}
+        for c in algorithm(h, **params):
+            original_c = [unlabel[x] for x in c]
+            es = edgeset(c)
+            self.check_cycle(
+                h, c, es, cycle_cache, "computed", original_c, length_bound, chordless
+            )
+
+        if isinstance(expected_cycles, int):
+            if len(cycle_cache) != expected_cycles:
+                raise RuntimeError(
+                    f"expected {expected_cycles} cycles, got {len(cycle_cache)}"
+                )
+            return
+        for original_c in expected_cycles:
+            c = [label[x] for x in original_c]
+            es = edgeset(c)
+            self.check_cycle(
+                h, c, es, cycle_cache, "expected", original_c, length_bound, chordless
+            )
+
+        if len(cycle_cache):
+            for c in cycle_cache.values():
+                raise RuntimeError(
+                    f"computed cycle {c} is valid but not in the expected cycle set!"
+                )
+
+    def check_cycle_enumeration_integer_sequence(
+        self,
+        g_family,
+        cycle_counts,
+        length_bound=None,
+        chordless=False,
+        algorithm=None,
+    ):
+        for g, num_cycles in zip(g_family, cycle_counts):
+            self.check_cycle_algorithm(
+                g,
+                num_cycles,
+                length_bound=length_bound,
+                chordless=chordless,
+                algorithm=algorithm,
+            )
+
+    def test_directed_chordless_cycle_digons(self):
+        g = nx.DiGraph()
+        nx.add_cycle(g, range(5))
+        nx.add_cycle(g, range(5)[::-1])
+        g.add_edge(0, 0)
+        expected_cycles = [(0,), (1, 2), (2, 3), (3, 4)]
+        self.check_cycle_algorithm(g, expected_cycles, chordless=True)
+
+        self.check_cycle_algorithm(g, expected_cycles, chordless=True, length_bound=2)
+
+        expected_cycles = [c for c in expected_cycles if len(c) < 2]
+        self.check_cycle_algorithm(g, expected_cycles, chordless=True, length_bound=1)
+
+    def test_directed_chordless_cycle_undirected(self):
+        g = nx.DiGraph([(1, 2), (2, 3), (3, 4), (4, 5), (5, 0), (5, 1), (0, 2)])
+        expected_cycles = [(0, 2, 3, 4, 5), (1, 2, 3, 4, 5)]
+        self.check_cycle_algorithm(g, expected_cycles, chordless=True)
+
+        g = nx.DiGraph()
+        nx.add_cycle(g, range(5))
+        nx.add_cycle(g, range(4, 9))
+        g.add_edge(7, 3)
+        expected_cycles = [(0, 1, 2, 3, 4), (3, 4, 5, 6, 7), (4, 5, 6, 7, 8)]
+        self.check_cycle_algorithm(g, expected_cycles, chordless=True)
+
+        g.add_edge(3, 7)
+        expected_cycles = [(0, 1, 2, 3, 4), (3, 7), (4, 5, 6, 7, 8)]
+        self.check_cycle_algorithm(g, expected_cycles, chordless=True)
+
+        expected_cycles = [(3, 7)]
+        self.check_cycle_algorithm(g, expected_cycles, chordless=True, length_bound=4)
+
+        g.remove_edge(7, 3)
+        expected_cycles = [(0, 1, 2, 3, 4), (4, 5, 6, 7, 8)]
+        self.check_cycle_algorithm(g, expected_cycles, chordless=True)
+
+        g = nx.DiGraph((i, j) for i in range(10) for j in range(i))
+        expected_cycles = []
+        self.check_cycle_algorithm(g, expected_cycles, chordless=True)
+
+    def test_chordless_cycles_directed(self):
+        G = nx.DiGraph()
+        nx.add_cycle(G, range(5))
+        nx.add_cycle(G, range(4, 12))
+        expected = [[*range(5)], [*range(4, 12)]]
+        self.check_cycle_algorithm(G, expected, chordless=True)
+        self.check_cycle_algorithm(
+            G, [c for c in expected if len(c) <= 5], length_bound=5, chordless=True
+        )
+
+        G.add_edge(7, 3)
+        expected.append([*range(3, 8)])
+        self.check_cycle_algorithm(G, expected, chordless=True)
+        self.check_cycle_algorithm(
+            G, [c for c in expected if len(c) <= 5], length_bound=5, chordless=True
+        )
+
+        G.add_edge(3, 7)
+        expected[-1] = [7, 3]
+        self.check_cycle_algorithm(G, expected, chordless=True)
+        self.check_cycle_algorithm(
+            G, [c for c in expected if len(c) <= 5], length_bound=5, chordless=True
+        )
+
+        expected.pop()
+        G.remove_edge(7, 3)
+        self.check_cycle_algorithm(G, expected, chordless=True)
+        self.check_cycle_algorithm(
+            G, [c for c in expected if len(c) <= 5], length_bound=5, chordless=True
+        )
+
+    def test_directed_chordless_cycle_diclique(self):
+        g_family = [self.D(n) for n in range(10)]
+        expected_cycles = [(n * n - n) // 2 for n in range(10)]
+        self.check_cycle_enumeration_integer_sequence(
+            g_family, expected_cycles, chordless=True
+        )
+
+        expected_cycles = [(n * n - n) // 2 for n in range(10)]
+        self.check_cycle_enumeration_integer_sequence(
+            g_family, expected_cycles, length_bound=2
+        )
+
+    def test_directed_chordless_loop_blockade(self):
+        g = nx.DiGraph((i, i) for i in range(10))
+        nx.add_cycle(g, range(10))
+        expected_cycles = [(i,) for i in range(10)]
+        self.check_cycle_algorithm(g, expected_cycles, chordless=True)
+
+        self.check_cycle_algorithm(g, expected_cycles, length_bound=1)
+
+        g = nx.MultiDiGraph(g)
+        g.add_edges_from((i, i) for i in range(0, 10, 2))
+        expected_cycles = [(i,) for i in range(1, 10, 2)]
+        self.check_cycle_algorithm(g, expected_cycles, chordless=True)
+
+    def test_simple_cycles_notable_clique_sequences(self):
+        # A000292: Number of labeled graphs on n+3 nodes that are triangles.
+        g_family = [self.K(n) for n in range(2, 12)]
+        expected = [0, 1, 4, 10, 20, 35, 56, 84, 120, 165, 220]
+        self.check_cycle_enumeration_integer_sequence(
+            g_family, expected, length_bound=3
+        )
+
+        def triangles(g, **kwargs):
+            yield from (c for c in nx.simple_cycles(g, **kwargs) if len(c) == 3)
+
+        # directed complete graphs have twice as many triangles thanks to reversal
+        g_family = [self.D(n) for n in range(2, 12)]
+        expected = [2 * e for e in expected]
+        self.check_cycle_enumeration_integer_sequence(
+            g_family, expected, length_bound=3, algorithm=triangles
+        )
+
+        def four_cycles(g, **kwargs):
+            yield from (c for c in nx.simple_cycles(g, **kwargs) if len(c) == 4)
+
+        # A050534: the number of 4-cycles in the complete graph K_{n+1}
+        expected = [0, 0, 0, 3, 15, 45, 105, 210, 378, 630, 990]
+        g_family = [self.K(n) for n in range(1, 12)]
+        self.check_cycle_enumeration_integer_sequence(
+            g_family, expected, length_bound=4, algorithm=four_cycles
+        )
+
+        # directed complete graphs have twice as many 4-cycles thanks to reversal
+        expected = [2 * e for e in expected]
+        g_family = [self.D(n) for n in range(1, 15)]
+        self.check_cycle_enumeration_integer_sequence(
+            g_family, expected, length_bound=4, algorithm=four_cycles
+        )
+
+        # A006231: the number of elementary circuits in a complete directed graph with n nodes
+        expected = [0, 1, 5, 20, 84, 409, 2365]
+        g_family = [self.D(n) for n in range(1, 8)]
+        self.check_cycle_enumeration_integer_sequence(g_family, expected)
+
+        # A002807: Number of cycles in the complete graph on n nodes K_{n}.
+        expected = [0, 0, 0, 1, 7, 37, 197, 1172]
+        g_family = [self.K(n) for n in range(8)]
+        self.check_cycle_enumeration_integer_sequence(g_family, expected)
+
+    def test_directed_chordless_cycle_parallel_multiedges(self):
+        g = nx.MultiGraph()
+
+        nx.add_cycle(g, range(5))
+        expected = [[*range(5)]]
+        self.check_cycle_algorithm(g, expected, chordless=True)
+
+        nx.add_cycle(g, range(5))
+        expected = [*cycle_edges(range(5))]
+        self.check_cycle_algorithm(g, expected, chordless=True)
+
+        nx.add_cycle(g, range(5))
+        expected = []
+        self.check_cycle_algorithm(g, expected, chordless=True)
+
+        g = nx.MultiDiGraph()
+
+        nx.add_cycle(g, range(5))
+        expected = [[*range(5)]]
+        self.check_cycle_algorithm(g, expected, chordless=True)
+
+        nx.add_cycle(g, range(5))
+        self.check_cycle_algorithm(g, [], chordless=True)
+
+        nx.add_cycle(g, range(5))
+        self.check_cycle_algorithm(g, [], chordless=True)
+
+        g = nx.MultiDiGraph()
+
+        nx.add_cycle(g, range(5))
+        nx.add_cycle(g, range(5)[::-1])
+        expected = [*cycle_edges(range(5))]
+        self.check_cycle_algorithm(g, expected, chordless=True)
+
+        nx.add_cycle(g, range(5))
+        self.check_cycle_algorithm(g, [], chordless=True)
+
+    def test_chordless_cycles_graph(self):
+        G = nx.Graph()
+        nx.add_cycle(G, range(5))
+        nx.add_cycle(G, range(4, 12))
+        expected = [[*range(5)], [*range(4, 12)]]
+        self.check_cycle_algorithm(G, expected, chordless=True)
+        self.check_cycle_algorithm(
+            G, [c for c in expected if len(c) <= 5], length_bound=5, chordless=True
+        )
+
+        G.add_edge(7, 3)
+        expected.append([*range(3, 8)])
+        expected.append([4, 3, 7, 8, 9, 10, 11])
+        self.check_cycle_algorithm(G, expected, chordless=True)
+        self.check_cycle_algorithm(
+            G, [c for c in expected if len(c) <= 5], length_bound=5, chordless=True
+        )
+
+    def test_chordless_cycles_giant_hamiltonian(self):
+        # ... o - e - o - e - o ... # o = odd, e = even
+        # ... ---/ \-----/ \--- ... # <-- "long" edges
+        #
+        # each long edge belongs to exactly one triangle, and one giant cycle
+        # of length n/2.  The remaining edges each belong to a triangle
+
+        n = 1000
+        assert n % 2 == 0
+        G = nx.Graph()
+        for v in range(n):
+            if not v % 2:
+                G.add_edge(v, (v + 2) % n)
+            G.add_edge(v, (v + 1) % n)
+
+        expected = [[*range(0, n, 2)]] + [
+            [x % n for x in range(i, i + 3)] for i in range(0, n, 2)
+        ]
+        self.check_cycle_algorithm(G, expected, chordless=True)
+        self.check_cycle_algorithm(
+            G, [c for c in expected if len(c) <= 3], length_bound=3, chordless=True
+        )
+
+        # ... o -> e -> o -> e -> o ... # o = odd, e = even
+        # ... <---/ \---<---/ \---< ... # <-- "long" edges
+        #
+        # this time, we orient the short and long edges in opposition
+        # the cycle structure of this graph is the same, but we need to reverse
+        # the long one in our representation.  Also, we need to drop the size
+        # because our partitioning algorithm uses strongly connected components
+        # instead of separating graphs by their strong articulation points
+
+        n = 100
+        assert n % 2 == 0
+        G = nx.DiGraph()
+        for v in range(n):
+            G.add_edge(v, (v + 1) % n)
+            if not v % 2:
+                G.add_edge((v + 2) % n, v)
+
+        expected = [[*range(n - 2, -2, -2)]] + [
+            [x % n for x in range(i, i + 3)] for i in range(0, n, 2)
+        ]
+        self.check_cycle_algorithm(G, expected, chordless=True)
+        self.check_cycle_algorithm(
+            G, [c for c in expected if len(c) <= 3], length_bound=3, chordless=True
+        )
+
+    def test_simple_cycles_acyclic_tournament(self):
+        n = 10
+        G = nx.DiGraph((x, y) for x in range(n) for y in range(x))
+        self.check_cycle_algorithm(G, [])
+        self.check_cycle_algorithm(G, [], chordless=True)
+
+        for k in range(n + 1):
+            self.check_cycle_algorithm(G, [], length_bound=k)
+            self.check_cycle_algorithm(G, [], length_bound=k, chordless=True)
+
+    def test_simple_cycles_graph(self):
+        testG = nx.cycle_graph(8)
+        cyc1 = tuple(range(8))
+        self.check_cycle_algorithm(testG, [cyc1])
+
+        testG.add_edge(4, -1)
+        nx.add_path(testG, [3, -2, -3, -4])
+        self.check_cycle_algorithm(testG, [cyc1])
+
+        testG.update(nx.cycle_graph(range(8, 16)))
+        cyc2 = tuple(range(8, 16))
+        self.check_cycle_algorithm(testG, [cyc1, cyc2])
+
+        testG.update(nx.cycle_graph(range(4, 12)))
+        cyc3 = tuple(range(4, 12))
+        expected = {
+            (0, 1, 2, 3, 4, 5, 6, 7),  # cyc1
+            (8, 9, 10, 11, 12, 13, 14, 15),  # cyc2
+            (4, 5, 6, 7, 8, 9, 10, 11),  # cyc3
+            (4, 5, 6, 7, 8, 15, 14, 13, 12, 11),  # cyc2 + cyc3
+            (0, 1, 2, 3, 4, 11, 10, 9, 8, 7),  # cyc1 + cyc3
+            (0, 1, 2, 3, 4, 11, 12, 13, 14, 15, 8, 7),  # cyc1 + cyc2 + cyc3
+        }
+        self.check_cycle_algorithm(testG, expected)
+        assert len(expected) == (2**3 - 1) - 1  # 1 disjoint comb: cyc1 + cyc2
+
+        # Basis size = 5 (2 loops overlapping gives 5 small loops
+        #        E
+        #       / \         Note: A-F = 10-15
+        #    1-2-3-4-5
+        #    / |   |  \   cyc1=012DAB -- left
+        #   0  D   F  6   cyc2=234E   -- top
+        #   \  |   |  /   cyc3=45678F -- right
+        #    B-A-9-8-7    cyc4=89AC   -- bottom
+        #       \ /       cyc5=234F89AD -- middle
+        #        C
+        #
+        # combinations of 5 basis elements: 2^5 - 1  (one includes no cycles)
+        #
+        # disjoint combs: (11 total) not simple cycles
+        #   Any pair not including cyc5 => choose(4, 2) = 6
+        #   Any triple not including cyc5 => choose(4, 3) = 4
+        #   Any quad not including cyc5 => choose(4, 4) = 1
+        #
+        # we expect 31 - 11 = 20 simple cycles
+        #
+        testG = nx.cycle_graph(12)
+        testG.update(nx.cycle_graph([12, 10, 13, 2, 14, 4, 15, 8]).edges)
+        expected = (2**5 - 1) - 11  # 11 disjoint combinations
+        self.check_cycle_algorithm(testG, expected)
+
+    def test_simple_cycles_bounded(self):
+        # iteratively construct a cluster of nested cycles running in the same direction
+        # there should be one cycle of every length
+        d = nx.DiGraph()
+        expected = []
+        for n in range(10):
+            nx.add_cycle(d, range(n))
+            expected.append(n)
+            for k, e in enumerate(expected):
+                self.check_cycle_algorithm(d, e, length_bound=k)
+
+        # iteratively construct a path of undirected cycles, connected at articulation
+        # points.  there should be one cycle of every length except 2: no digons
+        g = nx.Graph()
+        top = 0
+        expected = []
+        for n in range(10):
+            expected.append(n if n < 2 else n - 1)
+            if n == 2:
+                # no digons in undirected graphs
+                continue
+            nx.add_cycle(g, range(top, top + n))
+            top += n
+            for k, e in enumerate(expected):
+                self.check_cycle_algorithm(g, e, length_bound=k)
+
+    def test_simple_cycles_bound_corner_cases(self):
+        G = nx.cycle_graph(4)
+        DG = nx.cycle_graph(4, create_using=nx.DiGraph)
+        assert list(nx.simple_cycles(G, length_bound=0)) == []
+        assert list(nx.simple_cycles(DG, length_bound=0)) == []
+        assert list(nx.chordless_cycles(G, length_bound=0)) == []
+        assert list(nx.chordless_cycles(DG, length_bound=0)) == []
+
+    def test_simple_cycles_bound_error(self):
+        with pytest.raises(ValueError):
+            G = nx.DiGraph()
+            for c in nx.simple_cycles(G, -1):
+                assert False
+
+        with pytest.raises(ValueError):
+            G = nx.Graph()
+            for c in nx.simple_cycles(G, -1):
+                assert False
+
+        with pytest.raises(ValueError):
+            G = nx.Graph()
+            for c in nx.chordless_cycles(G, -1):
+                assert False
+
+        with pytest.raises(ValueError):
+            G = nx.DiGraph()
+            for c in nx.chordless_cycles(G, -1):
+                assert False
+
+    def test_chordless_cycles_clique(self):
+        g_family = [self.K(n) for n in range(2, 15)]
+        expected = [0, 1, 4, 10, 20, 35, 56, 84, 120, 165, 220, 286, 364]
+        self.check_cycle_enumeration_integer_sequence(
+            g_family, expected, chordless=True
+        )
+
+        # directed cliques have as many digons as undirected graphs have edges
+        expected = [(n * n - n) // 2 for n in range(15)]
+        g_family = [self.D(n) for n in range(15)]
+        self.check_cycle_enumeration_integer_sequence(
+            g_family, expected, chordless=True
+        )
+
+
 # These tests might fail with hash randomization since they depend on
 # edge_dfs. For more information, see the comments in:
 #    networkx/algorithms/traversal/tests/test_edgedfs.py
@@ -349,7 +858,7 @@ class TestMinimumCycles:
     def test_complete_graph(self):
         cg = nx.complete_graph(5)
         mcb = minimum_cycle_basis(cg)
-        assert all([len(cycle) == 3 for cycle in mcb])
+        assert all(len(cycle) == 3 for cycle in mcb)
 
     def test_tree_graph(self):
         tg = nx.balanced_tree(3, 3)
diff --git a/networkx/algorithms/tests/test_d_separation.py b/networkx/algorithms/tests/test_d_separation.py
index 23367a0..74c16ae 100644
--- a/networkx/algorithms/tests/test_d_separation.py
+++ b/networkx/algorithms/tests/test_d_separation.py
@@ -132,11 +132,16 @@ def test_undirected_graphs_are_not_supported():
     """
     Test that undirected graphs are not supported.
 
-    d-separation does not apply in the case of undirected graphs.
+    d-separation and its related algorithms do not apply in
+    the case of undirected graphs.
     """
+    g = nx.path_graph(3, nx.Graph)
     with pytest.raises(nx.NetworkXNotImplemented):
-        g = nx.path_graph(3, nx.Graph)
         nx.d_separated(g, {0}, {1}, {2})
+    with pytest.raises(nx.NetworkXNotImplemented):
+        nx.is_minimal_d_separator(g, {0}, {1}, {2})
+    with pytest.raises(nx.NetworkXNotImplemented):
+        nx.minimal_d_separator(g, {0}, {1})
 
 
 def test_cyclic_graphs_raise_error():
@@ -145,9 +150,13 @@ def test_cyclic_graphs_raise_error():
 
     This is because PGMs assume a directed acyclic graph.
     """
+    g = nx.cycle_graph(3, nx.DiGraph)
     with pytest.raises(nx.NetworkXError):
-        g = nx.cycle_graph(3, nx.DiGraph)
         nx.d_separated(g, {0}, {1}, {2})
+    with pytest.raises(nx.NetworkXError):
+        nx.minimal_d_separator(g, {0}, {1})
+    with pytest.raises(nx.NetworkXError):
+        nx.is_minimal_d_separator(g, {0}, {1}, {2})
 
 
 def test_invalid_nodes_raise_error(asia_graph):
@@ -156,3 +165,38 @@ def test_invalid_nodes_raise_error(asia_graph):
     """
     with pytest.raises(nx.NodeNotFound):
         nx.d_separated(asia_graph, {0}, {1}, {2})
+    with pytest.raises(nx.NodeNotFound):
+        nx.is_minimal_d_separator(asia_graph, 0, 1, {2})
+    with pytest.raises(nx.NodeNotFound):
+        nx.minimal_d_separator(asia_graph, 0, 1)
+
+
+def test_minimal_d_separator():
+    # Case 1:
+    # create a graph A -> B <- C
+    # B -> D -> E;
+    # B -> F;
+    # G -> E;
+    edge_list = [("A", "B"), ("C", "B"), ("B", "D"), ("D", "E"), ("B", "F"), ("G", "E")]
+    G = nx.DiGraph(edge_list)
+    assert not nx.d_separated(G, {"B"}, {"E"}, set())
+
+    # minimal set of the corresponding graph
+    # for B and E should be (D,)
+    Zmin = nx.minimal_d_separator(G, "B", "E")
+
+    # the minimal separating set should pass the test for minimality
+    assert nx.is_minimal_d_separator(G, "B", "E", Zmin)
+    assert Zmin == {"D"}
+
+    # Case 2:
+    # create a graph A -> B -> C
+    # B -> D -> C;
+    edge_list = [("A", "B"), ("B", "C"), ("B", "D"), ("D", "C")]
+    G = nx.DiGraph(edge_list)
+    assert not nx.d_separated(G, {"A"}, {"C"}, set())
+    Zmin = nx.minimal_d_separator(G, "A", "C")
+
+    # the minimal separating set should pass the test for minimality
+    assert nx.is_minimal_d_separator(G, "A", "C", Zmin)
+    assert Zmin == {"B"}
diff --git a/networkx/algorithms/tests/test_dag.py b/networkx/algorithms/tests/test_dag.py
index b39b033..7ad6a77 100644
--- a/networkx/algorithms/tests/test_dag.py
+++ b/networkx/algorithms/tests/test_dag.py
@@ -60,6 +60,31 @@ class TestDagLongestPath:
         # this will raise NotImplementedError when nodes need to be ordered
         nx.dag_longest_path(G)
 
+    def test_multigraph_unweighted(self):
+        edges = [(1, 2), (2, 3), (2, 3), (3, 4), (4, 5), (1, 3), (1, 5), (3, 5)]
+        G = nx.MultiDiGraph(edges)
+        assert nx.dag_longest_path(G) == [1, 2, 3, 4, 5]
+
+    def test_multigraph_weighted(self):
+        G = nx.MultiDiGraph()
+        edges = [
+            (1, 2, 2),
+            (2, 3, 2),
+            (1, 3, 1),
+            (1, 3, 5),
+            (1, 3, 2),
+        ]
+        G.add_weighted_edges_from(edges)
+        assert nx.dag_longest_path(G) == [1, 3]
+
+    def test_multigraph_weighted_default_weight(self):
+        G = nx.MultiDiGraph([(1, 2), (2, 3)])  # Unweighted edges
+        G.add_weighted_edges_from([(1, 3, 1), (1, 3, 5), (1, 3, 2)])
+
+        # Default value for default weight is 1
+        assert nx.dag_longest_path(G) == [1, 3]
+        assert nx.dag_longest_path(G, default_weight=3) == [1, 2, 3]
+
 
 class TestDagLongestPathLength:
     """Unit tests for computing the length of a longest path in a
@@ -91,6 +116,23 @@ class TestDagLongestPathLength:
         G.add_weighted_edges_from(edges)
         assert nx.dag_longest_path_length(G) == 5
 
+    def test_multigraph_unweighted(self):
+        edges = [(1, 2), (2, 3), (2, 3), (3, 4), (4, 5), (1, 3), (1, 5), (3, 5)]
+        G = nx.MultiDiGraph(edges)
+        assert nx.dag_longest_path_length(G) == 4
+
+    def test_multigraph_weighted(self):
+        G = nx.MultiDiGraph()
+        edges = [
+            (1, 2, 2),
+            (2, 3, 2),
+            (1, 3, 1),
+            (1, 3, 5),
+            (1, 3, 2),
+        ]
+        G.add_weighted_edges_from(edges)
+        assert nx.dag_longest_path_length(G) == 5
+
 
 class TestDAG:
     @classmethod
@@ -708,3 +750,22 @@ def test_ancestors_descendants_undirected():
     undirected graphs."""
     G = nx.path_graph(5)
     nx.ancestors(G, 2) == nx.descendants(G, 2) == {0, 1, 3, 4}
+
+
+def test_compute_v_structures_raise():
+    G = nx.Graph()
+    pytest.raises(nx.NetworkXNotImplemented, nx.compute_v_structures, G)
+
+
+def test_compute_v_structures():
+    edges = [(0, 1), (0, 2), (3, 2)]
+    G = nx.DiGraph(edges)
+
+    v_structs = set(nx.compute_v_structures(G))
+    assert len(v_structs) == 1
+    assert (0, 2, 3) in v_structs
+
+    edges = [("A", "B"), ("C", "B"), ("B", "D"), ("D", "E"), ("G", "E")]
+    G = nx.DiGraph(edges)
+    v_structs = set(nx.compute_v_structures(G))
+    assert len(v_structs) == 2
diff --git a/networkx/algorithms/tests/test_distance_measures.py b/networkx/algorithms/tests/test_distance_measures.py
index d7cec15..a5066f6 100644
--- a/networkx/algorithms/tests/test_distance_measures.py
+++ b/networkx/algorithms/tests/test_distance_measures.py
@@ -7,15 +7,6 @@ from networkx import convert_node_labels_to_integers as cnlti
 from networkx.algorithms.distance_measures import _extrema_bounding
 
 
-@pytest.mark.parametrize(
-    "compute", ("diameter", "radius", "periphery", "center", "eccentricities")
-)
-def test_extrema_bounding_deprecated(compute):
-    G = nx.complete_graph(3)
-    with pytest.deprecated_call():
-        nx.extrema_bounding(G, compute=compute)
-
-
 def test__extrema_bounding_invalid_compute_kwarg():
     G = nx.path_graph(3)
     with pytest.raises(ValueError, match="compute must be one of"):
@@ -106,6 +97,228 @@ class TestDistance:
             nx.eccentricity(DG)
 
 
+class TestWeightedDistance:
+    def setup_method(self):
+        G = nx.Graph()
+        G.add_edge(0, 1, weight=0.6, cost=0.6, high_cost=6)
+        G.add_edge(0, 2, weight=0.2, cost=0.2, high_cost=2)
+        G.add_edge(2, 3, weight=0.1, cost=0.1, high_cost=1)
+        G.add_edge(2, 4, weight=0.7, cost=0.7, high_cost=7)
+        G.add_edge(2, 5, weight=0.9, cost=0.9, high_cost=9)
+        G.add_edge(1, 5, weight=0.3, cost=0.3, high_cost=3)
+        self.G = G
+        self.weight_fn = lambda v, u, e: 2
+
+    def test_eccentricity_weight_None(self):
+        assert nx.eccentricity(self.G, 1, weight=None) == 3
+        e = nx.eccentricity(self.G, weight=None)
+        assert e[1] == 3
+
+        e = nx.eccentricity(self.G, v=1, weight=None)
+        assert e == 3
+
+        # This behavior changed in version 1.8 (ticket #739)
+        e = nx.eccentricity(self.G, v=[1, 1], weight=None)
+        assert e[1] == 3
+        e = nx.eccentricity(self.G, v=[1, 2], weight=None)
+        assert e[1] == 3
+
+    def test_eccentricity_weight_attr(self):
+        assert nx.eccentricity(self.G, 1, weight="weight") == 1.5
+        e = nx.eccentricity(self.G, weight="weight")
+        assert (
+            e
+            == nx.eccentricity(self.G, weight="cost")
+            != nx.eccentricity(self.G, weight="high_cost")
+        )
+        assert e[1] == 1.5
+
+        e = nx.eccentricity(self.G, v=1, weight="weight")
+        assert e == 1.5
+
+        # This behavior changed in version 1.8 (ticket #739)
+        e = nx.eccentricity(self.G, v=[1, 1], weight="weight")
+        assert e[1] == 1.5
+        e = nx.eccentricity(self.G, v=[1, 2], weight="weight")
+        assert e[1] == 1.5
+
+    def test_eccentricity_weight_fn(self):
+        assert nx.eccentricity(self.G, 1, weight=self.weight_fn) == 6
+        e = nx.eccentricity(self.G, weight=self.weight_fn)
+        assert e[1] == 6
+
+        e = nx.eccentricity(self.G, v=1, weight=self.weight_fn)
+        assert e == 6
+
+        # This behavior changed in version 1.8 (ticket #739)
+        e = nx.eccentricity(self.G, v=[1, 1], weight=self.weight_fn)
+        assert e[1] == 6
+        e = nx.eccentricity(self.G, v=[1, 2], weight=self.weight_fn)
+        assert e[1] == 6
+
+    def test_diameter_weight_None(self):
+        assert nx.diameter(self.G, weight=None) == 3
+
+    def test_diameter_weight_attr(self):
+        assert (
+            nx.diameter(self.G, weight="weight")
+            == nx.diameter(self.G, weight="cost")
+            == 1.6
+            != nx.diameter(self.G, weight="high_cost")
+        )
+
+    def test_diameter_weight_fn(self):
+        assert nx.diameter(self.G, weight=self.weight_fn) == 6
+
+    def test_radius_weight_None(self):
+        assert pytest.approx(nx.radius(self.G, weight=None)) == 2
+
+    def test_radius_weight_attr(self):
+        assert (
+            pytest.approx(nx.radius(self.G, weight="weight"))
+            == pytest.approx(nx.radius(self.G, weight="cost"))
+            == 0.9
+            != nx.radius(self.G, weight="high_cost")
+        )
+
+    def test_radius_weight_fn(self):
+        assert nx.radius(self.G, weight=self.weight_fn) == 4
+
+    def test_periphery_weight_None(self):
+        for v in set(nx.periphery(self.G, weight=None)):
+            assert nx.eccentricity(self.G, v, weight=None) == nx.diameter(
+                self.G, weight=None
+            )
+
+    def test_periphery_weight_attr(self):
+        periphery = set(nx.periphery(self.G, weight="weight"))
+        assert (
+            periphery
+            == set(nx.periphery(self.G, weight="cost"))
+            == set(nx.periphery(self.G, weight="high_cost"))
+        )
+        for v in periphery:
+            assert (
+                nx.eccentricity(self.G, v, weight="high_cost")
+                != nx.eccentricity(self.G, v, weight="weight")
+                == nx.eccentricity(self.G, v, weight="cost")
+                == nx.diameter(self.G, weight="weight")
+                == nx.diameter(self.G, weight="cost")
+                != nx.diameter(self.G, weight="high_cost")
+            )
+            assert nx.eccentricity(self.G, v, weight="high_cost") == nx.diameter(
+                self.G, weight="high_cost"
+            )
+
+    def test_periphery_weight_fn(self):
+        for v in set(nx.periphery(self.G, weight=self.weight_fn)):
+            assert nx.eccentricity(self.G, v, weight=self.weight_fn) == nx.diameter(
+                self.G, weight=self.weight_fn
+            )
+
+    def test_center_weight_None(self):
+        for v in set(nx.center(self.G, weight=None)):
+            assert pytest.approx(nx.eccentricity(self.G, v, weight=None)) == nx.radius(
+                self.G, weight=None
+            )
+
+    def test_center_weight_attr(self):
+        center = set(nx.center(self.G, weight="weight"))
+        assert (
+            center
+            == set(nx.center(self.G, weight="cost"))
+            != set(nx.center(self.G, weight="high_cost"))
+        )
+        for v in center:
+            assert (
+                nx.eccentricity(self.G, v, weight="high_cost")
+                != pytest.approx(nx.eccentricity(self.G, v, weight="weight"))
+                == pytest.approx(nx.eccentricity(self.G, v, weight="cost"))
+                == nx.radius(self.G, weight="weight")
+                == nx.radius(self.G, weight="cost")
+                != nx.radius(self.G, weight="high_cost")
+            )
+            assert nx.eccentricity(self.G, v, weight="high_cost") == nx.radius(
+                self.G, weight="high_cost"
+            )
+
+    def test_center_weight_fn(self):
+        for v in set(nx.center(self.G, weight=self.weight_fn)):
+            assert nx.eccentricity(self.G, v, weight=self.weight_fn) == nx.radius(
+                self.G, weight=self.weight_fn
+            )
+
+    def test_bound_diameter_weight_None(self):
+        assert nx.diameter(self.G, usebounds=True, weight=None) == 3
+
+    def test_bound_diameter_weight_attr(self):
+        assert (
+            nx.diameter(self.G, usebounds=True, weight="high_cost")
+            != nx.diameter(self.G, usebounds=True, weight="weight")
+            == nx.diameter(self.G, usebounds=True, weight="cost")
+            == 1.6
+            != nx.diameter(self.G, usebounds=True, weight="high_cost")
+        )
+        assert nx.diameter(self.G, usebounds=True, weight="high_cost") == nx.diameter(
+            self.G, usebounds=True, weight="high_cost"
+        )
+
+    def test_bound_diameter_weight_fn(self):
+        assert nx.diameter(self.G, usebounds=True, weight=self.weight_fn) == 6
+
+    def test_bound_radius_weight_None(self):
+        assert pytest.approx(nx.radius(self.G, usebounds=True, weight=None)) == 2
+
+    def test_bound_radius_weight_attr(self):
+        assert (
+            nx.radius(self.G, usebounds=True, weight="high_cost")
+            != pytest.approx(nx.radius(self.G, usebounds=True, weight="weight"))
+            == pytest.approx(nx.radius(self.G, usebounds=True, weight="cost"))
+            == 0.9
+            != nx.radius(self.G, usebounds=True, weight="high_cost")
+        )
+        assert nx.radius(self.G, usebounds=True, weight="high_cost") == nx.radius(
+            self.G, usebounds=True, weight="high_cost"
+        )
+
+    def test_bound_radius_weight_fn(self):
+        assert nx.radius(self.G, usebounds=True, weight=self.weight_fn) == 4
+
+    def test_bound_periphery_weight_None(self):
+        result = {1, 3, 4}
+        assert set(nx.periphery(self.G, usebounds=True, weight=None)) == result
+
+    def test_bound_periphery_weight_attr(self):
+        result = {4, 5}
+        assert (
+            set(nx.periphery(self.G, usebounds=True, weight="weight"))
+            == set(nx.periphery(self.G, usebounds=True, weight="cost"))
+            == result
+        )
+
+    def test_bound_periphery_weight_fn(self):
+        result = {1, 3, 4}
+        assert (
+            set(nx.periphery(self.G, usebounds=True, weight=self.weight_fn)) == result
+        )
+
+    def test_bound_center_weight_None(self):
+        result = {0, 2, 5}
+        assert set(nx.center(self.G, usebounds=True, weight=None)) == result
+
+    def test_bound_center_weight_attr(self):
+        result = {0}
+        assert (
+            set(nx.center(self.G, usebounds=True, weight="weight"))
+            == set(nx.center(self.G, usebounds=True, weight="cost"))
+            == result
+        )
+
+    def test_bound_center_weight_fn(self):
+        result = {0, 2, 5}
+        assert set(nx.center(self.G, usebounds=True, weight=self.weight_fn)) == result
+
+
 class TestResistanceDistance:
     @classmethod
     def setup_class(cls):
diff --git a/networkx/algorithms/tests/test_dominance.py b/networkx/algorithms/tests/test_dominance.py
index a8d0882..f026e4b 100644
--- a/networkx/algorithms/tests/test_dominance.py
+++ b/networkx/algorithms/tests/test_dominance.py
@@ -119,7 +119,7 @@ class TestDominanceFrontiers:
         # Software Practice & Experience, 4:110, 2001.
         edges = [(1, 2), (2, 1), (3, 2), (4, 1), (5, 3), (5, 4)]
         G = nx.DiGraph(edges)
-        assert {u: df for u, df in nx.dominance_frontiers(G, 5).items()} == {
+        assert dict(nx.dominance_frontiers(G, 5).items()) == {
             1: {2},
             2: {1},
             3: {2},
diff --git a/networkx/algorithms/tests/test_euler.py b/networkx/algorithms/tests/test_euler.py
index 7dfe2d1..cba66ee 100644
--- a/networkx/algorithms/tests/test_euler.py
+++ b/networkx/algorithms/tests/test_euler.py
@@ -273,3 +273,23 @@ class TestEulerize:
         G = nx.complete_graph(4)
         assert nx.is_eulerian(nx.eulerize(G))
         assert nx.is_eulerian(nx.eulerize(nx.MultiGraph(G)))
+
+    def test_on_non_eulerian_graph(self):
+        G = nx.cycle_graph(18)
+        G.add_edge(0, 18)
+        G.add_edge(18, 19)
+        G.add_edge(17, 19)
+        G.add_edge(4, 20)
+        G.add_edge(20, 21)
+        G.add_edge(21, 22)
+        G.add_edge(22, 23)
+        G.add_edge(23, 24)
+        G.add_edge(24, 25)
+        G.add_edge(25, 26)
+        G.add_edge(26, 27)
+        G.add_edge(27, 28)
+        G.add_edge(28, 13)
+        assert not nx.is_eulerian(G)
+        G = nx.eulerize(G)
+        assert nx.is_eulerian(G)
+        assert nx.number_of_edges(G) == 39
diff --git a/networkx/algorithms/tests/test_graphical.py b/networkx/algorithms/tests/test_graphical.py
index d55ac8c..99f766f 100644
--- a/networkx/algorithms/tests/test_graphical.py
+++ b/networkx/algorithms/tests/test_graphical.py
@@ -42,7 +42,7 @@ class TestAtlas:
     @classmethod
     def setup_class(cls):
         global atlas
-        import networkx.generators.atlas as atlas
+        from networkx.generators import atlas
 
         cls.GAG = atlas.graph_atlas_g()
 
diff --git a/networkx/algorithms/tests/test_lowest_common_ancestors.py b/networkx/algorithms/tests/test_lowest_common_ancestors.py
index 512a1fe..66d7522 100644
--- a/networkx/algorithms/tests/test_lowest_common_ancestors.py
+++ b/networkx/algorithms/tests/test_lowest_common_ancestors.py
@@ -48,7 +48,7 @@ class TestTreeLCA:
 
     @staticmethod
     def assert_has_same_pairs(d1, d2):
-        for (a, b) in ((min(pair), max(pair)) for pair in chain(d1, d2)):
+        for a, b in ((min(pair), max(pair)) for pair in chain(d1, d2)):
             assert get_pair(d1, a, b) == get_pair(d2, a, b)
 
     def test_tree_all_pairs_lca_default_root(self):
@@ -136,12 +136,6 @@ class TestTreeLCA:
         with pytest.raises(NNI):
             next(all_pairs_lca(G))
         pytest.raises(NNI, nx.lowest_common_ancestor, G, 0, 1)
-        G = nx.MultiDiGraph([(0, 1)])
-        with pytest.raises(NNI):
-            next(tree_all_pairs_lca(G))
-        with pytest.raises(NNI):
-            next(all_pairs_lca(G))
-        pytest.raises(NNI, nx.lowest_common_ancestor, G, 0, 1)
 
     def test_tree_all_pairs_lca_trees_without_LCAs(self):
         G = nx.DiGraph()
@@ -150,6 +144,41 @@ class TestTreeLCA:
         assert ans == [((3, 3), 3)]
 
 
+class TestMultiTreeLCA(TestTreeLCA):
+    @classmethod
+    def setup_class(cls):
+        cls.DG = nx.MultiDiGraph()
+        edges = [(0, 1), (0, 2), (1, 3), (1, 4), (2, 5), (2, 6)]
+        cls.DG.add_edges_from(edges)
+        cls.ans = dict(tree_all_pairs_lca(cls.DG, 0))
+        # add multiedges
+        cls.DG.add_edges_from(edges)
+
+        gold = {(n, n): n for n in cls.DG}
+        gold.update({(0, i): 0 for i in range(1, 7)})
+        gold.update(
+            {
+                (1, 2): 0,
+                (1, 3): 1,
+                (1, 4): 1,
+                (1, 5): 0,
+                (1, 6): 0,
+                (2, 3): 0,
+                (2, 4): 0,
+                (2, 5): 2,
+                (2, 6): 2,
+                (3, 4): 1,
+                (3, 5): 0,
+                (3, 6): 0,
+                (4, 5): 0,
+                (4, 6): 0,
+                (5, 6): 2,
+            }
+        )
+
+        cls.gold = gold
+
+
 class TestDAGLCA:
     @classmethod
     def setup_class(cls):
@@ -316,6 +345,7 @@ class TestDAGLCA:
 
     def test_all_pairs_lca_one_pair_gh4942(self):
         G = nx.DiGraph()
+        # Note: order edge addition is critical to the test
         G.add_edge(0, 1)
         G.add_edge(2, 0)
         G.add_edge(2, 3)
@@ -323,3 +353,75 @@ class TestDAGLCA:
         G.add_edge(5, 2)
 
         assert nx.lowest_common_ancestor(G, 1, 3) == 2
+
+
+class TestMultiDiGraph_DAGLCA(TestDAGLCA):
+    @classmethod
+    def setup_class(cls):
+        cls.DG = nx.MultiDiGraph()
+        nx.add_path(cls.DG, (0, 1, 2, 3))
+        # add multiedges
+        nx.add_path(cls.DG, (0, 1, 2, 3))
+        nx.add_path(cls.DG, (0, 4, 3))
+        nx.add_path(cls.DG, (0, 5, 6, 8, 3))
+        nx.add_path(cls.DG, (5, 7, 8))
+        cls.DG.add_edge(6, 2)
+        cls.DG.add_edge(7, 2)
+
+        cls.root_distance = nx.shortest_path_length(cls.DG, source=0)
+
+        cls.gold = {
+            (1, 1): 1,
+            (1, 2): 1,
+            (1, 3): 1,
+            (1, 4): 0,
+            (1, 5): 0,
+            (1, 6): 0,
+            (1, 7): 0,
+            (1, 8): 0,
+            (2, 2): 2,
+            (2, 3): 2,
+            (2, 4): 0,
+            (2, 5): 5,
+            (2, 6): 6,
+            (2, 7): 7,
+            (2, 8): 7,
+            (3, 3): 3,
+            (3, 4): 4,
+            (3, 5): 5,
+            (3, 6): 6,
+            (3, 7): 7,
+            (3, 8): 8,
+            (4, 4): 4,
+            (4, 5): 0,
+            (4, 6): 0,
+            (4, 7): 0,
+            (4, 8): 0,
+            (5, 5): 5,
+            (5, 6): 5,
+            (5, 7): 5,
+            (5, 8): 5,
+            (6, 6): 6,
+            (6, 7): 5,
+            (6, 8): 6,
+            (7, 7): 7,
+            (7, 8): 7,
+            (8, 8): 8,
+        }
+        cls.gold.update(((0, n), 0) for n in cls.DG)
+
+
+def test_all_pairs_lca_self_ancestors():
+    """Self-ancestors should always be the node itself, i.e. lca of (0, 0) is 0.
+    See gh-4458."""
+    # DAG for test - note order of node/edge addition is relevant
+    G = nx.DiGraph()
+    G.add_nodes_from(range(5))
+    G.add_edges_from([(1, 0), (2, 0), (3, 2), (4, 1), (4, 3)])
+
+    ap_lca = nx.all_pairs_lowest_common_ancestor
+    assert all(u == v == a for (u, v), a in ap_lca(G) if u == v)
+    MG = nx.MultiDiGraph(G)
+    assert all(u == v == a for (u, v), a in ap_lca(MG) if u == v)
+    MG.add_edges_from([(1, 0), (2, 0)])
+    assert all(u == v == a for (u, v), a in ap_lca(MG) if u == v)
diff --git a/networkx/algorithms/tests/test_matching.py b/networkx/algorithms/tests/test_matching.py
index 57603bc..37853e3 100644
--- a/networkx/algorithms/tests/test_matching.py
+++ b/networkx/algorithms/tests/test_matching.py
@@ -117,14 +117,12 @@ class TestMaxWeightMatching:
             nx.max_weight_matching(G), matching_dict_to_set({1: 2, 2: 1})
         )
         assert edges_equal(
-            nx.max_weight_matching(G, 1), matching_dict_to_set({1: 3, 2: 4, 3: 1, 4: 2})
+            nx.max_weight_matching(G, maxcardinality=True),
+            matching_dict_to_set({1: 3, 2: 4, 3: 1, 4: 2}),
         )
         assert edges_equal(
             nx.min_weight_matching(G), matching_dict_to_set({1: 2, 3: 4})
         )
-        assert edges_equal(
-            nx.min_weight_matching(G, 1), matching_dict_to_set({1: 2, 3: 4})
-        )
 
     def test_s_blossom(self):
         """Create S-blossom and use it for augmentation:"""
diff --git a/networkx/algorithms/tests/test_node_classification.py b/networkx/algorithms/tests/test_node_classification.py
index ff99841..2e1fc79 100644
--- a/networkx/algorithms/tests/test_node_classification.py
+++ b/networkx/algorithms/tests/test_node_classification.py
@@ -71,7 +71,7 @@ class TestHarmonicFunction:
         for i in label_removed:
             del G.nodes[i][label_name]
         predicted = node_classification.harmonic_function(G, label_name=label_name)
-        label_not_removed = set(list(range(len(G)))) - label_removed
+        label_not_removed = set(range(len(G))) - label_removed
         for i in label_not_removed:
             assert predicted[i] == G.nodes[i][label_name]
 
diff --git a/networkx/algorithms/tests/test_node_classification_deprecations.py b/networkx/algorithms/tests/test_node_classification_deprecations.py
deleted file mode 100644
index 2d12561..0000000
--- a/networkx/algorithms/tests/test_node_classification_deprecations.py
+++ /dev/null
@@ -1,41 +0,0 @@
-"""TODO: Remove this test module for version 3.0."""
-
-
-import sys
-
-import pytest
-
-# NOTE: It is necessary to prevent previous imports in the test suite from
-# "contaminating" the tests for the deprecation warnings by removing
-# node_classification from sys.modules.
-
-
-def test_hmn_deprecation_warning():
-    sys.modules.pop("networkx.algorithms.node_classification", None)
-    with pytest.warns(DeprecationWarning):
-        from networkx.algorithms.node_classification import hmn
-
-
-def test_lgc_deprecation_warning():
-    sys.modules.pop("networkx.algorithms.node_classification", None)
-    with pytest.warns(DeprecationWarning):
-        from networkx.algorithms.node_classification import lgc
-
-
-def test_no_warn_on_function_import(recwarn):
-    # Accessing the functions shouldn't raise any warning
-    sys.modules.pop("networkx.algorithms.node_classification", None)
-    from networkx.algorithms.node_classification import (
-        harmonic_function,
-        local_and_global_consistency,
-    )
-
-    assert len(recwarn) == 0
-
-
-def test_no_warn_on_package_import(recwarn):
-    # Accessing the package shouldn't raise any warning
-    sys.modules.pop("networkx.algorithms.node_classification", None)
-    from networkx.algorithms import node_classification
-
-    assert len(recwarn) == 0
diff --git a/networkx/algorithms/tests/test_planar_drawing.py b/networkx/algorithms/tests/test_planar_drawing.py
index 2a12c06..c1c45ec 100644
--- a/networkx/algorithms/tests/test_planar_drawing.py
+++ b/networkx/algorithms/tests/test_planar_drawing.py
@@ -231,7 +231,7 @@ class Vector:
             return self.x * other.y < self.y * other.x
 
     def __ne__(self, other):
-        return not self == other
+        return self != other
 
     def __le__(self, other):
         return not other < self
diff --git a/networkx/algorithms/tests/test_planarity.py b/networkx/algorithms/tests/test_planarity.py
index 675a5d9..470b1d2 100644
--- a/networkx/algorithms/tests/test_planarity.py
+++ b/networkx/algorithms/tests/test_planarity.py
@@ -202,7 +202,7 @@ class TestLRPlanarity:
         self.check_graph(G, is_planar=True)
 
     def test_graph1(self):
-        G = nx.OrderedGraph(
+        G = nx.Graph(
             [
                 (3, 10),
                 (2, 13),
@@ -219,7 +219,7 @@ class TestLRPlanarity:
         self.check_graph(G, is_planar=True)
 
     def test_graph2(self):
-        G = nx.OrderedGraph(
+        G = nx.Graph(
             [
                 (1, 2),
                 (4, 13),
@@ -243,7 +243,7 @@ class TestLRPlanarity:
         self.check_graph(G, is_planar=False)
 
     def test_graph3(self):
-        G = nx.OrderedGraph(
+        G = nx.Graph(
             [
                 (0, 7),
                 (3, 11),
diff --git a/networkx/algorithms/tests/test_reciprocity.py b/networkx/algorithms/tests/test_reciprocity.py
index 2c5fc04..eee6f2e 100644
--- a/networkx/algorithms/tests/test_reciprocity.py
+++ b/networkx/algorithms/tests/test_reciprocity.py
@@ -4,7 +4,6 @@ import networkx as nx
 
 
 class TestReciprocity:
-
     # test overall reicprocity by passing whole graph
     def test_reciprocity_digraph(self):
         DG = nx.DiGraph([(1, 2), (2, 1)])
diff --git a/networkx/algorithms/tests/test_similarity.py b/networkx/algorithms/tests/test_similarity.py
index 9b620de..a7ca1d8 100644
--- a/networkx/algorithms/tests/test_similarity.py
+++ b/networkx/algorithms/tests/test_similarity.py
@@ -523,7 +523,10 @@ class TestSimilarity:
         assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 1
 
     # note: nx.simrank_similarity_numpy not included because returns np.array
-    simrank_algs = [nx.simrank_similarity, nx.similarity._simrank_similarity_python]
+    simrank_algs = [
+        nx.simrank_similarity,
+        nx.algorithms.similarity._simrank_similarity_python,
+    ]
 
     @pytest.mark.parametrize("simrank_similarity", simrank_algs)
     def test_simrank_no_source_no_target(self, simrank_similarity):
diff --git a/networkx/algorithms/tests/test_simple_paths.py b/networkx/algorithms/tests/test_simple_paths.py
index 08348b9..af15861 100644
--- a/networkx/algorithms/tests/test_simple_paths.py
+++ b/networkx/algorithms/tests/test_simple_paths.py
@@ -58,6 +58,10 @@ class TestIsSimplePath:
         G = nx.path_graph(2)
         assert not nx.is_simple_path(G, [0, 2])
 
+    def test_missing_starting_node(self):
+        G = nx.path_graph(2)
+        assert not nx.is_simple_path(G, [2, 0])
+
     def test_directed_path(self):
         G = nx.DiGraph([(0, 1), (1, 2)])
         assert nx.is_simple_path(G, [0, 1, 2])
@@ -234,9 +238,9 @@ def test_hamiltonian_path():
 def test_cutoff_zero():
     G = nx.complete_graph(4)
     paths = nx.all_simple_paths(G, 0, 3, cutoff=0)
-    assert list(list(p) for p in paths) == []
+    assert [list(p) for p in paths] == []
     paths = nx.all_simple_paths(nx.MultiGraph(G), 0, 3, cutoff=0)
-    assert list(list(p) for p in paths) == []
+    assert [list(p) for p in paths] == []
 
 
 def test_source_missing():
@@ -418,15 +422,15 @@ def test_hamiltonian__edge_path():
     G = nx.complete_graph(4)
     paths = hamiltonian_edge_path(G, 0)
     exact = [list(pairwise([0] + list(p))) for p in permutations([1, 2, 3], 3)]
-    assert sorted(exact) == [p for p in sorted(paths)]
+    assert sorted(exact) == sorted(paths)
 
 
 def test_edge_cutoff_zero():
     G = nx.complete_graph(4)
     paths = nx.all_simple_edge_paths(G, 0, 3, cutoff=0)
-    assert list(list(p) for p in paths) == []
+    assert [list(p) for p in paths] == []
     paths = nx.all_simple_edge_paths(nx.MultiGraph(G), 0, 3, cutoff=0)
-    assert list(list(p) for p in paths) == []
+    assert [list(p) for p in paths] == []
 
 
 def test_edge_source_missing():
@@ -457,10 +461,10 @@ def test_shortest_simple_paths():
 def test_shortest_simple_paths_directed():
     G = nx.cycle_graph(7, create_using=nx.DiGraph())
     paths = nx.shortest_simple_paths(G, 0, 3)
-    assert [path for path in paths] == [[0, 1, 2, 3]]
+    assert list(paths) == [[0, 1, 2, 3]]
 
 
-def test_shortest_simple_paths_directed_with_weight_fucntion():
+def test_shortest_simple_paths_directed_with_weight_function():
     def cost(u, v, x):
         return 1
 
@@ -473,13 +477,13 @@ def test_shortest_simple_paths_directed_with_weight_fucntion():
     ] == sorted(len(path) for path in nx.all_simple_paths(G, 1, 12))
 
 
-def test_shortest_simple_paths_with_weight_fucntion():
+def test_shortest_simple_paths_with_weight_function():
     def cost(u, v, x):
         return 1
 
     G = nx.cycle_graph(7, create_using=nx.DiGraph())
     paths = nx.shortest_simple_paths(G, 0, 3, weight=cost)
-    assert [path for path in paths] == [[0, 1, 2, 3]]
+    assert list(paths) == [[0, 1, 2, 3]]
 
 
 def test_Greg_Bernstein():
@@ -717,7 +721,7 @@ def test_bidirectional_dijksta_restricted():
         "s",
         "v",
         11,
-        *_bidirectional_dijkstra(XG, "s", "v", ignore_edges=[("s", "x")])
+        *_bidirectional_dijkstra(XG, "s", "v", ignore_edges=[("s", "x")]),
     )
     pytest.raises(
         nx.NetworkXNoPath,
diff --git a/networkx/algorithms/tests/test_smallworld.py b/networkx/algorithms/tests/test_smallworld.py
index 42ede0e..d115dd9 100644
--- a/networkx/algorithms/tests/test_smallworld.py
+++ b/networkx/algorithms/tests/test_smallworld.py
@@ -68,3 +68,11 @@ def test_omega():
 
     for o in omegas:
         assert -1 <= o <= 1
+
+
+@pytest.mark.parametrize("f", (nx.random_reference, nx.lattice_reference))
+def test_graph_no_edges(f):
+    G = nx.Graph()
+    G.add_nodes_from([0, 1, 2, 3])
+    with pytest.raises(nx.NetworkXError, match="Graph has fewer that 2 edges"):
+        f(G)
diff --git a/networkx/algorithms/tests/test_structuralholes.py b/networkx/algorithms/tests/test_structuralholes.py
index 51447fe..6f92baa 100644
--- a/networkx/algorithms/tests/test_structuralholes.py
+++ b/networkx/algorithms/tests/test_structuralholes.py
@@ -4,6 +4,7 @@ import math
 import pytest
 
 import networkx as nx
+from networkx.classes.tests import dispatch_interface
 
 
 class TestStructuralHoles:
@@ -51,8 +52,11 @@ class TestStructuralHoles:
             ("G", "C"): 10,
         }
 
-    def test_constraint_directed(self):
-        constraint = nx.constraint(self.D)
+    # This additionally tests the @nx._dispatch mechanism, treating
+    # nx.mutual_weight as if it were a re-implementation from another package
+    @pytest.mark.parametrize("wrapper", [lambda x: x, dispatch_interface.convert])
+    def test_constraint_directed(self, wrapper):
+        constraint = nx.constraint(wrapper(self.D))
         assert constraint[0] == pytest.approx(1.003, abs=1e-3)
         assert constraint[1] == pytest.approx(1.003, abs=1e-3)
         assert constraint[2] == pytest.approx(1.389, abs=1e-3)
diff --git a/networkx/algorithms/tests/test_summarization.py b/networkx/algorithms/tests/test_summarization.py
index c951b86..859e0cf 100644
--- a/networkx/algorithms/tests/test_summarization.py
+++ b/networkx/algorithms/tests/test_summarization.py
@@ -253,7 +253,7 @@ class AbstractSNAP:
         node_labels = sorted(node_labels, key=lambda n: sorted(G.nodes[n]["group"])[0])
         node_labels.sort()
 
-        label_mapping = dict()
+        label_mapping = {}
         for index, node in enumerate(node_labels):
             label = "Supernode-%s" % index
             label_mapping[node] = label
@@ -277,18 +277,18 @@ class TestSNAPNoEdgeTypes(AbstractSNAP):
 
     def build_original_graph(self):
         nodes = {
-            "A": dict(color="Red"),
-            "B": dict(color="Red"),
-            "C": dict(color="Red"),
-            "D": dict(color="Red"),
-            "E": dict(color="Blue"),
-            "F": dict(color="Blue"),
-            "G": dict(color="Blue"),
-            "H": dict(color="Blue"),
-            "I": dict(color="Yellow"),
-            "J": dict(color="Yellow"),
-            "K": dict(color="Yellow"),
-            "L": dict(color="Yellow"),
+            "A": {"color": "Red"},
+            "B": {"color": "Red"},
+            "C": {"color": "Red"},
+            "D": {"color": "Red"},
+            "E": {"color": "Blue"},
+            "F": {"color": "Blue"},
+            "G": {"color": "Blue"},
+            "H": {"color": "Blue"},
+            "I": {"color": "Yellow"},
+            "J": {"color": "Yellow"},
+            "K": {"color": "Yellow"},
+            "L": {"color": "Yellow"},
         }
         edges = [
             ("A", "B"),
@@ -316,12 +316,12 @@ class TestSNAPNoEdgeTypes(AbstractSNAP):
 
     def build_summary_graph(self):
         nodes = {
-            "Supernode-0": dict(color="Red"),
-            "Supernode-1": dict(color="Red"),
-            "Supernode-2": dict(color="Blue"),
-            "Supernode-3": dict(color="Blue"),
-            "Supernode-4": dict(color="Yellow"),
-            "Supernode-5": dict(color="Yellow"),
+            "Supernode-0": {"color": "Red"},
+            "Supernode-1": {"color": "Red"},
+            "Supernode-2": {"color": "Blue"},
+            "Supernode-3": {"color": "Blue"},
+            "Supernode-4": {"color": "Yellow"},
+            "Supernode-5": {"color": "Yellow"},
         }
         edges = [
             ("Supernode-0", "Supernode-0"),
@@ -355,18 +355,18 @@ class TestSNAPNoEdgeTypes(AbstractSNAP):
 class TestSNAPUndirected(AbstractSNAP):
     def build_original_graph(self):
         nodes = {
-            "A": dict(color="Red"),
-            "B": dict(color="Red"),
-            "C": dict(color="Red"),
-            "D": dict(color="Red"),
-            "E": dict(color="Blue"),
-            "F": dict(color="Blue"),
-            "G": dict(color="Blue"),
-            "H": dict(color="Blue"),
-            "I": dict(color="Yellow"),
-            "J": dict(color="Yellow"),
-            "K": dict(color="Yellow"),
-            "L": dict(color="Yellow"),
+            "A": {"color": "Red"},
+            "B": {"color": "Red"},
+            "C": {"color": "Red"},
+            "D": {"color": "Red"},
+            "E": {"color": "Blue"},
+            "F": {"color": "Blue"},
+            "G": {"color": "Blue"},
+            "H": {"color": "Blue"},
+            "I": {"color": "Yellow"},
+            "J": {"color": "Yellow"},
+            "K": {"color": "Yellow"},
+            "L": {"color": "Yellow"},
         }
         edges = [
             ("A", "B", "Strong"),
@@ -394,12 +394,12 @@ class TestSNAPUndirected(AbstractSNAP):
 
     def build_summary_graph(self):
         nodes = {
-            "Supernode-0": dict(color="Red"),
-            "Supernode-1": dict(color="Red"),
-            "Supernode-2": dict(color="Blue"),
-            "Supernode-3": dict(color="Blue"),
-            "Supernode-4": dict(color="Yellow"),
-            "Supernode-5": dict(color="Yellow"),
+            "Supernode-0": {"color": "Red"},
+            "Supernode-1": {"color": "Red"},
+            "Supernode-2": {"color": "Blue"},
+            "Supernode-3": {"color": "Blue"},
+            "Supernode-4": {"color": "Yellow"},
+            "Supernode-5": {"color": "Yellow"},
         }
         edges = [
             ("Supernode-0", "Supernode-0", "Strong"),
@@ -416,7 +416,7 @@ class TestSNAPUndirected(AbstractSNAP):
             G.add_node(node, **attributes)
 
         for source, target, type in edges:
-            G.add_edge(source, target, types=[dict(type=type)])
+            G.add_edge(source, target, types=[{"type": type}])
 
         supernodes = {
             "Supernode-0": {"A", "B"},
@@ -433,14 +433,14 @@ class TestSNAPUndirected(AbstractSNAP):
 class TestSNAPDirected(AbstractSNAP):
     def build_original_graph(self):
         nodes = {
-            "A": dict(color="Red"),
-            "B": dict(color="Red"),
-            "C": dict(color="Green"),
-            "D": dict(color="Green"),
-            "E": dict(color="Blue"),
-            "F": dict(color="Blue"),
-            "G": dict(color="Yellow"),
-            "H": dict(color="Yellow"),
+            "A": {"color": "Red"},
+            "B": {"color": "Red"},
+            "C": {"color": "Green"},
+            "D": {"color": "Green"},
+            "E": {"color": "Blue"},
+            "F": {"color": "Blue"},
+            "G": {"color": "Yellow"},
+            "H": {"color": "Yellow"},
         }
         edges = [
             ("A", "C", "Strong"),
@@ -468,10 +468,10 @@ class TestSNAPDirected(AbstractSNAP):
 
     def build_summary_graph(self):
         nodes = {
-            "Supernode-0": dict(color="Red"),
-            "Supernode-1": dict(color="Green"),
-            "Supernode-2": dict(color="Blue"),
-            "Supernode-3": dict(color="Yellow"),
+            "Supernode-0": {"color": "Red"},
+            "Supernode-1": {"color": "Green"},
+            "Supernode-2": {"color": "Blue"},
+            "Supernode-3": {"color": "Yellow"},
         }
         edges = [
             ("Supernode-0", "Supernode-1", [{"type": "Strong"}]),
@@ -503,15 +503,15 @@ class TestSNAPDirected(AbstractSNAP):
 class TestSNAPUndirectedMulti(AbstractSNAP):
     def build_original_graph(self):
         nodes = {
-            "A": dict(color="Red"),
-            "B": dict(color="Red"),
-            "C": dict(color="Red"),
-            "D": dict(color="Blue"),
-            "E": dict(color="Blue"),
-            "F": dict(color="Blue"),
-            "G": dict(color="Yellow"),
-            "H": dict(color="Yellow"),
-            "I": dict(color="Yellow"),
+            "A": {"color": "Red"},
+            "B": {"color": "Red"},
+            "C": {"color": "Red"},
+            "D": {"color": "Blue"},
+            "E": {"color": "Blue"},
+            "F": {"color": "Blue"},
+            "G": {"color": "Yellow"},
+            "H": {"color": "Yellow"},
+            "I": {"color": "Yellow"},
         }
         edges = [
             ("A", "D", ["Weak", "Strong"]),
@@ -536,12 +536,12 @@ class TestSNAPUndirectedMulti(AbstractSNAP):
 
     def build_summary_graph(self):
         nodes = {
-            "Supernode-0": dict(color="Red"),
-            "Supernode-1": dict(color="Blue"),
-            "Supernode-2": dict(color="Yellow"),
-            "Supernode-3": dict(color="Blue"),
-            "Supernode-4": dict(color="Yellow"),
-            "Supernode-5": dict(color="Red"),
+            "Supernode-0": {"color": "Red"},
+            "Supernode-1": {"color": "Blue"},
+            "Supernode-2": {"color": "Yellow"},
+            "Supernode-3": {"color": "Blue"},
+            "Supernode-4": {"color": "Yellow"},
+            "Supernode-5": {"color": "Red"},
         }
         edges = [
             ("Supernode-1", "Supernode-2", [{"type": "Weak"}]),
@@ -574,14 +574,14 @@ class TestSNAPUndirectedMulti(AbstractSNAP):
 class TestSNAPDirectedMulti(AbstractSNAP):
     def build_original_graph(self):
         nodes = {
-            "A": dict(color="Red"),
-            "B": dict(color="Red"),
-            "C": dict(color="Green"),
-            "D": dict(color="Green"),
-            "E": dict(color="Blue"),
-            "F": dict(color="Blue"),
-            "G": dict(color="Yellow"),
-            "H": dict(color="Yellow"),
+            "A": {"color": "Red"},
+            "B": {"color": "Red"},
+            "C": {"color": "Green"},
+            "D": {"color": "Green"},
+            "E": {"color": "Blue"},
+            "F": {"color": "Blue"},
+            "G": {"color": "Yellow"},
+            "H": {"color": "Yellow"},
         }
         edges = [
             ("A", "C", ["Weak", "Strong"]),
@@ -610,10 +610,10 @@ class TestSNAPDirectedMulti(AbstractSNAP):
 
     def build_summary_graph(self):
         nodes = {
-            "Supernode-0": dict(color="Red"),
-            "Supernode-1": dict(color="Blue"),
-            "Supernode-2": dict(color="Yellow"),
-            "Supernode-3": dict(color="Blue"),
+            "Supernode-0": {"color": "Red"},
+            "Supernode-1": {"color": "Blue"},
+            "Supernode-2": {"color": "Yellow"},
+            "Supernode-3": {"color": "Blue"},
         }
         edges = [
             ("Supernode-0", "Supernode-1", ["Weak", "Strong"]),
diff --git a/networkx/algorithms/tests/test_swap.py b/networkx/algorithms/tests/test_swap.py
index 9982b95..49dd5f8 100644
--- a/networkx/algorithms/tests/test_swap.py
+++ b/networkx/algorithms/tests/test_swap.py
@@ -2,8 +2,26 @@ import pytest
 
 import networkx as nx
 
-# import random
-# random.seed(0)
+
+def test_directed_edge_swap():
+    graph = nx.path_graph(200, create_using=nx.DiGraph)
+    in_degrees = sorted((n, d) for n, d in graph.in_degree())
+    out_degrees = sorted((n, d) for n, d in graph.out_degree())
+    G = nx.directed_edge_swap(graph, nswap=40, max_tries=500, seed=1)
+    assert in_degrees == sorted((n, d) for n, d in G.in_degree())
+    assert out_degrees == sorted((n, d) for n, d in G.out_degree())
+
+
+def test_edge_cases_directed_edge_swap():
+    # Tests cases when swaps are impossible, either too few edges exist, or self loops/cycles are unavoidable
+    # TODO: Rewrite function to explicitly check for impossible swaps and raise error
+    e = (
+        "Maximum number of swap attempts \\(11\\) exceeded "
+        "before desired swaps achieved \\(\\d\\)."
+    )
+    graph = nx.DiGraph([(0, 0), (0, 1), (1, 0), (2, 3), (3, 2)])
+    with pytest.raises(nx.NetworkXAlgorithmError, match=e):
+        nx.directed_edge_swap(graph, nswap=1, max_tries=10, seed=1)
 
 
 def test_double_edge_swap():
@@ -54,6 +72,31 @@ def test_connected_double_edge_swap_star_low_window_threshold():
     assert degrees == sorted(d for n, d in graph.degree())
 
 
+def test_directed_edge_swap_small():
+    with pytest.raises(nx.NetworkXError):
+        G = nx.directed_edge_swap(nx.path_graph(3, create_using=nx.DiGraph))
+
+
+def test_directed_edge_swap_tries():
+    with pytest.raises(nx.NetworkXError):
+        G = nx.directed_edge_swap(
+            nx.path_graph(3, create_using=nx.DiGraph), nswap=1, max_tries=0
+        )
+
+
+def test_directed_exception_undirected():
+    graph = nx.Graph([(0, 1), (2, 3)])
+    with pytest.raises(nx.NetworkXNotImplemented):
+        G = nx.directed_edge_swap(graph)
+
+
+def test_directed_edge_max_tries():
+    with pytest.raises(nx.NetworkXAlgorithmError):
+        G = nx.directed_edge_swap(
+            nx.complete_graph(4, nx.DiGraph()), nswap=1, max_tries=5
+        )
+
+
 def test_double_edge_swap_small():
     with pytest.raises(nx.NetworkXError):
         G = nx.double_edge_swap(nx.path_graph(3))
@@ -92,3 +135,22 @@ def test_degree_seq_c4():
     degrees = sorted(d for n, d in G.degree())
     G = nx.double_edge_swap(G, 1, 100)
     assert degrees == sorted(d for n, d in G.degree())
+
+
+def test_fewer_than_4_nodes():
+    G = nx.DiGraph()
+    G.add_nodes_from([0, 1, 2])
+    with pytest.raises(nx.NetworkXError, match=".*fewer than four nodes."):
+        nx.directed_edge_swap(G)
+
+
+def test_less_than_3_edges():
+    G = nx.DiGraph([(0, 1), (1, 2)])
+    G.add_nodes_from([3, 4])
+    with pytest.raises(nx.NetworkXError, match=".*fewer than 3 edges"):
+        nx.directed_edge_swap(G)
+
+    G = nx.Graph()
+    G.add_nodes_from([0, 1, 2, 3])
+    with pytest.raises(nx.NetworkXError, match=".*fewer than 2 edges"):
+        nx.double_edge_swap(G)
diff --git a/networkx/algorithms/tests/test_threshold.py b/networkx/algorithms/tests/test_threshold.py
index c06784a..6bb2123 100644
--- a/networkx/algorithms/tests/test_threshold.py
+++ b/networkx/algorithms/tests/test_threshold.py
@@ -16,11 +16,11 @@ class TestGeneratorThreshold:
     def test_threshold_sequence_graph_test(self):
         G = nx.star_graph(10)
         assert nxt.is_threshold_graph(G)
-        assert nxt.is_threshold_sequence(list(d for n, d in G.degree()))
+        assert nxt.is_threshold_sequence([d for n, d in G.degree()])
 
         G = nx.complete_graph(10)
         assert nxt.is_threshold_graph(G)
-        assert nxt.is_threshold_sequence(list(d for n, d in G.degree()))
+        assert nxt.is_threshold_sequence([d for n, d in G.degree()])
 
         deg = [3, 2, 2, 1, 1, 1]
         assert not nxt.is_threshold_sequence(deg)
diff --git a/networkx/algorithms/tests/test_triads.py b/networkx/algorithms/tests/test_triads.py
index 446c2db..16a1fa8 100644
--- a/networkx/algorithms/tests/test_triads.py
+++ b/networkx/algorithms/tests/test_triads.py
@@ -56,8 +56,8 @@ def test_all_triplets():
         for k in range(j + 1, 7)
     ]
     expected = [set(x.split(",")) for x in expected]
-    actual = list(set(x) for x in nx.all_triplets(G))
-    assert all([any([s1 == s2 for s1 in expected]) for s2 in actual])
+    actual = [set(x) for x in nx.all_triplets(G)]
+    assert all(any(s1 == s2 for s1 in expected) for s2 in actual)
 
 
 def test_all_triads():
@@ -72,7 +72,7 @@ def test_all_triads():
     ]
     expected = [G.subgraph(x.split(",")) for x in expected]
     actual = list(nx.all_triads(G))
-    assert all(any([nx.is_isomorphic(G1, G2) for G1 in expected]) for G2 in actual)
+    assert all(any(nx.is_isomorphic(G1, G2) for G1 in expected) for G2 in actual)
 
 
 def test_triad_type():
diff --git a/networkx/algorithms/threshold.py b/networkx/algorithms/threshold.py
index 5c50394..0d37c90 100644
--- a/networkx/algorithms/threshold.py
+++ b/networkx/algorithms/threshold.py
@@ -37,18 +37,18 @@ def is_threshold_graph(G):
     ----------
     .. [1] Threshold graphs: https://en.wikipedia.org/wiki/Threshold_graph
     """
-    return is_threshold_sequence(list(d for n, d in G.degree()))
+    return is_threshold_sequence([d for n, d in G.degree()])
 
 
 def is_threshold_sequence(degree_sequence):
     """
-    Returns True if the sequence is a threshold degree seqeunce.
+    Returns True if the sequence is a threshold degree sequence.
 
     Uses the property that a threshold graph must be constructed by
     adding either dominating or isolated nodes. Thus, it can be
     deconstructed iteratively by removing a node of degree zero or a
     node that connects to the remaining nodes.  If this deconstruction
-    failes then the sequence is not a threshold sequence.
+    fails then the sequence is not a threshold sequence.
     """
     ds = degree_sequence[:]  # get a copy so we don't destroy original
     ds.sort()
@@ -95,7 +95,7 @@ def creation_sequence(degree_sequence, with_labels=False, compact=False):
         raise ValueError("compact sequences cannot be labeled")
 
     # make an indexed copy
-    if isinstance(degree_sequence, dict):  # labeled degree seqeunce
+    if isinstance(degree_sequence, dict):  # labeled degree sequence
         ds = [[degree, label] for (label, degree) in degree_sequence.items()]
     else:
         ds = [[d, i] for i, d in enumerate(degree_sequence)]
@@ -357,7 +357,7 @@ def find_alternating_4_cycle(G):
     Otherwise returns the cycle as [a,b,c,d] where (a,b)
     and (c,d) are edges and (a,c) and (b,d) are not.
     """
-    for (u, v) in G.edges():
+    for u, v in G.edges():
         for w in G.nodes():
             if not G.has_edge(u, w) and u != w:
                 for x in G.neighbors(w):
@@ -668,13 +668,13 @@ def betweenness_sequence(creation_sequence, normalized=True):
     cs = creation_sequence
     seq = []  # betweenness
     lastchar = "d"  # first node is always a 'd'
-    dr = float(cs.count("d"))  # number of d's to the right of curren pos
+    dr = float(cs.count("d"))  # number of d's to the right of current pos
     irun = 0  # number of i's in the last run
     drun = 0  # number of d's in the last run
     dlast = 0.0  # betweenness of last d
     for i, c in enumerate(cs):
         if c == "d":  # cs[i]=="d":
-            # betweennees = amt shared with eariler d's and i's
+            # betweenness = amount shared with earlier d's and i's
             #             + new isolated nodes covered
             #             + new paths to all previous nodes
             b = dlast + (irun - 1) * irun / dr + 2 * irun * (i - drun - irun) / dr
@@ -825,7 +825,7 @@ def random_threshold_sequence(n, p, seed=None):
     """
     Create a random threshold sequence of size n.
     A creation sequence is built by randomly choosing d's with
-    probabiliy p and i's with probability 1-p.
+    probability p and i's with probability 1-p.
 
     s=nx.random_threshold_sequence(10,0.5)
 
diff --git a/networkx/algorithms/tournament.py b/networkx/algorithms/tournament.py
index 278a1c4..ef1d8a0 100644
--- a/networkx/algorithms/tournament.py
+++ b/networkx/algorithms/tournament.py
@@ -61,6 +61,7 @@ def index_satisfying(iterable, condition):
         raise ValueError("iterable must be non-empty") from err
 
 
+@nx._dispatch
 @not_implemented_for("undirected")
 @not_implemented_for("multigraph")
 def is_tournament(G):
@@ -179,6 +180,7 @@ def random_tournament(n, seed=None):
     return nx.DiGraph(edges)
 
 
+@nx._dispatch
 @not_implemented_for("undirected")
 @not_implemented_for("multigraph")
 def score_sequence(G):
@@ -208,6 +210,7 @@ def score_sequence(G):
     return sorted(d for v, d in G.out_degree())
 
 
+@nx._dispatch
 @not_implemented_for("undirected")
 @not_implemented_for("multigraph")
 def tournament_matrix(G):
@@ -237,7 +240,7 @@ def tournament_matrix(G):
 
     Returns
     -------
-    SciPy sparse matrix
+    SciPy sparse array
         The tournament matrix of the tournament graph `G`.
 
     Raises
diff --git a/networkx/algorithms/traversal/breadth_first_search.py b/networkx/algorithms/traversal/breadth_first_search.py
index 55ba1f0..951a9e9 100644
--- a/networkx/algorithms/traversal/breadth_first_search.py
+++ b/networkx/algorithms/traversal/breadth_first_search.py
@@ -88,6 +88,7 @@ def generic_bfs_edges(G, source, neighbors=None, depth_limit=None, sort_neighbor
             queue.popleft()
 
 
+@nx._dispatch
 def bfs_edges(G, source, reverse=False, depth_limit=None, sort_neighbors=None):
     """Iterate over edges in a breadth-first-search starting at source.
 
@@ -235,6 +236,7 @@ def bfs_tree(G, source, reverse=False, depth_limit=None, sort_neighbors=None):
     return T
 
 
+@nx._dispatch
 def bfs_predecessors(G, source, depth_limit=None, sort_neighbors=None):
     """Returns an iterator of predecessors in breadth-first-search from source.
 
@@ -299,6 +301,7 @@ def bfs_predecessors(G, source, depth_limit=None, sort_neighbors=None):
         yield (t, s)
 
 
+@nx._dispatch
 def bfs_successors(G, source, depth_limit=None, sort_neighbors=None):
     """Returns an iterator of successors in breadth-first-search from source.
 
@@ -371,6 +374,7 @@ def bfs_successors(G, source, depth_limit=None, sort_neighbors=None):
     yield (parent, children)
 
 
+@nx._dispatch
 def bfs_layers(G, sources):
     """Returns an iterator of all the layers in breadth-first search traversal.
 
@@ -413,7 +417,7 @@ def bfs_layers(G, sources):
     # same distance from sources at each iteration
     while current_layer:
         yield current_layer
-        next_layer = list()
+        next_layer = []
         for node in current_layer:
             for child in G[node]:
                 if child not in visited:
@@ -422,6 +426,7 @@ def bfs_layers(G, sources):
         current_layer = next_layer
 
 
+@nx._dispatch
 def descendants_at_distance(G, source, distance):
     """Returns all nodes at a fixed `distance` from `source` in `G`.
 
diff --git a/networkx/algorithms/traversal/depth_first_search.py b/networkx/algorithms/traversal/depth_first_search.py
index 0ccca4f..c250787 100644
--- a/networkx/algorithms/traversal/depth_first_search.py
+++ b/networkx/algorithms/traversal/depth_first_search.py
@@ -364,12 +364,15 @@ def dfs_labeled_edges(G, source=None, depth_limit=None):
     edges: generator
        A generator of triples of the form (*u*, *v*, *d*), where (*u*,
        *v*) is the edge being explored in the depth-first search and *d*
-       is one of the strings 'forward', 'nontree', or 'reverse'. A
-       'forward' edge is one in which *u* has been visited but *v* has
+       is one of the strings 'forward', 'nontree', 'reverse', or 'reverse-depth_limit'.
+       A 'forward' edge is one in which *u* has been visited but *v* has
        not. A 'nontree' edge is one in which both *u* and *v* have been
        visited but the edge is not in the DFS tree. A 'reverse' edge is
-       on in which both *u* and *v* have been visited and the edge is in
-       the DFS tree.
+       one in which both *u* and *v* have been visited and the edge is in
+       the DFS tree. When the `depth_limit` is reached via a 'forward' edge,
+       a 'reverse' edge is immediately generated rather than the subtree
+       being explored. To indicate this flavor of 'reverse' edge, the string
+       yielded is 'reverse-depth_limit'.
 
     Examples
     --------
@@ -436,6 +439,8 @@ def dfs_labeled_edges(G, source=None, depth_limit=None):
                     visited.add(child)
                     if depth_now > 1:
                         stack.append((child, depth_now - 1, iter(G[child])))
+                    else:
+                        yield parent, child, "reverse-depth_limit"
             except StopIteration:
                 stack.pop()
                 if stack:
diff --git a/networkx/algorithms/traversal/edgebfs.py b/networkx/algorithms/traversal/edgebfs.py
index 4e1c541..c29ef5e 100644
--- a/networkx/algorithms/traversal/edgebfs.py
+++ b/networkx/algorithms/traversal/edgebfs.py
@@ -16,6 +16,7 @@ REVERSE = "reverse"
 __all__ = ["edge_bfs"]
 
 
+@nx._dispatch
 def edge_bfs(G, source=None, orientation=None):
     """A directed, breadth-first-search of edges in `G`, beginning at `source`.
 
@@ -157,7 +158,7 @@ def edge_bfs(G, source=None, orientation=None):
     check_reverse = directed and orientation in ("reverse", "ignore")
 
     # start BFS
-    visited_nodes = {n for n in nodes}
+    visited_nodes = set(nodes)
     visited_edges = set()
     queue = deque([(n, edges_from(n)) for n in nodes])
     while queue:
diff --git a/networkx/algorithms/traversal/tests/test_beamsearch.py b/networkx/algorithms/traversal/tests/test_beamsearch.py
index 249cc2f..8945b41 100644
--- a/networkx/algorithms/traversal/tests/test_beamsearch.py
+++ b/networkx/algorithms/traversal/tests/test_beamsearch.py
@@ -25,3 +25,8 @@ class TestBeamSearch:
         G = nx.cycle_graph(4)
         edges = nx.bfs_beam_edges(G, 0, identity, width=2)
         assert list(edges) == [(0, 3), (0, 1), (3, 2)]
+
+    def test_width_none(self):
+        G = nx.cycle_graph(4)
+        edges = nx.bfs_beam_edges(G, 0, identity, width=None)
+        assert list(edges) == [(0, 3), (0, 1), (3, 2)]
diff --git a/networkx/algorithms/traversal/tests/test_dfs.py b/networkx/algorithms/traversal/tests/test_dfs.py
index 7652809..0eb698b 100644
--- a/networkx/algorithms/traversal/tests/test_dfs.py
+++ b/networkx/algorithms/traversal/tests/test_dfs.py
@@ -59,11 +59,43 @@ class TestDFS:
         edges = list(nx.dfs_labeled_edges(self.G, source=0))
         forward = [(u, v) for (u, v, d) in edges if d == "forward"]
         assert forward == [(0, 0), (0, 1), (1, 2), (2, 4), (1, 3)]
+        assert edges == [
+            (0, 0, "forward"),
+            (0, 1, "forward"),
+            (1, 0, "nontree"),
+            (1, 2, "forward"),
+            (2, 1, "nontree"),
+            (2, 4, "forward"),
+            (4, 2, "nontree"),
+            (4, 0, "nontree"),
+            (2, 4, "reverse"),
+            (1, 2, "reverse"),
+            (1, 3, "forward"),
+            (3, 1, "nontree"),
+            (3, 0, "nontree"),
+            (1, 3, "reverse"),
+            (0, 1, "reverse"),
+            (0, 3, "nontree"),
+            (0, 4, "nontree"),
+            (0, 0, "reverse"),
+        ]
 
     def test_dfs_labeled_disconnected_edges(self):
         edges = list(nx.dfs_labeled_edges(self.D))
         forward = [(u, v) for (u, v, d) in edges if d == "forward"]
         assert forward == [(0, 0), (0, 1), (2, 2), (2, 3)]
+        assert edges == [
+            (0, 0, "forward"),
+            (0, 1, "forward"),
+            (1, 0, "nontree"),
+            (0, 1, "reverse"),
+            (0, 0, "reverse"),
+            (2, 2, "forward"),
+            (2, 3, "forward"),
+            (3, 2, "nontree"),
+            (2, 3, "reverse"),
+            (2, 2, "reverse"),
+        ]
 
     def test_dfs_tree_isolates(self):
         G = nx.Graph()
@@ -141,12 +173,79 @@ class TestDepthLimitedSearch:
         edges = nx.dfs_edges(self.G, source=9, depth_limit=4)
         assert list(edges) == [(9, 8), (8, 7), (7, 2), (2, 1), (2, 3), (9, 10)]
 
-    def test_dls_labeled_edges(self):
+    def test_dls_labeled_edges_depth_1(self):
         edges = list(nx.dfs_labeled_edges(self.G, source=5, depth_limit=1))
         forward = [(u, v) for (u, v, d) in edges if d == "forward"]
         assert forward == [(5, 5), (5, 4), (5, 6)]
+        # Note: reverse-depth_limit edge types were not reported before gh-6240
+        assert edges == [
+            (5, 5, "forward"),
+            (5, 4, "forward"),
+            (5, 4, "reverse-depth_limit"),
+            (5, 6, "forward"),
+            (5, 6, "reverse-depth_limit"),
+            (5, 5, "reverse"),
+        ]
 
-    def test_dls_labeled_disconnected_edges(self):
+    def test_dls_labeled_edges_depth_2(self):
         edges = list(nx.dfs_labeled_edges(self.G, source=6, depth_limit=2))
         forward = [(u, v) for (u, v, d) in edges if d == "forward"]
         assert forward == [(6, 6), (6, 5), (5, 4)]
+        assert edges == [
+            (6, 6, "forward"),
+            (6, 5, "forward"),
+            (5, 4, "forward"),
+            (5, 4, "reverse-depth_limit"),
+            (5, 6, "nontree"),
+            (6, 5, "reverse"),
+            (6, 6, "reverse"),
+        ]
+
+    def test_dls_labeled_disconnected_edges(self):
+        edges = list(nx.dfs_labeled_edges(self.D, depth_limit=1))
+        assert edges == [
+            (0, 0, "forward"),
+            (0, 1, "forward"),
+            (0, 1, "reverse-depth_limit"),
+            (0, 0, "reverse"),
+            (2, 2, "forward"),
+            (2, 3, "forward"),
+            (2, 3, "reverse-depth_limit"),
+            (2, 7, "forward"),
+            (2, 7, "reverse-depth_limit"),
+            (2, 2, "reverse"),
+            (8, 8, "forward"),
+            (8, 7, "nontree"),
+            (8, 9, "forward"),
+            (8, 9, "reverse-depth_limit"),
+            (8, 8, "reverse"),
+            (10, 10, "forward"),
+            (10, 9, "nontree"),
+            (10, 10, "reverse"),
+        ]
+        # large depth_limit has no impact
+        edges = list(nx.dfs_labeled_edges(self.D, depth_limit=19))
+        assert edges == [
+            (0, 0, "forward"),
+            (0, 1, "forward"),
+            (1, 0, "nontree"),
+            (0, 1, "reverse"),
+            (0, 0, "reverse"),
+            (2, 2, "forward"),
+            (2, 3, "forward"),
+            (3, 2, "nontree"),
+            (2, 3, "reverse"),
+            (2, 7, "forward"),
+            (7, 2, "nontree"),
+            (7, 8, "forward"),
+            (8, 7, "nontree"),
+            (8, 9, "forward"),
+            (9, 8, "nontree"),
+            (9, 10, "forward"),
+            (10, 9, "nontree"),
+            (9, 10, "reverse"),
+            (8, 9, "reverse"),
+            (7, 8, "reverse"),
+            (2, 7, "reverse"),
+            (2, 2, "reverse"),
+        ]
diff --git a/networkx/algorithms/tree/branchings.py b/networkx/algorithms/tree/branchings.py
index 8f8602b..3125a4f 100644
--- a/networkx/algorithms/tree/branchings.py
+++ b/networkx/algorithms/tree/branchings.py
@@ -352,7 +352,7 @@ class Edmonds:
                 d[partition] = data.get(partition)
 
             if preserve_attrs:
-                for (d_k, d_v) in data.items():
+                for d_k, d_v in data.items():
                     if d_k != attr:
                         d[d_k] = d_v
 
@@ -698,7 +698,7 @@ class Edmonds:
             # Optionally, preserve the other edge attributes of the original
             # graph
             if preserve_attrs:
-                for (key, value) in d.items():
+                for key, value in d.items():
                     if key not in [self.attr, self.candidate_attr]:
                         dd[key] = value
 
@@ -932,7 +932,7 @@ class ArborescenceIterator:
         self.partition_queue.put(
             self.Partition(
                 mst_weight if self.minimum else -mst_weight,
-                dict()
+                {}
                 if self.init_partition is None
                 else self.init_partition.partition_dict,
             )
diff --git a/networkx/algorithms/tree/mst.py b/networkx/algorithms/tree/mst.py
index e2ff7c6..d36f411 100644
--- a/networkx/algorithms/tree/mst.py
+++ b/networkx/algorithms/tree/mst.py
@@ -334,12 +334,22 @@ def prim_mst_edges(G, minimum, weight="weight", keys=True, data=True, ignore_nan
                         continue
                     for k2, d2 in keydict.items():
                         new_weight = d2.get(weight, 1) * sign
+                        if isnan(new_weight):
+                            if ignore_nan:
+                                continue
+                            msg = f"NaN found as an edge weight. Edge {(v, w, k2, d2)}"
+                            raise ValueError(msg)
                         push(frontier, (new_weight, next(c), v, w, k2, d2))
             else:
                 for w, d2 in G.adj[v].items():
                     if w in visited:
                         continue
                     new_weight = d2.get(weight, 1) * sign
+                    if isnan(new_weight):
+                        if ignore_nan:
+                            continue
+                        msg = f"NaN found as an edge weight. Edge {(v, w, d2)}"
+                        raise ValueError(msg)
                     push(frontier, (new_weight, next(c), v, w, d2))
 
 
@@ -603,7 +613,7 @@ def partition_spanning_tree(
     """
     Find a spanning tree while respecting a partition of edges.
 
-    Edges can be flagged as either `INLCUDED` which are required to be in the
+    Edges can be flagged as either `INCLUDED` which are required to be in the
     returned tree, `EXCLUDED`, which cannot be in the returned tree and `OPEN`.
 
     This is used in the SpanningTreeIterator to create new partitions following
@@ -732,7 +742,7 @@ def random_spanning_tree(G, weight=None, *, multiplicative=True, seed=None):
     is based on the product of edge weights, and if ``multiplicative=False``
     it is based on the sum of the edge weight. However, since it is
     easier to determine the total weight of all spanning trees for the
-    multiplicative verison, that is significantly faster and should be used if
+    multiplicative version, that is significantly faster and should be used if
     possible. Additionally, setting `weight` to `None` will cause a spanning tree
     to be selected with uniform probability.
 
@@ -847,10 +857,10 @@ def random_spanning_tree(G, weight=None, *, multiplicative=True, seed=None):
         Find the sum of weights of the spanning trees of `G` using the
         approioate `method`.
 
-        This is easy if the choosen method is 'multiplicative', since we can
+        This is easy if the chosen method is 'multiplicative', since we can
         use Kirchhoff's Tree Matrix Theorem directly. However, with the
         'additive' method, this process is slightly more complex and less
-        computatiionally efficent as we have to find the number of spanning
+        computatiionally efficient as we have to find the number of spanning
         trees which contain each possible edge in the graph.
 
         Parameters
@@ -882,7 +892,7 @@ def random_spanning_tree(G, weight=None, *, multiplicative=True, seed=None):
             #    the number of spanning trees which have to include that edge. This
             #    can be accomplished by contracting the edge and finding the
             #    multiplicative total spanning tree weight if the weight of each edge
-            #    is assumed to be 1, which is conviently built into networkx already,
+            #    is assumed to be 1, which is conveniently built into networkx already,
             #    by calling total_spanning_tree_weight with weight=None
             else:
                 total = 0
@@ -1022,7 +1032,7 @@ class SpanningTreeIterator:
         ).size(weight=self.weight)
 
         self.partition_queue.put(
-            self.Partition(mst_weight if self.minimum else -mst_weight, dict())
+            self.Partition(mst_weight if self.minimum else -mst_weight, {})
         )
 
         return self
diff --git a/networkx/algorithms/tree/tests/test_branchings.py b/networkx/algorithms/tree/tests/test_branchings.py
index 3417446..b7bbd9e 100644
--- a/networkx/algorithms/tree/tests/test_branchings.py
+++ b/networkx/algorithms/tree/tests/test_branchings.py
@@ -1,4 +1,5 @@
 import math
+from operator import itemgetter
 
 import pytest
 
@@ -199,6 +200,26 @@ def test_greedy_max1():
     assert_equal_branchings(B, B_)
 
 
+def test_greedy_branching_kwarg_kind():
+    G = G1()
+    with pytest.raises(nx.NetworkXException, match="Unknown value for `kind`."):
+        B = branchings.greedy_branching(G, kind="lol")
+
+
+def test_greedy_branching_for_unsortable_nodes():
+    G = nx.DiGraph()
+    G.add_weighted_edges_from([((2, 3), 5, 1), (3, "a", 1), (2, 4, 5)])
+    edges = [(u, v, data.get("weight", 1)) for (u, v, data) in G.edges(data=True)]
+    with pytest.raises(TypeError):
+        edges.sort(key=itemgetter(2, 0, 1), reverse=True)
+    B = branchings.greedy_branching(G, kind="max").edges(data=True)
+    assert list(B) == [
+        ((2, 3), 5, {"weight": 1}),
+        (3, "a", {"weight": 1}),
+        (2, 4, {"weight": 5}),
+    ]
+
+
 def test_greedy_max2():
     # Different default weight.
     #
@@ -427,6 +448,38 @@ def test_edge_attribute_preservation_multigraph():
     assert B[0][1][0]["otherattr2"] == 3
 
 
+def test_Edmond_kind():
+    G = nx.MultiGraph()
+
+    edgelist = [
+        (0, 1, [("weight", 5), ("otherattr", 1), ("otherattr2", 3)]),
+        (0, 2, [("weight", 5), ("otherattr", 2), ("otherattr2", 2)]),
+        (1, 2, [("weight", 6), ("otherattr", 3), ("otherattr2", 1)]),
+    ]
+    G.add_edges_from(edgelist * 2)  # Make sure we have duplicate edge paths
+    ed = branchings.Edmonds(G)
+    with pytest.raises(nx.NetworkXException, match="Unknown value for `kind`."):
+        ed.find_optimum(kind="lol", preserve_attrs=True)
+
+
+def test_MultiDiGraph_EdgeKey():
+    # test if more than one edges has the same key
+    G = branchings.MultiDiGraph_EdgeKey()
+    G.add_edge(1, 2, "A")
+    with pytest.raises(Exception, match="Key 'A' is already in use."):
+        G.add_edge(3, 4, "A")
+    # test if invalid edge key was specified
+    with pytest.raises(KeyError, match="Invalid edge key 'B'"):
+        G.remove_edge_with_key("B")
+    # test remove_edge_with_key works
+    if G.remove_edge_with_key("A"):
+        assert list(G.edges(data=True)) == []
+    # test that remove_edges_from doesn't work
+    G.add_edge(1, 3, "A")
+    with pytest.raises(NotImplementedError):
+        G.remove_edges_from([(1, 3)])
+
+
 def test_edge_attribute_discard():
     # Test that edge attributes are discarded if we do not specify to keep them
     G = nx.Graph()
diff --git a/networkx/algorithms/tree/tests/test_mst.py b/networkx/algorithms/tree/tests/test_mst.py
index 5ed4161..ee0c3f6 100644
--- a/networkx/algorithms/tree/tests/test_mst.py
+++ b/networkx/algorithms/tree/tests/test_mst.py
@@ -165,7 +165,7 @@ class MinimumSpanningTreeTestBase:
         assert edges_equal(actual, self.maximum_spanning_edgelist)
 
     def test_disconnected(self):
-        G = nx.Graph([(0, 1, dict(weight=1)), (2, 3, dict(weight=2))])
+        G = nx.Graph([(0, 1, {"weight": 1}), (2, 3, {"weight": 2})])
         T = nx.minimum_spanning_tree(G, algorithm=self.algo)
         assert nodes_equal(list(T), list(range(4)))
         assert edges_equal(list(T.edges()), [(0, 1), (2, 3)])
@@ -253,6 +253,36 @@ class TestKruskal(MultigraphMSTTestBase):
 
     algorithm = "kruskal"
 
+    def test_key_data_bool(self):
+        """Tests that the keys and data values are included in
+        MST edges based on whether keys and data parameters are
+        true or false"""
+        G = nx.MultiGraph()
+        G.add_edge(1, 2, key=1, weight=2)
+        G.add_edge(1, 2, key=2, weight=3)
+        G.add_edge(3, 2, key=1, weight=2)
+        G.add_edge(3, 1, key=1, weight=4)
+
+        # keys are included and data is not included
+        mst_edges = nx.minimum_spanning_edges(
+            G, algorithm=self.algo, keys=True, data=False
+        )
+        assert edges_equal([(1, 2, 1), (2, 3, 1)], list(mst_edges))
+
+        # keys are not included and data is included
+        mst_edges = nx.minimum_spanning_edges(
+            G, algorithm=self.algo, keys=False, data=True
+        )
+        assert edges_equal(
+            [(1, 2, {"weight": 2}), (2, 3, {"weight": 2})], list(mst_edges)
+        )
+
+        # both keys and data are not included
+        mst_edges = nx.minimum_spanning_edges(
+            G, algorithm=self.algo, keys=False, data=False
+        )
+        assert edges_equal([(1, 2), (2, 3)], list(mst_edges))
+
 
 class TestPrim(MultigraphMSTTestBase):
     """Unit tests for computing a minimum (or maximum) spanning tree
@@ -261,6 +291,25 @@ class TestPrim(MultigraphMSTTestBase):
 
     algorithm = "prim"
 
+    def test_ignore_nan(self):
+        """Tests that the edges with NaN weights are ignored or
+        raise an Error based on ignore_nan is true or false"""
+        H = nx.MultiGraph()
+        H.add_edge(1, 2, key=1, weight=float("nan"))
+        H.add_edge(1, 2, key=2, weight=3)
+        H.add_edge(3, 2, key=1, weight=2)
+        H.add_edge(3, 1, key=1, weight=4)
+
+        # NaN weight edges are ignored when ignore_nan=True
+        mst_edges = nx.minimum_spanning_edges(H, algorithm=self.algo, ignore_nan=True)
+        assert edges_equal(
+            [(1, 2, 2, {"weight": 3}), (2, 3, 1, {"weight": 2})], list(mst_edges)
+        )
+
+        # NaN weight edges raise Error when ignore_nan=False
+        with pytest.raises(ValueError):
+            list(nx.minimum_spanning_edges(H, algorithm=self.algo, ignore_nan=False))
+
     def test_multigraph_keys_tree(self):
         G = nx.MultiGraph()
         G.add_edge(0, 1, key="a", weight=2)
diff --git a/networkx/algorithms/tree/tests/test_recognition.py b/networkx/algorithms/tree/tests/test_recognition.py
index d9c4943..a9c6c5a 100644
--- a/networkx/algorithms/tree/tests/test_recognition.py
+++ b/networkx/algorithms/tree/tests/test_recognition.py
@@ -4,13 +4,11 @@ import networkx as nx
 
 
 class TestTreeRecognition:
-
     graph = nx.Graph
     multigraph = nx.MultiGraph
 
     @classmethod
     def setup_class(cls):
-
         cls.T1 = cls.graph()
 
         cls.T2 = cls.graph()
diff --git a/networkx/algorithms/triads.py b/networkx/algorithms/triads.py
index 1c107a1..0b40a32 100644
--- a/networkx/algorithms/triads.py
+++ b/networkx/algorithms/triads.py
@@ -6,10 +6,9 @@
 
 from collections import defaultdict
 from itertools import combinations, permutations
-from random import sample
 
 import networkx as nx
-from networkx.utils import not_implemented_for
+from networkx.utils import not_implemented_for, py_random_state
 
 __all__ = [
     "triadic_census",
@@ -149,6 +148,30 @@ def triadic_census(G, nodelist=None):
     census : dict
        Dictionary with triad type as keys and number of occurrences as values.
 
+    Examples
+    --------
+    >>> G = nx.DiGraph([(1, 2), (2, 3), (3, 1), (3, 4), (4, 1), (4, 2)])
+    >>> triadic_census = nx.triadic_census(G)
+    >>> for key, value in triadic_census.items():
+    ...     print(f"{key}: {value}")
+    ...
+    003: 0
+    012: 0
+    102: 0
+    021D: 0
+    021U: 0
+    021C: 0
+    111D: 0
+    111U: 0
+    030T: 2
+    030C: 2
+    201: 0
+    120D: 0
+    120U: 0
+    120C: 0
+    210: 0
+    300: 0
+
     Notes
     -----
     This algorithm has complexity $O(m)$ where $m$ is the number of edges in
@@ -252,6 +275,7 @@ def triadic_census(G, nodelist=None):
     return census
 
 
+@nx._dispatch()
 def is_triad(G):
     """Returns True if the graph G is a triad, else False.
 
@@ -264,6 +288,15 @@ def is_triad(G):
     -------
     istriad : boolean
        Whether G is a valid triad
+
+    Examples
+    --------
+    >>> G = nx.DiGraph([(1, 2), (2, 3), (3, 1)])
+    >>> nx.is_triad(G)
+    True
+    >>> G.add_edge(0, 1)
+    >>> nx.is_triad(G)
+    False
     """
     if isinstance(G, nx.Graph):
         if G.order() == 3 and nx.is_directed(G):
@@ -285,6 +318,13 @@ def all_triplets(G):
     -------
     triplets : generator of 3-tuples
        Generator of tuples of 3 nodes
+
+    Examples
+    --------
+    >>> G = nx.DiGraph([(1, 2), (2, 3), (3, 4)])
+    >>> list(nx.all_triplets(G))
+    [(1, 2, 3), (1, 2, 4), (1, 3, 4), (2, 3, 4)]
+
     """
     triplets = combinations(G.nodes(), 3)
     return triplets
@@ -303,6 +343,17 @@ def all_triads(G):
     -------
     all_triads : generator of DiGraphs
        Generator of triads (order-3 DiGraphs)
+
+    Examples
+    --------
+    >>> G = nx.DiGraph([(1, 2), (2, 3), (3, 1), (3, 4), (4, 1), (4, 2)])
+    >>> for triad in nx.all_triads(G):
+    ...     print(triad.edges)
+    [(1, 2), (2, 3), (3, 1)]
+    [(1, 2), (4, 1), (4, 2)]
+    [(3, 1), (3, 4), (4, 1)]
+    [(2, 3), (3, 4), (4, 2)]
+
     """
     triplets = combinations(G.nodes(), 3)
     for triplet in triplets:
@@ -312,6 +363,29 @@ def all_triads(G):
 @not_implemented_for("undirected")
 def triads_by_type(G):
     """Returns a list of all triads for each triad type in a directed graph.
+    There are exactly 16 different types of triads possible. Suppose 1, 2, 3 are three
+    nodes, they will be classified as a particular triad type if their connections
+    are as follows:
+
+    - 003: 1, 2, 3
+    - 012: 1 -> 2, 3
+    - 102: 1 <-> 2, 3
+    - 021D: 1 <- 2 -> 3
+    - 021U: 1 -> 2 <- 3
+    - 021C: 1 -> 2 -> 3
+    - 111D: 1 <-> 2 <- 3
+    - 111U: 1 <-> 2 -> 3
+    - 030T: 1 -> 2 -> 3, 1 -> 3
+    - 030C: 1 <- 2 <- 3, 1 -> 3
+    - 201: 1 <-> 2 <-> 3
+    - 120D: 1 <- 2 -> 3, 1 <-> 3
+    - 120U: 1 -> 2 <- 3, 1 <-> 3
+    - 120C: 1 -> 2 -> 3, 1 <-> 3
+    - 210: 1 -> 2 <-> 3, 1 <-> 3
+    - 300: 1 <-> 2 <-> 3, 1 <-> 3
+
+    Refer to the :doc:`example gallery </auto_examples/graph/plot_triad_types>`
+    for visual examples of the triad types.
 
     Parameters
     ----------
@@ -322,6 +396,21 @@ def triads_by_type(G):
     -------
     tri_by_type : dict
        Dictionary with triad types as keys and lists of triads as values.
+
+    Examples
+    --------
+    >>> G = nx.DiGraph([(1, 2), (1, 3), (2, 3), (3, 1), (5, 6), (5, 4), (6, 7)])
+    >>> dict = nx.triads_by_type(G)
+    >>> dict['120C'][0].edges()
+    OutEdgeView([(1, 2), (1, 3), (2, 3), (3, 1)])
+    >>> dict['012'][0].edges()
+    OutEdgeView([(1, 2)])
+
+    References
+    ----------
+    .. [1] Snijders, T. (2012). "Transitivity and triads." University of
+        Oxford.
+        https://web.archive.org/web/20170830032057/http://www.stats.ox.ac.uk/~snijders/Trans_Triads_ha.pdf
     """
     # num_triads = o * (o - 1) * (o - 2) // 6
     # if num_triads > TRIAD_LIMIT: print(WARNING)
@@ -347,6 +436,15 @@ def triad_type(G):
     triad_type : str
        A string identifying the triad type
 
+    Examples
+    --------
+    >>> G = nx.DiGraph([(1, 2), (2, 3), (3, 1)])
+    >>> nx.triad_type(G)
+    '030C'
+    >>> G.add_edge(1, 3)
+    >>> nx.triad_type(G)
+    '120C'
+
     Notes
     -----
     There can be 6 unique edges in a triad (order-3 DiGraph) (so 2^^6=64 unique
@@ -360,7 +458,7 @@ def triad_type(G):
 
     {m}     = number of mutual ties (takes 0, 1, 2, 3); a mutual tie is (0,1)
               AND (1,0)
-    {a}     = number of assymmetric ties (takes 0, 1, 2, 3); an assymmetric tie
+    {a}     = number of asymmetric ties (takes 0, 1, 2, 3); an asymmetric tie
               is (0,1) BUT NOT (1,0) or vice versa
     {n}     = number of null ties (takes 0, 1, 2, 3); a null tie is NEITHER
               (0,1) NOR (1,0)
@@ -392,7 +490,7 @@ def triad_type(G):
         elif e1[1] == e2[0] or e2[1] == e1[0]:
             return "021C"
     elif num_edges == 3:
-        for (e1, e2, e3) in permutations(G.edges(), 3):
+        for e1, e2, e3 in permutations(G.edges(), 3):
             if set(e1) == set(e2):
                 if e3[0] in e1:
                     return "111U"
@@ -404,7 +502,7 @@ def triad_type(G):
                 # e3 == (e1[0], e2[1]) and e2 == (e1[1], e3[1]):
                 return "030T"
     elif num_edges == 4:
-        for (e1, e2, e3, e4) in permutations(G.edges(), 4):
+        for e1, e2, e3, e4 in permutations(G.edges(), 4):
             if set(e1) == set(e2):
                 # identify pair of symmetric edges (which necessarily exists)
                 if set(e3) == set(e4):
@@ -422,20 +520,32 @@ def triad_type(G):
 
 
 @not_implemented_for("undirected")
-def random_triad(G):
+@py_random_state(1)
+def random_triad(G, seed=None):
     """Returns a random triad from a directed graph.
 
     Parameters
     ----------
     G : digraph
        A NetworkX DiGraph
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness<randomness>`.
 
     Returns
     -------
     G2 : subgraph
        A randomly selected triad (order-3 NetworkX DiGraph)
+
+    Examples
+    --------
+    >>> G = nx.DiGraph([(1, 2), (1, 3), (2, 3), (3, 1), (5, 6), (5, 4), (6, 7)])
+    >>> triad = nx.random_triad(G, seed=1)
+    >>> triad.edges
+    OutEdgeView([(1, 2)])
+
     """
-    nodes = sample(list(G.nodes()), 3)
+    nodes = seed.sample(list(G.nodes()), 3)
     G2 = G.subgraph(nodes)
     return G2
 
diff --git a/networkx/algorithms/voronoi.py b/networkx/algorithms/voronoi.py
index 184afa2..f188347 100644
--- a/networkx/algorithms/voronoi.py
+++ b/networkx/algorithms/voronoi.py
@@ -9,9 +9,9 @@ def voronoi_cells(G, center_nodes, weight="weight"):
     """Returns the Voronoi cells centered at `center_nodes` with respect
     to the shortest-path distance metric.
 
-    If *C* is a set of nodes in the graph and *c* is an element of *C*,
-    the *Voronoi cell* centered at a node *c* is the set of all nodes
-    *v* that are closer to *c* than to any other center node in *C* with
+    If $C$ is a set of nodes in the graph and $c$ is an element of $C$,
+    the *Voronoi cell* centered at a node $c$ is the set of all nodes
+    $v$ that are closer to $c$ than to any other center node in $C$ with
     respect to the shortest-path distance metric. [1]_
 
     For directed graphs, this will compute the "outward" Voronoi cells,
@@ -62,10 +62,9 @@ def voronoi_cells(G, center_nodes, weight="weight"):
 
     References
     ----------
-    .. [1] Erwig, Martin. (2000),
-           "The graph Voronoi diagram with applications."
-           *Networks*, 36: 156--163.
-           <dx.doi.org/10.1002/1097-0037(200010)36:3<156::AID-NET2>3.0.CO;2-L>
+    .. [1] Erwig, Martin. (2000),"The graph Voronoi diagram with applications."
+        *Networks*, 36: 156--163.
+        https://doi.org/10.1002/1097-0037(200010)36:3<156::AID-NET2>3.0.CO;2-L
 
     """
     # Determine the shortest paths from any one of the center nodes to
diff --git a/networkx/classes/__init__.py b/networkx/classes/__init__.py
index d5bb1d7..cc534cd 100644
--- a/networkx/classes/__init__.py
+++ b/networkx/classes/__init__.py
@@ -2,7 +2,7 @@ from .graph import Graph
 from .digraph import DiGraph
 from .multigraph import MultiGraph
 from .multidigraph import MultiDiGraph
-from .ordered import *
+from .backends import _dispatch
 
 from .function import *
 
@@ -11,3 +11,4 @@ from networkx.classes import filters
 from networkx.classes import coreviews
 from networkx.classes import graphviews
 from networkx.classes import reportviews
+from networkx.classes import backends
diff --git a/networkx/classes/backends.py b/networkx/classes/backends.py
new file mode 100644
index 0000000..761d0a4
--- /dev/null
+++ b/networkx/classes/backends.py
@@ -0,0 +1,243 @@
+"""
+Code to support various backends in a plugin dispatch architecture.
+
+Create a Dispatcher
+-------------------
+
+To be a valid plugin, a package must register an entry_point
+of `networkx.plugins` with a key pointing to the handler.
+
+For example::
+
+    entry_points={'networkx.plugins': 'sparse = networkx_plugin_sparse'}
+
+The plugin must create a Graph-like object which contains an attribute
+``__networkx_plugin__`` with a value of the entry point name.
+
+Continuing the example above::
+
+    class WrappedSparse:
+        __networkx_plugin__ = "sparse"
+        ...
+
+When a dispatchable NetworkX algorithm encounters a Graph-like object
+with a ``__networkx_plugin__`` attribute, it will look for the associated
+dispatch object in the entry_points, load it, and dispatch the work to it.
+
+
+Testing
+-------
+To assist in validating the backend algorithm implementations, if an
+environment variable ``NETWORKX_GRAPH_CONVERT`` is set to a registered
+plugin keys, the dispatch machinery will automatically convert regular
+networkx Graphs and DiGraphs to the backend equivalent by calling
+``<backend dispatcher>.convert_from_nx(G, weight=weight, name=name)``.
+
+The converted object is then passed to the backend implementation of
+the algorithm. The result is then passed to
+``<backend dispatcher>.convert_to_nx(result, name=name)`` to convert back
+to a form expected by the NetworkX tests.
+
+By defining ``convert_from_nx`` and ``convert_to_nx`` methods and setting
+the environment variable, NetworkX will automatically route tests on
+dispatchable algorithms to the backend, allowing the full networkx test
+suite to be run against the backend implementation.
+
+Example pytest invocation::
+
+    NETWORKX_GRAPH_CONVERT=sparse pytest --pyargs networkx
+
+Dispatchable algorithms which are not implemented by the backend
+will cause a ``pytest.xfail()``, giving some indication that not all
+tests are working, while avoiding causing an explicit failure.
+
+A special ``on_start_tests(items)`` function may be defined by the backend.
+It will be called with the list of NetworkX tests discovered. Each item
+is a test object that can be marked as xfail if the backend does not support
+the test using `item.add_marker(pytest.mark.xfail(reason=...))`.
+"""
+import functools
+import inspect
+import os
+import sys
+from importlib.metadata import entry_points
+
+from ..exception import NetworkXNotImplemented
+
+__all__ = ["_dispatch", "_mark_tests"]
+
+
+class PluginInfo:
+    """Lazily loaded entry_points plugin information"""
+
+    def __init__(self):
+        self._items = None
+
+    def __bool__(self):
+        return len(self.items) > 0
+
+    @property
+    def items(self):
+        if self._items is None:
+            if sys.version_info < (3, 10):
+                self._items = entry_points()["networkx.plugins"]
+            else:
+                self._items = entry_points(group="networkx.plugins")
+        return self._items
+
+    def __contains__(self, name):
+        if sys.version_info < (3, 10):
+            return len([ep for ep in self.items if ep.name == name]) > 0
+        return name in self.items.names
+
+    def __getitem__(self, name):
+        if sys.version_info < (3, 10):
+            return [ep for ep in self.items if ep.name == name][0]
+        return self.items[name]
+
+
+plugins = PluginInfo()
+_registered_algorithms = {}
+
+
+def _register_algo(name, wrapped_func):
+    if name in _registered_algorithms:
+        raise KeyError(f"Algorithm already exists in dispatch registry: {name}")
+    _registered_algorithms[name] = wrapped_func
+    wrapped_func.dispatchname = name
+
+
+def _dispatch(func=None, *, name=None):
+    """Dispatches to a backend algorithm
+    when the first argument is a backend graph-like object.
+    """
+    # Allow any of the following decorator forms:
+    #  - @_dispatch
+    #  - @_dispatch()
+    #  - @_dispatch("override_name")
+    #  - @_dispatch(name="override_name")
+    if func is None:
+        if name is None:
+            return _dispatch
+        return functools.partial(_dispatch, name=name)
+    if isinstance(func, str):
+        return functools.partial(_dispatch, name=func)
+    # If name not provided, use the name of the function
+    if name is None:
+        name = func.__name__
+
+    @functools.wraps(func)
+    def wrapper(*args, **kwds):
+        if args:
+            graph = args[0]
+        else:
+            try:
+                graph = kwds["G"]
+            except KeyError:
+                raise TypeError(f"{name}() missing positional argument: 'G'") from None
+        if hasattr(graph, "__networkx_plugin__") and plugins:
+            plugin_name = graph.__networkx_plugin__
+            if plugin_name in plugins:
+                backend = plugins[plugin_name].load()
+                if hasattr(backend, name):
+                    return getattr(backend, name).__call__(*args, **kwds)
+                else:
+                    raise NetworkXNotImplemented(
+                        f"'{name}' not implemented by {plugin_name}"
+                    )
+        return func(*args, **kwds)
+
+    # Keep a handle to the original function to use when testing
+    # the dispatch mechanism internally
+    wrapper._orig_func = func
+
+    _register_algo(name, wrapper)
+    return wrapper
+
+
+def test_override_dispatch(func=None, *, name=None):
+    """Auto-converts the first argument into the backend equivalent,
+    causing the dispatching mechanism to trigger for every
+    decorated algorithm."""
+    if func is None:
+        if name is None:
+            return test_override_dispatch
+        return functools.partial(test_override_dispatch, name=name)
+    if isinstance(func, str):
+        return functools.partial(test_override_dispatch, name=func)
+    # If name not provided, use the name of the function
+    if name is None:
+        name = func.__name__
+
+    sig = inspect.signature(func)
+
+    @functools.wraps(func)
+    def wrapper(*args, **kwds):
+        backend = plugins[plugin_name].load()
+        if not hasattr(backend, name):
+            if plugin_name == "nx-loopback":
+                raise NetworkXNotImplemented(
+                    f"'{name}' not found in {backend.__class__.__name__}"
+                )
+            pytest.xfail(f"'{name}' not implemented by {plugin_name}")
+        bound = sig.bind(*args, **kwds)
+        bound.apply_defaults()
+        if args:
+            graph, *args = args
+        else:
+            try:
+                graph = kwds.pop("G")
+            except KeyError:
+                raise TypeError(f"{name}() missing positional argument: 'G'") from None
+        # Convert graph into backend graph-like object
+        #   Include the weight label, if provided to the algorithm
+        weight = None
+        if "weight" in bound.arguments:
+            weight = bound.arguments["weight"]
+        elif "data" in bound.arguments and "default" in bound.arguments:
+            # This case exists for several MultiGraph edge algorithms
+            if isinstance(bound.arguments["data"], str):
+                weight = bound.arguments["data"]
+            elif bound.arguments["data"]:
+                weight = "weight"
+        graph = backend.convert_from_nx(graph, weight=weight, name=name)
+        result = getattr(backend, name).__call__(graph, *args, **kwds)
+        return backend.convert_to_nx(result, name=name)
+
+    wrapper._orig_func = func
+    _register_algo(name, wrapper)
+    return wrapper
+
+
+# Check for auto-convert testing
+# This allows existing NetworkX tests to be run against a backend
+# implementation without any changes to the testing code. The only
+# required change is to set an environment variable prior to running
+# pytest.
+if os.environ.get("NETWORKX_GRAPH_CONVERT"):
+    plugin_name = os.environ["NETWORKX_GRAPH_CONVERT"]
+    if not plugins:
+        raise Exception("No registered networkx.plugins entry_points")
+    if plugin_name not in plugins:
+        raise Exception(
+            f"No registered networkx.plugins entry_point named {plugin_name}"
+        )
+
+    try:
+        import pytest
+    except ImportError:
+        raise ImportError(
+            f"Missing pytest, which is required when using NETWORKX_GRAPH_CONVERT"
+        )
+
+    # Override `dispatch` for testing
+    _dispatch = test_override_dispatch
+
+
+def _mark_tests(items):
+    """Allow backend to mark tests (skip or xfail) if they aren't able to correctly handle them"""
+    if os.environ.get("NETWORKX_GRAPH_CONVERT"):
+        plugin_name = os.environ["NETWORKX_GRAPH_CONVERT"]
+        backend = plugins[plugin_name].load()
+        if hasattr(backend, "on_start_tests"):
+            getattr(backend, "on_start_tests")(items)
diff --git a/networkx/classes/coreviews.py b/networkx/classes/coreviews.py
index 6c5b8a4..5c4defe 100644
--- a/networkx/classes/coreviews.py
+++ b/networkx/classes/coreviews.py
@@ -2,7 +2,6 @@
 These ``Views`` often restrict element access, with either the entire view or
 layers of nested mappings being read-only.
 """
-import warnings
 from collections.abc import Mapping
 
 __all__ = [
@@ -135,7 +134,7 @@ class UnionAtlas(Mapping):
         self._pred = pred
 
     def __len__(self):
-        return len(self._succ) + len(self._pred)
+        return len(self._succ.keys() | self._pred.keys())
 
     def __iter__(self):
         return iter(set(self._succ.keys()) | set(self._pred.keys()))
@@ -286,25 +285,6 @@ class FilterAtlas(Mapping):  # nodedict, nbrdict, keydict
             return self._atlas[key]
         raise KeyError(f"Key {key} not found")
 
-    # FIXME should this just be removed? we don't use it, but someone might
-    def copy(self):
-        warnings.warn(
-            (
-                "FilterAtlas.copy is deprecated.\n"
-                "It will be removed in NetworkX 3.0.\n"
-                "Please open an Issue on https://github.com/networkx/networkx/issues\n"
-                "if you use this feature. We think that no one does use it."
-            ),
-            DeprecationWarning,
-        )
-        try:  # check that NODE_OK has attr 'nodes'
-            node_ok_shorter = 2 * len(self.NODE_OK.nodes) < len(self._atlas)
-        except AttributeError:
-            node_ok_shorter = False
-        if node_ok_shorter:
-            return {u: self._atlas[u] for u in self.NODE_OK.nodes if u in self._atlas}
-        return {u: d for u, d in self._atlas.items() if self.NODE_OK(u)}
-
     def __str__(self):
         return str({nbr: self[nbr] for nbr in self})
 
@@ -339,38 +319,6 @@ class FilterAdjacency(Mapping):  # edgedict
             return FilterAtlas(self._atlas[node], new_node_ok)
         raise KeyError(f"Key {node} not found")
 
-    # FIXME should this just be removed? we don't use it, but someone might
-    def copy(self):
-        warnings.warn(
-            (
-                "FilterAdjacency.copy is deprecated.\n"
-                "It will be removed in NetworkX 3.0.\n"
-                "Please open an Issue on https://github.com/networkx/networkx/issues\n"
-                "if you use this feature. We think that no one does use it."
-            ),
-            DeprecationWarning,
-        )
-        try:  # check that NODE_OK has attr 'nodes'
-            node_ok_shorter = 2 * len(self.NODE_OK.nodes) < len(self._atlas)
-        except AttributeError:
-            node_ok_shorter = False
-        if node_ok_shorter:
-            return {
-                u: {
-                    v: d
-                    for v, d in self._atlas[u].items()
-                    if self.NODE_OK(v)
-                    if self.EDGE_OK(u, v)
-                }
-                for u in self.NODE_OK.nodes
-                if u in self._atlas
-            }
-        return {
-            u: {v: d for v, d in nbrs.items() if self.NODE_OK(v) if self.EDGE_OK(u, v)}
-            for u, nbrs in self._atlas.items()
-            if self.NODE_OK(u)
-        }
-
     def __str__(self):
         return str({nbr: self[nbr] for nbr in self})
 
@@ -407,33 +355,6 @@ class FilterMultiInner(FilterAdjacency):  # muliedge_seconddict
             return FilterAtlas(self._atlas[nbr], new_node_ok)
         raise KeyError(f"Key {nbr} not found")
 
-    # FIXME should this just be removed? we don't use it, but someone might
-    def copy(self):
-        warnings.warn(
-            (
-                "FilterMultiInner.copy is deprecated.\n"
-                "It will be removed in NetworkX 3.0.\n"
-                "Please open an Issue on https://github.com/networkx/networkx/issues\n"
-                "if you use this feature. We think that no one does use it."
-            ),
-            DeprecationWarning,
-        )
-        try:  # check that NODE_OK has attr 'nodes'
-            node_ok_shorter = 2 * len(self.NODE_OK.nodes) < len(self._atlas)
-        except AttributeError:
-            node_ok_shorter = False
-        if node_ok_shorter:
-            return {
-                v: {k: d for k, d in self._atlas[v].items() if self.EDGE_OK(v, k)}
-                for v in self.NODE_OK.nodes
-                if v in self._atlas
-            }
-        return {
-            v: {k: d for k, d in nbrs.items() if self.EDGE_OK(v, k)}
-            for v, nbrs in self._atlas.items()
-            if self.NODE_OK(v)
-        }
-
 
 class FilterMultiAdjacency(FilterAdjacency):  # multiedgedict
     def __getitem__(self, node):
@@ -444,39 +365,3 @@ class FilterMultiAdjacency(FilterAdjacency):  # multiedgedict
 
             return FilterMultiInner(self._atlas[node], self.NODE_OK, edge_ok)
         raise KeyError(f"Key {node} not found")
-
-    # FIXME should this just be removed? we don't use it, but someone might
-    def copy(self):
-        warnings.warn(
-            (
-                "FilterMultiAdjacency.copy is deprecated.\n"
-                "It will be removed in NetworkX 3.0.\n"
-                "Please open an Issue on https://github.com/networkx/networkx/issues\n"
-                "if you use this feature. We think that no one does use it."
-            ),
-            DeprecationWarning,
-        )
-        try:  # check that NODE_OK has attr 'nodes'
-            node_ok_shorter = 2 * len(self.NODE_OK.nodes) < len(self._atlas)
-        except AttributeError:
-            node_ok_shorter = False
-        if node_ok_shorter:
-            my_nodes = self.NODE_OK.nodes
-            return {
-                u: {
-                    v: {k: d for k, d in kd.items() if self.EDGE_OK(u, v, k)}
-                    for v, kd in self._atlas[u].items()
-                    if v in my_nodes
-                }
-                for u in my_nodes
-                if u in self._atlas
-            }
-        return {
-            u: {
-                v: {k: d for k, d in kd.items() if self.EDGE_OK(u, v, k)}
-                for v, kd in nbrs.items()
-                if self.NODE_OK(v)
-            }
-            for u, nbrs in self._atlas.items()
-            if self.NODE_OK(u)
-        }
diff --git a/networkx/classes/digraph.py b/networkx/classes/digraph.py
index 9528a15..1934dfb 100644
--- a/networkx/classes/digraph.py
+++ b/networkx/classes/digraph.py
@@ -3,7 +3,7 @@ from copy import deepcopy
 from functools import cached_property
 
 import networkx as nx
-import networkx.convert as convert
+from networkx import convert
 from networkx.classes.coreviews import AdjacencyView
 from networkx.classes.graph import Graph
 from networkx.classes.reportviews import (
@@ -99,7 +99,6 @@ class DiGraph(Graph):
     Graph
     MultiGraph
     MultiDiGraph
-    OrderedDiGraph
 
     Examples
     --------
@@ -308,15 +307,10 @@ class DiGraph(Graph):
     >>> G.add_edge(2, 2)
     >>> G[2][1] is G[2][2]
     True
-
-
-    Please see :mod:`~networkx.classes.ordered` for more examples of
-    creating graph subclasses by overwriting the base class `dict` with
-    a dictionary-like object.
     """
 
-    _adj = _CachedPropertyResetterAdjAndSucc()  # type: ignore
-    _succ = _adj  # type: ignore
+    _adj = _CachedPropertyResetterAdjAndSucc()  # type: ignore[assignment]
+    _succ = _adj  # type: ignore[has-type]
     _pred = _CachedPropertyResetterPred()
 
     def __init__(self, incoming_graph_data=None, **attr):
@@ -329,7 +323,7 @@ class DiGraph(Graph):
             graph is created.  The data can be an edge list, or any
             NetworkX graph object.  If the corresponding optional Python
             packages are installed the data can also be a 2D NumPy array, a
-            SciPy sparse matrix, or a PyGraphviz graph.
+            SciPy sparse array, or a PyGraphviz graph.
 
         attr : keyword arguments, optional (default= no attributes)
             Attributes to add to graph as key=value pairs.
@@ -491,6 +485,16 @@ class DiGraph(Graph):
         --------
         add_node
 
+        Notes
+        -----
+        When adding nodes from an iterator over the graph you are changing,
+        a `RuntimeError` can be raised with message:
+        `RuntimeError: dictionary changed size during iteration`. This
+        happens when the graph's underlying dictionary is modified during
+        iteration. To avoid this error, evaluate the iterator into a separate
+        object, e.g. by using `list(iterator_of_nodes)`, and pass this
+        object to `G.add_nodes_from`.
+
         Examples
         --------
         >>> G = nx.Graph()  # or DiGraph, MultiGraph, MultiDiGraph, etc
@@ -515,6 +519,13 @@ class DiGraph(Graph):
         >>> H.nodes[1]["size"]
         11
 
+        Evaluate an iterator over a graph if using it to modify the same graph
+
+        >>> G = nx.DiGraph([(0, 1), (1, 2), (3, 4)])
+        >>> # wrong way - will raise RuntimeError
+        >>> # G.add_nodes_from(n + 1 for n in G.nodes)
+        >>> # correct way
+        >>> G.add_nodes_from(list(n + 1 for n in G.nodes))
         """
         for n in nodes_for_adding:
             try:
@@ -588,6 +599,16 @@ class DiGraph(Graph):
         --------
         remove_node
 
+        Notes
+        -----
+        When removing nodes from an iterator over the graph you are changing,
+        a `RuntimeError` will be raised with message:
+        `RuntimeError: dictionary changed size during iteration`. This
+        happens when the graph's underlying dictionary is modified during
+        iteration. To avoid this error, evaluate the iterator into a separate
+        object, e.g. by using `list(iterator_of_nodes)`, and pass this
+        object to `G.remove_nodes_from`.
+
         Examples
         --------
         >>> G = nx.path_graph(3)  # or DiGraph, MultiGraph, MultiDiGraph, etc
@@ -598,6 +619,13 @@ class DiGraph(Graph):
         >>> list(G.nodes)
         []
 
+        Evaluate an iterator over a graph if using it to modify the same graph
+
+        >>> G = nx.DiGraph([(0, 1), (1, 2), (3, 4)])
+        >>> # this command will fail, as the graph's dict is modified during iteration
+        >>> # G.remove_nodes_from(n for n in G.nodes if n < 2)
+        >>> # this command will work, since the dictionary underlying graph is not modified
+        >>> G.remove_nodes_from(list(n for n in G.nodes if n < 2))
         """
         for n in nodes:
             try:
@@ -708,6 +736,14 @@ class DiGraph(Graph):
         Edge attributes specified in an ebunch take precedence over
         attributes specified via keyword arguments.
 
+        When adding edges from an iterator over the graph you are changing,
+        a `RuntimeError` can be raised with message:
+        `RuntimeError: dictionary changed size during iteration`. This
+        happens when the graph's underlying dictionary is modified during
+        iteration. To avoid this error, evaluate the iterator into a separate
+        object, e.g. by using `list(iterator_of_edges)`, and pass this
+        object to `G.add_edges_from`.
+
         Examples
         --------
         >>> G = nx.Graph()  # or DiGraph, MultiGraph, MultiDiGraph, etc
@@ -719,6 +755,15 @@ class DiGraph(Graph):
 
         >>> G.add_edges_from([(1, 2), (2, 3)], weight=3)
         >>> G.add_edges_from([(3, 4), (1, 4)], label="WN2898")
+
+        Evaluate an iterator over a graph if using it to modify the same graph
+
+        >>> G = nx.DiGraph([(1, 2), (2, 3), (3, 4)])
+        >>> # Grow graph by one new node, adding edges to all existing nodes.
+        >>> # wrong way - will raise RuntimeError
+        >>> # G.add_edges_from(((5, n) for n in G.nodes))
+        >>> # right way - note that there will be no self-edge for node 5
+        >>> G.add_edges_from(list((5, n) for n in G.nodes))
         """
         for e in ebunch_to_add:
             ne = len(e)
@@ -955,7 +1000,7 @@ class DiGraph(Graph):
 
     @cached_property
     def in_edges(self):
-        """An InEdgeView of the Graph as G.in_edges or G.in_edges().
+        """A view of the in edges of the graph as G.in_edges or G.in_edges().
 
         in_edges(self, nbunch=None, data=False, default=None):
 
@@ -973,11 +1018,20 @@ class DiGraph(Graph):
 
         Returns
         -------
-        in_edges : InEdgeView
+        in_edges : InEdgeView or InEdgeDataView
             A view of edge attributes, usually it iterates over (u, v)
             or (u, v, d) tuples of edges, but can also be used for
             attribute lookup as `edges[u, v]['foo']`.
 
+        Examples
+        --------
+        >>> G = nx.DiGraph()
+        >>> G.add_edge(1, 2, color='blue')
+        >>> G.in_edges()
+        InEdgeView([(1, 2)])
+        >>> G.in_edges(nbunch=2)
+        InEdgeDataView([(1, 2)])
+
         See Also
         --------
         edges
diff --git a/networkx/classes/function.py b/networkx/classes/function.py
index 3750fd1..a61635c 100644
--- a/networkx/classes/function.py
+++ b/networkx/classes/function.py
@@ -18,7 +18,6 @@ __all__ = [
     "number_of_edges",
     "density",
     "is_directed",
-    "info",
     "freeze",
     "is_frozen",
     "subgraph",
@@ -202,6 +201,7 @@ def freeze(G):
     G.remove_edge = frozen
     G.remove_edges_from = frozen
     G.clear = frozen
+    G.clear_edges = frozen
     G.frozen = True
     return G
 
@@ -552,51 +552,6 @@ def create_empty_copy(G, with_data=True):
     return H
 
 
-def info(G, n=None):
-    """Return a summary of information for the graph G or a single node n.
-
-    The summary includes the number of nodes and edges, or neighbours for a single
-    node.
-
-    Parameters
-    ----------
-    G : Networkx graph
-       A graph
-    n : node (any hashable)
-       A node in the graph G
-
-    Returns
-    -------
-    info : str
-        A string containing the short summary
-
-    Raises
-    ------
-    NetworkXError
-        If n is not in the graph G
-
-    .. deprecated:: 2.7
-       ``info`` is deprecated and will be removed in NetworkX 3.0.
-    """
-    import warnings
-
-    warnings.warn(
-        ("info is deprecated and will be removed in version 3.0.\n"),
-        DeprecationWarning,
-        stacklevel=2,
-    )
-    if n is None:
-        return str(G)
-    if n not in G:
-        raise nx.NetworkXError(f"node {n} not in graph")
-    info = ""  # append this all to a string
-    info += f"Node {n} has the following properties:\n"
-    info += f"Degree: {G.degree(n)}\n"
-    info += "Neighbors: "
-    info += " ".join(str(nbr) for nbr in G.neighbors(n))
-    return info
-
-
 def set_node_attributes(G, values, name=None):
     """Sets node attributes from a given value or dictionary of values.
 
@@ -1298,10 +1253,7 @@ def is_path(G, path):
         True if `path` is a valid path in `G`
 
     """
-    for node, nbr in nx.utils.pairwise(path):
-        if (node not in G) or (nbr not in G[node]):
-            return False
-    return True
+    return all((node in G and nbr in G[node]) for node, nbr in nx.utils.pairwise(path))
 
 
 def path_weight(G, path, weight):
diff --git a/networkx/classes/graph.py b/networkx/classes/graph.py
index 6edc506..ec2ad07 100644
--- a/networkx/classes/graph.py
+++ b/networkx/classes/graph.py
@@ -11,7 +11,7 @@ from copy import deepcopy
 from functools import cached_property
 
 import networkx as nx
-import networkx.convert as convert
+from networkx import convert
 from networkx.classes.coreviews import AdjacencyView
 from networkx.classes.reportviews import DegreeView, EdgeView, NodeView
 from networkx.exception import NetworkXError
@@ -95,7 +95,6 @@ class Graph:
     DiGraph
     MultiGraph
     MultiDiGraph
-    OrderedGraph
 
     Examples
     --------
@@ -302,10 +301,6 @@ class Graph:
     >>> G.add_edge(2, 2)
     >>> G[2][1] is G[2][2]
     True
-
-    Please see :mod:`~networkx.classes.ordered` for more examples of
-    creating graph subclasses by overwriting the base class `dict` with
-    a dictionary-like object.
     """
 
     _adj = _CachedPropertyResetterAdj()
@@ -344,7 +339,7 @@ class Graph:
             graph is created.  The data can be an edge list, or any
             NetworkX graph object.  If the corresponding optional Python
             packages are installed the data can also be a 2D NumPy array, a
-            SciPy sparse matrix, or a PyGraphviz graph.
+            SciPy sparse array, or a PyGraphviz graph.
 
         attr : keyword arguments, optional (default= no attributes)
             Attributes to add to graph as key=value pairs.
@@ -415,7 +410,8 @@ class Graph:
         Returns
         -------
         info : string
-            Graph information as provided by `nx.info`
+            Graph information including the graph name (if any), graph type, and the
+            number of nodes and edges.
 
         Examples
         --------
@@ -583,6 +579,16 @@ class Graph:
         --------
         add_node
 
+        Notes
+        -----
+        When adding nodes from an iterator over the graph you are changing,
+        a `RuntimeError` can be raised with message:
+        `RuntimeError: dictionary changed size during iteration`. This
+        happens when the graph's underlying dictionary is modified during
+        iteration. To avoid this error, evaluate the iterator into a separate
+        object, e.g. by using `list(iterator_of_nodes)`, and pass this
+        object to `G.add_nodes_from`.
+
         Examples
         --------
         >>> G = nx.Graph()  # or DiGraph, MultiGraph, MultiDiGraph, etc
@@ -607,6 +613,13 @@ class Graph:
         >>> H.nodes[1]["size"]
         11
 
+        Evaluate an iterator over a graph if using it to modify the same graph
+
+        >>> G = nx.Graph([(0, 1), (1, 2), (3, 4)])
+        >>> # wrong way - will raise RuntimeError
+        >>> # G.add_nodes_from(n + 1 for n in G.nodes)
+        >>> # correct way
+        >>> G.add_nodes_from(list(n + 1 for n in G.nodes))
         """
         for n in nodes_for_adding:
             try:
@@ -678,6 +691,16 @@ class Graph:
         --------
         remove_node
 
+        Notes
+        -----
+        When removing nodes from an iterator over the graph you are changing,
+        a `RuntimeError` will be raised with message:
+        `RuntimeError: dictionary changed size during iteration`. This
+        happens when the graph's underlying dictionary is modified during
+        iteration. To avoid this error, evaluate the iterator into a separate
+        object, e.g. by using `list(iterator_of_nodes)`, and pass this
+        object to `G.remove_nodes_from`.
+
         Examples
         --------
         >>> G = nx.path_graph(3)  # or DiGraph, MultiGraph, MultiDiGraph, etc
@@ -688,6 +711,13 @@ class Graph:
         >>> list(G.nodes)
         []
 
+        Evaluate an iterator over a graph if using it to modify the same graph
+
+        >>> G = nx.Graph([(0, 1), (1, 2), (3, 4)])
+        >>> # this command will fail, as the graph's dict is modified during iteration
+        >>> # G.remove_nodes_from(n for n in G.nodes if n < 2)
+        >>> # this command will work, since the dictionary underlying graph is not modified
+        >>> G.remove_nodes_from(list(n for n in G.nodes if n < 2))
         """
         adj = self._adj
         for n in nodes:
@@ -954,6 +984,14 @@ class Graph:
         Edge attributes specified in an ebunch take precedence over
         attributes specified via keyword arguments.
 
+        When adding edges from an iterator over the graph you are changing,
+        a `RuntimeError` can be raised with message:
+        `RuntimeError: dictionary changed size during iteration`. This
+        happens when the graph's underlying dictionary is modified during
+        iteration. To avoid this error, evaluate the iterator into a separate
+        object, e.g. by using `list(iterator_of_edges)`, and pass this
+        object to `G.add_edges_from`.
+
         Examples
         --------
         >>> G = nx.Graph()  # or DiGraph, MultiGraph, MultiDiGraph, etc
@@ -965,6 +1003,15 @@ class Graph:
 
         >>> G.add_edges_from([(1, 2), (2, 3)], weight=3)
         >>> G.add_edges_from([(3, 4), (1, 4)], label="WN2898")
+
+        Evaluate an iterator over a graph if using it to modify the same graph
+
+        >>> G = nx.Graph([(1, 2), (2, 3), (3, 4)])
+        >>> # Grow graph by one new node, adding edges to all existing nodes.
+        >>> # wrong way - will raise RuntimeError
+        >>> # G.add_edges_from(((5, n) for n in G.nodes))
+        >>> # correct way - note that there will be no self-edge for node 5
+        >>> G.add_edges_from(list((5, n) for n in G.nodes))
         """
         for e in ebunch_to_add:
             ne = len(e)
@@ -1016,10 +1063,28 @@ class Graph:
         the edge data. For MultiGraph/MultiDiGraph, duplicate edges
         are stored.
 
+        When adding edges from an iterator over the graph you are changing,
+        a `RuntimeError` can be raised with message:
+        `RuntimeError: dictionary changed size during iteration`. This
+        happens when the graph's underlying dictionary is modified during
+        iteration. To avoid this error, evaluate the iterator into a separate
+        object, e.g. by using `list(iterator_of_edges)`, and pass this
+        object to `G.add_weighted_edges_from`.
+
         Examples
         --------
         >>> G = nx.Graph()  # or DiGraph, MultiGraph, MultiDiGraph, etc
         >>> G.add_weighted_edges_from([(0, 1, 3.0), (1, 2, 7.5)])
+
+        Evaluate an iterator over edges before passing it
+
+        >>> G = nx.Graph([(1, 2), (2, 3), (3, 4)])
+        >>> weight = 0.1
+        >>> # Grow graph by one new node, adding edges to all existing nodes.
+        >>> # wrong way - will raise RuntimeError
+        >>> # G.add_weighted_edges_from(((5, n, weight) for n in G.nodes))
+        >>> # correct way - note that there will be no self-edge for node 5
+        >>> G.add_weighted_edges_from(list((5, n, weight) for n in G.nodes))
         """
         self.add_edges_from(((u, v, {weight: d}) for u, v, d in ebunch_to_add), **attr)
 
diff --git a/networkx/classes/graphviews.py b/networkx/classes/graphviews.py
index dcb7836..5e3fa81 100644
--- a/networkx/classes/graphviews.py
+++ b/networkx/classes/graphviews.py
@@ -8,7 +8,7 @@ a graph to reverse directed edges, or treat a directed graph
 as undirected, etc. This module provides those graph views.
 
 The resulting views are essentially read-only graphs that
-report data from the orignal graph object. We provide an
+report data from the original graph object. We provide an
 attribute G._graph which points to the underlying graph object.
 
 Note: Since graphviews look like graphs, one can end up with
diff --git a/networkx/classes/multidigraph.py b/networkx/classes/multidigraph.py
index e118dc2..7591830 100644
--- a/networkx/classes/multidigraph.py
+++ b/networkx/classes/multidigraph.py
@@ -3,7 +3,7 @@ from copy import deepcopy
 from functools import cached_property
 
 import networkx as nx
-import networkx.convert as convert
+from networkx import convert
 from networkx.classes.coreviews import MultiAdjacencyView
 from networkx.classes.digraph import DiGraph
 from networkx.classes.multigraph import MultiGraph
@@ -63,7 +63,6 @@ class MultiDiGraph(MultiGraph, DiGraph):
     Graph
     DiGraph
     MultiGraph
-    OrderedMultiDiGraph
 
     Examples
     --------
@@ -271,9 +270,26 @@ class MultiDiGraph(MultiGraph, DiGraph):
         Class to create a new graph structure in the `to_undirected` method.
         If `None`, a NetworkX class (Graph or MultiGraph) is used.
 
-    Please see :mod:`~networkx.classes.ordered` for examples of
-    creating graph subclasses by overwriting the base class `dict` with
-    a dictionary-like object.
+    **Subclassing Example**
+
+    Create a low memory graph class that effectively disallows edge
+    attributes by using a single attribute dict for all edges.
+    This reduces the memory used, but you lose edge attributes.
+
+    >>> class ThinGraph(nx.Graph):
+    ...     all_edge_dict = {"weight": 1}
+    ...
+    ...     def single_edge_dict(self):
+    ...         return self.all_edge_dict
+    ...
+    ...     edge_attr_dict_factory = single_edge_dict
+    >>> G = ThinGraph()
+    >>> G.add_edge(2, 1)
+    >>> G[2][1]
+    {'weight': 1}
+    >>> G.add_edge(2, 2)
+    >>> G[2][1] is G[2][2]
+    True
     """
 
     # node_dict_factory = dict    # already assigned in Graph
@@ -292,7 +308,7 @@ class MultiDiGraph(MultiGraph, DiGraph):
             an empty graph is created.  The data can be an edge list, or any
             NetworkX graph object.  If the corresponding optional Python
             packages are installed the data can also be a 2D NumPy array, a
-            SciPy sparse matrix, or a PyGraphviz graph.
+            SciPy sparse array, or a PyGraphviz graph.
 
         multigraph_input : bool or None (default None)
             Note: Only used when `incoming_graph_data` is a dict.
@@ -658,7 +674,7 @@ class MultiDiGraph(MultiGraph, DiGraph):
 
     @cached_property
     def in_edges(self):
-        """An InMultiEdgeView of the Graph as G.in_edges or G.in_edges().
+        """A view of the in edges of the graph as G.in_edges or G.in_edges().
 
         in_edges(self, nbunch=None, data=False, keys=False, default=None)
 
@@ -679,7 +695,7 @@ class MultiDiGraph(MultiGraph, DiGraph):
 
         Returns
         -------
-        in_edges : InMultiEdgeView
+        in_edges : InMultiEdgeView or InMultiEdgeDataView
             A view of edge attributes, usually it iterates over (u, v)
             or (u, v, k) or (u, v, k, d) tuples of edges, but can also be
             used for attribute lookup as `edges[u, v, k]['foo']`.
diff --git a/networkx/classes/multigraph.py b/networkx/classes/multigraph.py
index 3332201..3b6b0c0 100644
--- a/networkx/classes/multigraph.py
+++ b/networkx/classes/multigraph.py
@@ -3,8 +3,7 @@ from copy import deepcopy
 from functools import cached_property
 
 import networkx as nx
-import networkx.convert as convert
-from networkx import NetworkXError
+from networkx import NetworkXError, convert
 from networkx.classes.coreviews import MultiAdjacencyView
 from networkx.classes.graph import Graph
 from networkx.classes.reportviews import MultiDegreeView, MultiEdgeView
@@ -36,7 +35,7 @@ class MultiGraph(Graph):
         graph is created.  The data can be any format that is supported
         by the to_networkx_graph() function, currently including edge list,
         dict of dicts, dict of lists, NetworkX graph, 2D NumPy array,
-        SciPy sparse matrix, or PyGraphviz graph.
+        SciPy sparse array, or PyGraphviz graph.
 
     multigraph_input : bool or None (default None)
         Note: Only used when `incoming_graph_data` is a dict.
@@ -59,7 +58,6 @@ class MultiGraph(Graph):
     Graph
     DiGraph
     MultiDiGraph
-    OrderedMultiGraph
 
     Examples
     --------
@@ -264,9 +262,26 @@ class MultiGraph(Graph):
         Class to create a new graph structure in the `to_undirected` method.
         If `None`, a NetworkX class (Graph or MultiGraph) is used.
 
-    Please see :mod:`~networkx.classes.ordered` for examples of
-    creating graph subclasses by overwriting the base class `dict` with
-    a dictionary-like object.
+    **Subclassing Example**
+
+    Create a low memory graph class that effectively disallows edge
+    attributes by using a single attribute dict for all edges.
+    This reduces the memory used, but you lose edge attributes.
+
+    >>> class ThinGraph(nx.Graph):
+    ...     all_edge_dict = {"weight": 1}
+    ...
+    ...     def single_edge_dict(self):
+    ...         return self.all_edge_dict
+    ...
+    ...     edge_attr_dict_factory = single_edge_dict
+    >>> G = ThinGraph()
+    >>> G.add_edge(2, 1)
+    >>> G[2][1]
+    {'weight': 1}
+    >>> G.add_edge(2, 2)
+    >>> G[2][1] is G[2][2]
+    True
     """
 
     # node_dict_factory = dict    # already assigned in Graph
@@ -301,7 +316,7 @@ class MultiGraph(Graph):
             an empty graph is created.  The data can be an edge list, or any
             NetworkX graph object.  If the corresponding optional Python
             packages are installed the data can also be a 2D NumPy array, a
-            SciPy sparse matrix, or a PyGraphviz graph.
+            SciPy sparse array, or a PyGraphviz graph.
 
         multigraph_input : bool or None (default None)
             Note: Only used when `incoming_graph_data` is a dict.
@@ -546,6 +561,14 @@ class MultiGraph(Graph):
         This method can be overridden by subclassing the base class and
         providing a custom ``new_edge_key()`` method.
 
+        When adding edges from an iterator over the graph you are changing,
+        a `RuntimeError` can be raised with message:
+        `RuntimeError: dictionary changed size during iteration`. This
+        happens when the graph's underlying dictionary is modified during
+        iteration. To avoid this error, evaluate the iterator into a separate
+        object, e.g. by using `list(iterator_of_edges)`, and pass this
+        object to `G.add_edges_from`.
+
         Examples
         --------
         >>> G = nx.Graph()  # or DiGraph, MultiGraph, MultiDiGraph, etc
@@ -557,6 +580,15 @@ class MultiGraph(Graph):
 
         >>> G.add_edges_from([(1, 2), (2, 3)], weight=3)
         >>> G.add_edges_from([(3, 4), (1, 4)], label="WN2898")
+
+        Evaluate an iterator over a graph if using it to modify the same graph
+
+        >>> G = nx.MultiGraph([(1, 2), (2, 3), (3, 4)])
+        >>> # Grow graph by one new node, adding edges to all existing nodes.
+        >>> # wrong way - will raise RuntimeError
+        >>> # G.add_edges_from(((5, n) for n in G.nodes))
+        >>> # right way - note that there will be no self-edge for node 5
+        >>> assigned_keys = G.add_edges_from(list((5, n) for n in G.nodes))
         """
         keylist = []
         for e in ebunch_to_add:
diff --git a/networkx/classes/ordered.py b/networkx/classes/ordered.py
deleted file mode 100644
index ca82d12..0000000
--- a/networkx/classes/ordered.py
+++ /dev/null
@@ -1,162 +0,0 @@
-"""
-
-.. deprecated:: 2.6
-
-   The ordered variants of graph classes in this module are deprecated and
-   will be removed in version 3.0.
-
-Consistently ordered variants of the default base classes.
-Note that if you are using Python 3.6+, you shouldn't need these classes
-because the dicts in Python 3.6+ are ordered.
-Note also that there are many differing expectations for the word "ordered"
-and that these classes may not provide the order you expect.
-The intent here is to give a consistent order not a particular order.
-
-The Ordered (Di/Multi/MultiDi) Graphs give a consistent order for reporting of
-nodes and edges.  The order of node reporting agrees with node adding, but for
-edges, the order is not necessarily the order that the edges were added.
-
-In general, you should use the default (i.e., unordered) graph classes.
-However, there are times (e.g., when testing) when you may need the
-order preserved.
-
-Special care is required when using subgraphs of the Ordered classes.
-The order of nodes in the subclass is not necessarily the same order
-as the original class.  In general it is probably better to avoid using
-subgraphs and replace with code similar to:
-
-.. code-block:: python
-
-    # instead of SG = G.subgraph(ordered_nodes)
-    SG = nx.OrderedGraph()
-    SG.add_nodes_from(ordered_nodes)
-    SG.add_edges_from((u, v) for (u, v) in G.edges() if u in SG if v in SG)
-
-"""
-import warnings
-from collections import OrderedDict
-
-from .digraph import DiGraph
-from .graph import Graph
-from .multidigraph import MultiDiGraph
-from .multigraph import MultiGraph
-
-__all__ = []
-
-__all__.extend(
-    ["OrderedGraph", "OrderedDiGraph", "OrderedMultiGraph", "OrderedMultiDiGraph"]
-)
-
-
-class OrderedGraph(Graph):
-    """Consistently ordered variant of :class:`~networkx.Graph`.
-
-    .. deprecated:: 2.6
-
-       OrderedGraph is deprecated and will be removed in version 3.0.
-       Use `Graph` instead, which guarantees order is preserved for
-       Python >= 3.7
-    """
-
-    node_dict_factory = OrderedDict
-    adjlist_outer_dict_factory = OrderedDict
-    adjlist_inner_dict_factory = OrderedDict
-    edge_attr_dict_factory = OrderedDict
-
-    def __init__(self, incoming_graph_data=None, **attr):
-        warnings.warn(
-            (
-                "OrderedGraph is deprecated and will be removed in version 3.0.\n"
-                "Use `Graph` instead, which guarantees order is preserved for\n"
-                "Python >= 3.7\n"
-            ),
-            DeprecationWarning,
-            stacklevel=2,
-        )
-        super().__init__(incoming_graph_data, **attr)
-
-
-class OrderedDiGraph(DiGraph):
-    """Consistently ordered variant of :class:`~networkx.DiGraph`.
-
-    .. deprecated:: 2.6
-
-       OrderedDiGraph is deprecated and will be removed in version 3.0.
-       Use `DiGraph` instead, which guarantees order is preserved for
-       Python >= 3.7
-    """
-
-    node_dict_factory = OrderedDict
-    adjlist_outer_dict_factory = OrderedDict
-    adjlist_inner_dict_factory = OrderedDict
-    edge_attr_dict_factory = OrderedDict
-
-    def __init__(self, incoming_graph_data=None, **attr):
-        warnings.warn(
-            (
-                "OrderedDiGraph is deprecated and will be removed in version 3.0.\n"
-                "Use `DiGraph` instead, which guarantees order is preserved for\n"
-                "Python >= 3.7\n"
-            ),
-            DeprecationWarning,
-            stacklevel=2,
-        )
-        super().__init__(incoming_graph_data, **attr)
-
-
-class OrderedMultiGraph(MultiGraph):
-    """Consistently ordered variant of :class:`~networkx.MultiGraph`.
-
-    .. deprecated:: 2.6
-
-       OrderedMultiGraph is deprecated and will be removed in version 3.0.
-       Use `MultiGraph` instead, which guarantees order is preserved for
-       Python >= 3.7
-    """
-
-    node_dict_factory = OrderedDict
-    adjlist_outer_dict_factory = OrderedDict
-    adjlist_inner_dict_factory = OrderedDict
-    edge_key_dict_factory = OrderedDict
-    edge_attr_dict_factory = OrderedDict
-
-    def __init__(self, incoming_graph_data=None, **attr):
-        warnings.warn(
-            (
-                "OrderedMultiGraph is deprecated and will be removed in version 3.0.\n"
-                "Use `MultiGraph` instead, which guarantees order is preserved for\n"
-                "Python >= 3.7\n"
-            ),
-            DeprecationWarning,
-            stacklevel=2,
-        )
-        super().__init__(incoming_graph_data, **attr)
-
-
-class OrderedMultiDiGraph(MultiDiGraph):
-    """Consistently ordered variant of :class:`~networkx.MultiDiGraph`.
-
-    .. deprecated:: 2.6
-
-       OrderedMultiDiGraph is deprecated and will be removed in version 3.0.
-       Use `MultiDiGraph` instead, which guarantees order is preserved for
-       Python >= 3.7
-    """
-
-    node_dict_factory = OrderedDict
-    adjlist_outer_dict_factory = OrderedDict
-    adjlist_inner_dict_factory = OrderedDict
-    edge_key_dict_factory = OrderedDict
-    edge_attr_dict_factory = OrderedDict
-
-    def __init__(self, incoming_graph_data=None, **attr):
-        warnings.warn(
-            (
-                "OrderedMultiDiGraph is deprecated and will be removed in version 3.0.\n"
-                "Use `MultiDiGraph` instead, which guarantees order is preserved for\n"
-                "Python >= 3.7\n"
-            ),
-            DeprecationWarning,
-            stacklevel=2,
-        )
-        super().__init__(incoming_graph_data, **attr)
diff --git a/networkx/classes/reportviews.py b/networkx/classes/reportviews.py
index de5ff04..4503566 100644
--- a/networkx/classes/reportviews.py
+++ b/networkx/classes/reportviews.py
@@ -966,10 +966,7 @@ class OutMultiEdgeDataView(OutEdgeDataView):
             except KeyError:
                 return False
             return e == self._report(u, v, k, dd)
-        for k, dd in kdict.items():
-            if e == self._report(u, v, k, dd):
-                return True
-        return False
+        return any(e == self._report(u, v, k, dd) for k, dd in kdict.items())
 
 
 class MultiEdgeDataView(OutMultiEdgeDataView):
@@ -1005,10 +1002,7 @@ class MultiEdgeDataView(OutMultiEdgeDataView):
             except KeyError:
                 return False
             return e == self._report(u, v, k, dd)
-        for k, dd in kdict.items():
-            if e == self._report(u, v, k, dd):
-                return True
-        return False
+        return any(e == self._report(u, v, k, dd) for k, dd in kdict.items())
 
 
 class InMultiEdgeDataView(OutMultiEdgeDataView):
@@ -1036,10 +1030,7 @@ class InMultiEdgeDataView(OutMultiEdgeDataView):
             k = e[2]
             dd = kdict[k]
             return e == self._report(u, v, k, dd)
-        for k, dd in kdict.items():
-            if e == self._report(u, v, k, dd):
-                return True
-        return False
+        return any(e == self._report(u, v, k, dd) for k, dd in kdict.items())
 
 
 # EdgeViews    have set operations and no data reported
diff --git a/networkx/classes/tests/dispatch_interface.py b/networkx/classes/tests/dispatch_interface.py
new file mode 100644
index 0000000..ded79b3
--- /dev/null
+++ b/networkx/classes/tests/dispatch_interface.py
@@ -0,0 +1,83 @@
+# This file contains utilities for testing the dispatching feature
+
+# A full test of all dispatchable algorithms is performed by
+# modifying the pytest invocation and setting an environment variable
+# NETWORKX_GRAPH_CONVERT=nx-loopback pytest
+# This is comprehensive, but only tests the `test_override_dispatch`
+# function in networkx.classes.backends.
+
+# To test the `_dispatch` function directly, several tests scattered throughout
+# NetworkX have been augmented to test normal and dispatch mode.
+# Searching for `dispatch_interface` should locate the specific tests.
+
+import networkx as nx
+from networkx import DiGraph, Graph, MultiDiGraph, MultiGraph, PlanarEmbedding
+
+
+class LoopbackGraph(Graph):
+    __networkx_plugin__ = "nx-loopback"
+
+
+class LoopbackDiGraph(DiGraph):
+    __networkx_plugin__ = "nx-loopback"
+
+
+class LoopbackMultiGraph(MultiGraph):
+    __networkx_plugin__ = "nx-loopback"
+
+
+class LoopbackMultiDiGraph(MultiDiGraph):
+    __networkx_plugin__ = "nx-loopback"
+
+
+class LoopbackPlanarEmbedding(PlanarEmbedding):
+    __networkx_plugin__ = "nx-loopback"
+
+
+def convert(graph):
+    if isinstance(graph, PlanarEmbedding):
+        return LoopbackPlanarEmbedding(graph)
+    if isinstance(graph, MultiDiGraph):
+        return LoopbackMultiDiGraph(graph)
+    if isinstance(graph, MultiGraph):
+        return LoopbackMultiGraph(graph)
+    if isinstance(graph, DiGraph):
+        return LoopbackDiGraph(graph)
+    if isinstance(graph, Graph):
+        return LoopbackGraph(graph)
+    raise TypeError(f"Unsupported type of graph: {type(graph)}")
+
+
+class LoopbackDispatcher:
+    non_toplevel = {
+        "inter_community_edges": nx.community.quality.inter_community_edges,
+        "is_tournament": nx.algorithms.tournament.is_tournament,
+        "mutual_weight": nx.algorithms.structuralholes.mutual_weight,
+        "score_sequence": nx.algorithms.tournament.score_sequence,
+        "tournament_matrix": nx.algorithms.tournament.tournament_matrix,
+    }
+
+    def __getattr__(self, item):
+        # Return the original, undecorated NetworkX algorithm
+        if hasattr(nx, item):
+            return getattr(nx, item)._orig_func
+        if item in self.non_toplevel:
+            return self.non_toplevel[item]._orig_func
+        raise AttributeError(item)
+
+    @staticmethod
+    def convert_from_nx(graph, weight=None, *, name=None):
+        return graph
+
+    @staticmethod
+    def convert_to_nx(obj, *, name=None):
+        return obj
+
+    @staticmethod
+    def on_start_tests(items):
+        # Verify that items can be xfailed
+        for item in items:
+            assert hasattr(item, "add_marker")
+
+
+dispatcher = LoopbackDispatcher()
diff --git a/networkx/classes/tests/historical_tests.py b/networkx/classes/tests/historical_tests.py
index 44b72fd..68089ba 100644
--- a/networkx/classes/tests/historical_tests.py
+++ b/networkx/classes/tests/historical_tests.py
@@ -70,8 +70,8 @@ class HistoricalTests:
         G = self.G()
         G.add_node("A")
         assert "A" in G
-        assert not [] in G  # never raise a Key or TypeError in this test
-        assert not {1: 1} in G
+        assert [] not in G  # never raise a Key or TypeError in this test
+        assert {1: 1} not in G
 
     def test_add_remove(self):
         # Test add_node and remove_node acting for various nbunch
diff --git a/networkx/classes/tests/test_backends.py b/networkx/classes/tests/test_backends.py
new file mode 100644
index 0000000..8d0c86a
--- /dev/null
+++ b/networkx/classes/tests/test_backends.py
@@ -0,0 +1,14 @@
+import pytest
+
+import networkx as nx
+
+pytest.importorskip("scipy")
+pytest.importorskip("numpy")
+
+
+def test_dispatch_kwds_vs_args():
+    G = nx.path_graph(4)
+    nx.pagerank(G)
+    nx.pagerank(G=G)
+    with pytest.raises(TypeError):
+        nx.pagerank()
diff --git a/networkx/classes/tests/test_coreviews.py b/networkx/classes/tests/test_coreviews.py
index fdea00f..07fa5bf 100644
--- a/networkx/classes/tests/test_coreviews.py
+++ b/networkx/classes/tests/test_coreviews.py
@@ -155,7 +155,7 @@ class TestUnionAtlas:
         assert view.__slots__ == pview.__slots__
 
     def test_len(self):
-        assert len(self.av) == len(self.s) + len(self.p)
+        assert len(self.av) == len(self.s.keys() | self.p.keys()) == 5
 
     def test_iter(self):
         assert set(self.av) == set(self.s) | set(self.p)
@@ -257,7 +257,7 @@ class TestUnionMultiInner(TestUnionAdjacency):
         self.adjview = nx.classes.coreviews.UnionMultiInner(self.s, self.p)
 
     def test_len(self):
-        assert len(self.adjview) == len(self.s) + len(self.p)
+        assert len(self.adjview) == len(self.s.keys() | self.p.keys()) == 4
 
     def test_getitem(self):
         assert self.adjview[1] is not self.s[1]
@@ -360,72 +360,3 @@ class TestFilteredGraphs:
             assert RG.adj[2].copy() == RG.adj[2]
             assert RsG.adj.copy() == RsG.adj
             assert RsG.adj[2].copy() == RsG.adj[2]
-
-    def test_filtered_copy(self):
-        # TODO: This function can be removed when filtered.copy()
-        # deprecation expires
-        SubGraph = nx.graphviews.subgraph_view
-        for Graph in self.Graphs:
-            G = nx.path_graph(4, Graph)
-            SG = G.subgraph([2, 3])
-            RG = SubGraph(G, nx.filters.hide_nodes([0, 1]))
-            RsG = SubGraph(G, nx.filters.show_nodes([2, 3]))
-            # test FilterAtlas & co in these subgraphs
-            assert SG._node.copy() == SG._node
-            assert SG.adj._atlas.copy() == SG.adj._atlas
-            assert SG.adj[2]._atlas.copy() == SG.adj[2]._atlas
-            assert SG.adj[2]._atlas[3].copy() == SG.adj[2]._atlas[3]
-            assert RG.adj._atlas.copy() == RG.adj._atlas
-            assert RG.adj[2]._atlas.copy() == RG.adj[2]._atlas
-            assert RG.adj[2]._atlas[3].copy() == RG.adj[2]._atlas[3]
-            assert RG._node.copy() == RG._node
-            assert RsG.adj._atlas.copy() == RsG.adj._atlas
-            assert RsG.adj[2]._atlas.copy() == RsG.adj[2]._atlas
-            assert RsG.adj[2]._atlas[3].copy() == RsG.adj[2]._atlas[3]
-            assert RsG._node.copy() == RsG._node
-            # test MultiFilterInner
-            if G.is_multigraph():
-                assert SG.adj[2]._atlas[3][0].copy() == SG.adj[2]._atlas[3][0]
-                assert RG.adj[2]._atlas[3][0].copy() == RG.adj[2]._atlas[3][0]
-                assert RsG.adj[2]._atlas[3][0].copy() == RsG.adj[2]._atlas[3][0]
-
-            # test deprecation
-            # FilterAtlas.copy()
-            pytest.deprecated_call(SG._node.copy)
-            # FilterAdjacency.copy()
-            pytest.deprecated_call(SG.adj._atlas.copy)
-            # FilterMultiAdjacency.copy()
-            if G.is_multigraph():
-                pytest.deprecated_call(SG.adj._atlas.copy)
-            # FilterMultiInner.copy()
-            if G.is_multigraph():
-                pytest.deprecated_call(SG.adj[2]._atlas.copy)
-
-            SSG = SG.subgraph([2])
-            assert list(SSG) == [2]
-
-            # check case when node_ok is small
-            G = nx.complete_graph(9, Graph)
-            SG = G.subgraph([2, 3])
-            RG = SubGraph(G, nx.filters.hide_nodes([0, 1]))
-            RsG = SubGraph(G, nx.filters.show_nodes([2, 3, 4, 5, 6, 7, 8]))
-            assert SG.adj._atlas.copy() == SG.adj._atlas
-            assert SG.adj[2]._atlas.copy() == SG.adj[2]._atlas
-            assert SG.adj[2]._atlas[3].copy() == SG.adj[2]._atlas[3]
-            assert SG._node.copy() == SG._node
-            assert RG.adj._atlas.copy() == RG.adj._atlas
-            assert RG.adj[2]._atlas.copy() == RG.adj[2]._atlas
-            assert RG.adj[2]._atlas[3].copy() == RG.adj[2]._atlas[3]
-            assert RG._node.copy() == RG._node
-            assert RsG.adj._atlas.copy() == RsG.adj._atlas
-            assert RsG.adj[2]._atlas.copy() == RsG.adj[2]._atlas
-            assert RsG.adj[2]._atlas[3].copy() == RsG.adj[2]._atlas[3]
-            assert RsG._node.copy() == RsG._node
-            # test MultiFilterInner
-            if G.is_multigraph():
-                assert SG.adj[2][3]._atlas.copy() == SG.adj[2][3]._atlas
-                assert RG.adj[2][3]._atlas.copy() == RG.adj[2][3]._atlas
-                assert RsG.adj[2][3]._atlas.copy() == RsG.adj[2][3]._atlas
-
-            SSG = SG.subgraph([2])
-            assert list(SSG) == [2]
diff --git a/networkx/classes/tests/test_function.py b/networkx/classes/tests/test_function.py
index c738ab5..1f6ed42 100644
--- a/networkx/classes/tests/test_function.py
+++ b/networkx/classes/tests/test_function.py
@@ -252,6 +252,7 @@ class TestFunction:
         pytest.raises(nx.NetworkXError, G.add_edges_from, [(1, 2)])
         pytest.raises(nx.NetworkXError, G.remove_edge, 1, 2)
         pytest.raises(nx.NetworkXError, G.remove_edges_from, [(1, 2)])
+        pytest.raises(nx.NetworkXError, G.clear_edges)
         pytest.raises(nx.NetworkXError, G.clear)
 
     def test_is_frozen(self):
@@ -260,37 +261,17 @@ class TestFunction:
         assert G.frozen == nx.is_frozen(self.G)
         assert G.frozen
 
-    def test_info(self):
-        G = nx.path_graph(5)
-        G.name = "path_graph(5)"
-        info = nx.info(G)
-        expected_graph_info = "Graph named 'path_graph(5)' with 5 nodes and 4 edges"
-        assert info == expected_graph_info
-
-        info = nx.info(G, n=1)
-        assert type(info) == str
-        expected_node_info = "\n".join(
-            ["Node 1 has the following properties:", "Degree: 2", "Neighbors: 0 2"]
-        )
-        assert info == expected_node_info
-
-        # must raise an error for a non-existent node
-        pytest.raises(nx.NetworkXError, nx.info, G, 1248)
-
-    def test_info_digraph(self):
-        G = nx.DiGraph(name="path_graph(5)")
-        nx.add_path(G, [0, 1, 2, 3, 4])
-        info = nx.info(G)
-        expected_graph_info = "DiGraph named 'path_graph(5)' with 5 nodes and 4 edges"
-        assert info == expected_graph_info
-
-        info = nx.info(G, n=1)
-        expected_node_info = "\n".join(
-            ["Node 1 has the following properties:", "Degree: 2", "Neighbors: 2"]
-        )
-        assert info == expected_node_info
+    def test_node_attributes_are_still_mutable_on_frozen_graph(self):
+        G = nx.freeze(nx.path_graph(3))
+        node = G.nodes[0]
+        node["node_attribute"] = True
+        assert node["node_attribute"] == True
 
-        pytest.raises(nx.NetworkXError, nx.info, G, n=-1)
+    def test_edge_attributes_are_still_mutable_on_frozen_graph(self):
+        G = nx.freeze(nx.path_graph(3))
+        edge = G.edges[(0, 1)]
+        edge["edge_attribute"] = True
+        assert edge["edge_attribute"] == True
 
     def test_neighbors_complete_graph(self):
         graph = nx.complete_graph(100)
@@ -349,13 +330,13 @@ class TestFunction:
         graph = nx.path_graph(4)
         expected = [(0, 2), (0, 3), (1, 3)]
         nedges = list(nx.non_edges(graph))
-        for (u, v) in expected:
+        for u, v in expected:
             assert (u, v) in nedges or (v, u) in nedges
 
         graph = nx.star_graph(4)
         expected = [(1, 2), (1, 3), (1, 4), (2, 3), (2, 4), (3, 4)]
         nedges = list(nx.non_edges(graph))
-        for (u, v) in expected:
+        for u, v in expected:
             assert (u, v) in nedges or (v, u) in nedges
 
         # Directed graphs
@@ -757,9 +738,9 @@ def test_pathweight():
     invalid_path = [1, 3, 2]
     graphs = [nx.Graph(), nx.DiGraph(), nx.MultiGraph(), nx.MultiDiGraph()]
     edges = [
-        (1, 2, dict(cost=5, dist=6)),
-        (2, 3, dict(cost=3, dist=4)),
-        (1, 2, dict(cost=1, dist=2)),
+        (1, 2, {"cost": 5, "dist": 6}),
+        (2, 3, {"cost": 3, "dist": 4}),
+        (1, 2, {"cost": 1, "dist": 2}),
     ]
     for graph in graphs:
         graph.add_edges_from(edges)
diff --git a/networkx/classes/tests/test_graph.py b/networkx/classes/tests/test_graph.py
index 46d5173..5c6d4e7 100644
--- a/networkx/classes/tests/test_graph.py
+++ b/networkx/classes/tests/test_graph.py
@@ -1,6 +1,7 @@
 import gc
 import pickle
 import platform
+import weakref
 
 import pytest
 
@@ -71,7 +72,19 @@ class BaseGraphTester:
         G = self.Graph()
 
         def count_objects_of_type(_type):
-            return sum(1 for obj in gc.get_objects() if isinstance(obj, _type))
+            # Iterating over all objects tracked by gc can include weak references
+            # whose weakly-referenced objects may no longer exist. Calling `isinstance`
+            # on such a weak reference will raise ReferenceError. There are at least
+            # three workarounds for this: one is to compare type names instead of using
+            # `isinstance` such as `type(obj).__name__ == typename`, another is to use
+            # `type(obj) == _type`, and the last is to ignore ProxyTypes as we do below.
+            # NOTE: even if this safeguard is deemed unnecessary to pass NetworkX tests,
+            # we should still keep it for maximum safety for other NetworkX backends.
+            return sum(
+                1
+                for obj in gc.get_objects()
+                if not isinstance(obj, weakref.ProxyTypes) and isinstance(obj, _type)
+            )
 
         gc.collect()
         before = count_objects_of_type(self.Graph)
diff --git a/networkx/classes/tests/test_graphviews.py b/networkx/classes/tests/test_graphviews.py
index d17a424..cb60bb9 100644
--- a/networkx/classes/tests/test_graphviews.py
+++ b/networkx/classes/tests/test_graphviews.py
@@ -255,7 +255,7 @@ class TestChainsOfViews:
 
     def test_subgraph_copy(self):
         for origG in self.graphs:
-            G = nx.OrderedGraph(origG)
+            G = nx.Graph(origG)
             SG = G.subgraph([4, 5, 6])
             H = SG.copy()
             assert type(G) == type(H)
@@ -330,10 +330,10 @@ class TestChainsOfViews:
         assert not hasattr(DCSG, "_graph")  # not a view
 
     def test_copy_of_view(self):
-        G = nx.OrderedMultiGraph(self.MGv)
-        assert G.__class__.__name__ == "OrderedMultiGraph"
+        G = nx.MultiGraph(self.MGv)
+        assert G.__class__.__name__ == "MultiGraph"
         G = G.copy(as_view=True)
-        assert G.__class__.__name__ == "OrderedMultiGraph"
+        assert G.__class__.__name__ == "MultiGraph"
 
     def test_subclass(self):
         class MyGraph(nx.DiGraph):
@@ -349,4 +349,4 @@ class TestChainsOfViews:
             H = SG.copy()
             assert SG.my_method() == "me"
             assert H.my_method() == "me"
-            assert not 3 in H or 3 in SG
+            assert 3 not in H or 3 in SG
diff --git a/networkx/classes/tests/test_multidigraph.py b/networkx/classes/tests/test_multidigraph.py
index c951b48..fc0bd54 100644
--- a/networkx/classes/tests/test_multidigraph.py
+++ b/networkx/classes/tests/test_multidigraph.py
@@ -421,13 +421,13 @@ class CustomDictClass(UserDict):
 
 
 class MultiDiGraphSubClass(nx.MultiDiGraph):
-    node_dict_factory = CustomDictClass  # type: ignore
-    node_attr_dict_factory = CustomDictClass  # type: ignore
-    adjlist_outer_dict_factory = CustomDictClass  # type: ignore
-    adjlist_inner_dict_factory = CustomDictClass  # type: ignore
-    edge_key_dict_factory = CustomDictClass  # type: ignore
-    edge_attr_dict_factory = CustomDictClass  # type: ignore
-    graph_attr_dict_factory = CustomDictClass  # type: ignore
+    node_dict_factory = CustomDictClass  # type: ignore[assignment]
+    node_attr_dict_factory = CustomDictClass  # type: ignore[assignment]
+    adjlist_outer_dict_factory = CustomDictClass  # type: ignore[assignment]
+    adjlist_inner_dict_factory = CustomDictClass  # type: ignore[assignment]
+    edge_key_dict_factory = CustomDictClass  # type: ignore[assignment]
+    edge_attr_dict_factory = CustomDictClass  # type: ignore[assignment]
+    graph_attr_dict_factory = CustomDictClass  # type: ignore[assignment]
 
 
 class TestMultiDiGraphSubclass(TestMultiDiGraph):
diff --git a/networkx/classes/tests/test_multigraph.py b/networkx/classes/tests/test_multigraph.py
index 07f7f5d..cd912d1 100644
--- a/networkx/classes/tests/test_multigraph.py
+++ b/networkx/classes/tests/test_multigraph.py
@@ -202,8 +202,8 @@ class TestMultiGraph(BaseMultiGraphTester, _TestGraph):
 
     def test_data_multigraph_input(self):
         # standard case with edge keys and edge data
-        edata0 = dict(w=200, s="foo")
-        edata1 = dict(w=201, s="bar")
+        edata0 = {"w": 200, "s": "foo"}
+        edata1 = {"w": 201, "s": "bar"}
         keydict = {0: edata0, 1: edata1}
         dododod = {"a": {"b": keydict}}
 
@@ -235,7 +235,7 @@ class TestMultiGraph(BaseMultiGraphTester, _TestGraph):
     dol = {"a": ["b"]}
 
     multiple_edge = [("a", "b", "traits", etraits), ("a", "b", "graphics", egraphics)]
-    single_edge = [("a", "b", 0, {})]  # type: ignore
+    single_edge = [("a", "b", 0, {})]  # type: ignore[var-annotated]
     single_edge1 = [("a", "b", 0, edata)]
     single_edge2 = [("a", "b", 0, etraits)]
     single_edge3 = [("a", "b", 0, {"traits": etraits, "s": "foo"})]
@@ -492,13 +492,13 @@ class CustomDictClass(UserDict):
 
 
 class MultiGraphSubClass(nx.MultiGraph):
-    node_dict_factory = CustomDictClass  # type: ignore
-    node_attr_dict_factory = CustomDictClass  # type: ignore
-    adjlist_outer_dict_factory = CustomDictClass  # type: ignore
-    adjlist_inner_dict_factory = CustomDictClass  # type: ignore
-    edge_key_dict_factory = CustomDictClass  # type: ignore
-    edge_attr_dict_factory = CustomDictClass  # type: ignore
-    graph_attr_dict_factory = CustomDictClass  # type: ignore
+    node_dict_factory = CustomDictClass  # type: ignore[assignment]
+    node_attr_dict_factory = CustomDictClass  # type: ignore[assignment]
+    adjlist_outer_dict_factory = CustomDictClass  # type: ignore[assignment]
+    adjlist_inner_dict_factory = CustomDictClass  # type: ignore[assignment]
+    edge_key_dict_factory = CustomDictClass  # type: ignore[assignment]
+    edge_attr_dict_factory = CustomDictClass  # type: ignore[assignment]
+    graph_attr_dict_factory = CustomDictClass  # type: ignore[assignment]
 
 
 class TestMultiGraphSubclass(TestMultiGraph):
diff --git a/networkx/classes/tests/test_ordered.py b/networkx/classes/tests/test_ordered.py
deleted file mode 100644
index f29ecb4..0000000
--- a/networkx/classes/tests/test_ordered.py
+++ /dev/null
@@ -1,40 +0,0 @@
-import networkx as nx
-
-
-class TestOrdered:
-    # Just test instantiation.
-    def test_graph(self):
-        G = nx.OrderedGraph()
-
-    def test_digraph(self):
-        G = nx.OrderedDiGraph()
-
-    def test_multigraph(self):
-        G = nx.OrderedMultiGraph()
-
-    def test_multidigraph(self):
-        G = nx.OrderedMultiDiGraph()
-
-
-class TestOrderedFeatures:
-    @classmethod
-    def setup_class(cls):
-        cls.G = nx.OrderedDiGraph()
-        cls.G.add_nodes_from([1, 2, 3])
-        cls.G.add_edges_from([(2, 3), (1, 3)])
-
-    def test_subgraph_order(self):
-        G = self.G
-        G_sub = G.subgraph([1, 2, 3])
-        assert list(G.nodes) == list(G_sub.nodes)
-        assert list(G.edges) == list(G_sub.edges)
-        assert list(G.pred[3]) == list(G_sub.pred[3])
-        assert [2, 1] == list(G_sub.pred[3])
-        assert [] == list(G_sub.succ[3])
-
-        G_sub = nx.induced_subgraph(G, [1, 2, 3])
-        assert list(G.nodes) == list(G_sub.nodes)
-        assert list(G.edges) == list(G_sub.edges)
-        assert list(G.pred[3]) == list(G_sub.pred[3])
-        assert [2, 1] == list(G_sub.pred[3])
-        assert [] == list(G_sub.succ[3])
diff --git a/networkx/classes/tests/test_reportviews.py b/networkx/classes/tests/test_reportviews.py
index 7e8dc3d..48148c2 100644
--- a/networkx/classes/tests/test_reportviews.py
+++ b/networkx/classes/tests/test_reportviews.py
@@ -205,7 +205,7 @@ class TestNodeViewSetOps:
         cls.nv = cls.G.nodes
 
     def n_its(self, nodes):
-        return {node for node in nodes}
+        return set(nodes)
 
     def test_len(self):
         G = self.G.copy()
@@ -351,26 +351,26 @@ class TestEdgeDataView:
             assert (1, 2) in ev and (2, 1) not in ev
         else:
             assert (1, 2) in ev and (2, 1) in ev
-        assert not (1, 4) in ev
-        assert not (1, 90) in ev
-        assert not (90, 1) in ev
+        assert (1, 4) not in ev
+        assert (1, 90) not in ev
+        assert (90, 1) not in ev
 
     def test_contains_with_nbunch(self):
         evr = self.eview(self.G)
         ev = evr(nbunch=[0, 2])
         if self.G.is_directed():
             assert (0, 1) in ev
-            assert not (1, 2) in ev
+            assert (1, 2) not in ev
             assert (2, 3) in ev
         else:
             assert (0, 1) in ev
             assert (1, 2) in ev
             assert (2, 3) in ev
-        assert not (3, 4) in ev
-        assert not (4, 5) in ev
-        assert not (5, 6) in ev
-        assert not (7, 8) in ev
-        assert not (8, 9) in ev
+        assert (3, 4) not in ev
+        assert (4, 5) not in ev
+        assert (5, 6) not in ev
+        assert (7, 8) not in ev
+        assert (8, 9) not in ev
 
     def test_len(self):
         evr = self.eview(self.G)
@@ -427,13 +427,13 @@ class TestOutEdgeDataView(TestEdgeDataView):
         evr = self.eview(self.G)
         ev = evr(nbunch=[0, 2])
         assert (0, 1) in ev
-        assert not (1, 2) in ev
+        assert (1, 2) not in ev
         assert (2, 3) in ev
-        assert not (3, 4) in ev
-        assert not (4, 5) in ev
-        assert not (5, 6) in ev
-        assert not (7, 8) in ev
-        assert not (8, 9) in ev
+        assert (3, 4) not in ev
+        assert (4, 5) not in ev
+        assert (5, 6) not in ev
+        assert (7, 8) not in ev
+        assert (8, 9) not in ev
 
 
 class TestInEdgeDataView(TestOutEdgeDataView):
@@ -455,14 +455,14 @@ class TestInEdgeDataView(TestOutEdgeDataView):
     def test_contains_with_nbunch(self):
         evr = self.eview(self.G)
         ev = evr(nbunch=[0, 2])
-        assert not (0, 1) in ev
+        assert (0, 1) not in ev
         assert (1, 2) in ev
-        assert not (2, 3) in ev
-        assert not (3, 4) in ev
-        assert not (4, 5) in ev
-        assert not (5, 6) in ev
-        assert not (7, 8) in ev
-        assert not (8, 9) in ev
+        assert (2, 3) not in ev
+        assert (3, 4) not in ev
+        assert (4, 5) not in ev
+        assert (5, 6) not in ev
+        assert (7, 8) not in ev
+        assert (8, 9) not in ev
 
 
 class TestMultiEdgeDataView(TestEdgeDataView):
@@ -490,11 +490,11 @@ class TestMultiEdgeDataView(TestEdgeDataView):
         assert (0, 1) in ev
         assert (1, 2) in ev
         assert (2, 3) in ev
-        assert not (3, 4) in ev
-        assert not (4, 5) in ev
-        assert not (5, 6) in ev
-        assert not (7, 8) in ev
-        assert not (8, 9) in ev
+        assert (3, 4) not in ev
+        assert (4, 5) not in ev
+        assert (5, 6) not in ev
+        assert (7, 8) not in ev
+        assert (8, 9) not in ev
 
 
 class TestOutMultiEdgeDataView(TestOutEdgeDataView):
@@ -520,13 +520,13 @@ class TestOutMultiEdgeDataView(TestOutEdgeDataView):
         evr = self.eview(self.G)
         ev = evr(nbunch=[0, 2])
         assert (0, 1) in ev
-        assert not (1, 2) in ev
+        assert (1, 2) not in ev
         assert (2, 3) in ev
-        assert not (3, 4) in ev
-        assert not (4, 5) in ev
-        assert not (5, 6) in ev
-        assert not (7, 8) in ev
-        assert not (8, 9) in ev
+        assert (3, 4) not in ev
+        assert (4, 5) not in ev
+        assert (5, 6) not in ev
+        assert (7, 8) not in ev
+        assert (8, 9) not in ev
 
 
 class TestInMultiEdgeDataView(TestOutMultiEdgeDataView):
@@ -548,14 +548,14 @@ class TestInMultiEdgeDataView(TestOutMultiEdgeDataView):
     def test_contains_with_nbunch(self):
         evr = self.eview(self.G)
         ev = evr(nbunch=[0, 2])
-        assert not (0, 1) in ev
+        assert (0, 1) not in ev
         assert (1, 2) in ev
-        assert not (2, 3) in ev
-        assert not (3, 4) in ev
-        assert not (4, 5) in ev
-        assert not (5, 6) in ev
-        assert not (7, 8) in ev
-        assert not (8, 9) in ev
+        assert (2, 3) not in ev
+        assert (3, 4) not in ev
+        assert (4, 5) not in ev
+        assert (5, 6) not in ev
+        assert (7, 8) not in ev
+        assert (8, 9) not in ev
 
 
 # Edge Views
@@ -631,13 +631,13 @@ class TestEdgeView:
         else:
             assert (1, 2) in ev and (2, 1) in ev
             assert (1, 2) in edv and (2, 1) in edv
-        assert not (1, 4) in ev
-        assert not (1, 4) in edv
+        assert (1, 4) not in ev
+        assert (1, 4) not in edv
         # edge not in graph
-        assert not (1, 90) in ev
-        assert not (90, 1) in ev
-        assert not (1, 90) in edv
-        assert not (90, 1) in edv
+        assert (1, 90) not in ev
+        assert (90, 1) not in ev
+        assert (1, 90) not in edv
+        assert (90, 1) not in edv
 
     def test_contains_with_nbunch(self):
         ev = self.eview(self.G)
@@ -645,11 +645,11 @@ class TestEdgeView:
         assert (0, 1) in evn
         assert (1, 2) in evn
         assert (2, 3) in evn
-        assert not (3, 4) in evn
-        assert not (4, 5) in evn
-        assert not (5, 6) in evn
-        assert not (7, 8) in evn
-        assert not (8, 9) in evn
+        assert (3, 4) not in evn
+        assert (4, 5) not in evn
+        assert (5, 6) not in evn
+        assert (7, 8) not in evn
+        assert (8, 9) not in evn
 
     def test_len(self):
         ev = self.eview(self.G)
@@ -726,13 +726,13 @@ class TestOutEdgeView(TestEdgeView):
         ev = self.eview(self.G)
         evn = ev(nbunch=[0, 2])
         assert (0, 1) in evn
-        assert not (1, 2) in evn
+        assert (1, 2) not in evn
         assert (2, 3) in evn
-        assert not (3, 4) in evn
-        assert not (4, 5) in evn
-        assert not (5, 6) in evn
-        assert not (7, 8) in evn
-        assert not (8, 9) in evn
+        assert (3, 4) not in evn
+        assert (4, 5) not in evn
+        assert (5, 6) not in evn
+        assert (7, 8) not in evn
+        assert (8, 9) not in evn
 
 
 class TestInEdgeView(TestEdgeView):
@@ -752,14 +752,14 @@ class TestInEdgeView(TestEdgeView):
     def test_contains_with_nbunch(self):
         ev = self.eview(self.G)
         evn = ev(nbunch=[0, 2])
-        assert not (0, 1) in evn
+        assert (0, 1) not in evn
         assert (1, 2) in evn
-        assert not (2, 3) in evn
-        assert not (3, 4) in evn
-        assert not (4, 5) in evn
-        assert not (5, 6) in evn
-        assert not (7, 8) in evn
-        assert not (8, 9) in evn
+        assert (2, 3) not in evn
+        assert (3, 4) not in evn
+        assert (4, 5) not in evn
+        assert (5, 6) not in evn
+        assert (7, 8) not in evn
+        assert (8, 9) not in evn
 
 
 class TestMultiEdgeView(TestEdgeView):
@@ -871,7 +871,7 @@ class TestMultiEdgeView(TestEdgeView):
         for e in ev:
             assert len(e) == 3
         elist = sorted([(i, i + 1, 0) for i in range(8)] + [(1, 2, 3)])
-        assert sorted(list(ev)) == elist
+        assert sorted(ev) == elist
         # test order of arguments:graph, nbunch, data, keys, default
         ev = evr((1, 2), "foo", True, 1)
         for e in ev:
@@ -938,11 +938,11 @@ class TestMultiEdgeView(TestEdgeView):
         assert (0, 1) in evn
         assert (1, 2) in evn
         assert (2, 3) in evn
-        assert not (3, 4) in evn
-        assert not (4, 5) in evn
-        assert not (5, 6) in evn
-        assert not (7, 8) in evn
-        assert not (8, 9) in evn
+        assert (3, 4) not in evn
+        assert (4, 5) not in evn
+        assert (5, 6) not in evn
+        assert (7, 8) not in evn
+        assert (8, 9) not in evn
 
 
 class TestOutMultiEdgeView(TestMultiEdgeView):
@@ -969,13 +969,13 @@ class TestOutMultiEdgeView(TestMultiEdgeView):
         ev = self.eview(self.G)
         evn = ev(nbunch=[0, 2])
         assert (0, 1) in evn
-        assert not (1, 2) in evn
+        assert (1, 2) not in evn
         assert (2, 3) in evn
-        assert not (3, 4) in evn
-        assert not (4, 5) in evn
-        assert not (5, 6) in evn
-        assert not (7, 8) in evn
-        assert not (8, 9) in evn
+        assert (3, 4) not in evn
+        assert (4, 5) not in evn
+        assert (5, 6) not in evn
+        assert (7, 8) not in evn
+        assert (8, 9) not in evn
 
 
 class TestInMultiEdgeView(TestMultiEdgeView):
@@ -1001,14 +1001,14 @@ class TestInMultiEdgeView(TestMultiEdgeView):
     def test_contains_with_nbunch(self):
         ev = self.eview(self.G)
         evn = ev(nbunch=[0, 2])
-        assert not (0, 1) in evn
+        assert (0, 1) not in evn
         assert (1, 2) in evn
-        assert not (2, 3) in evn
-        assert not (3, 4) in evn
-        assert not (4, 5) in evn
-        assert not (5, 6) in evn
-        assert not (7, 8) in evn
-        assert not (8, 9) in evn
+        assert (2, 3) not in evn
+        assert (3, 4) not in evn
+        assert (4, 5) not in evn
+        assert (5, 6) not in evn
+        assert (7, 8) not in evn
+        assert (8, 9) not in evn
 
 
 # Degrees
diff --git a/networkx/classes/tests/test_special.py b/networkx/classes/tests/test_special.py
index fbeb5f8..1fa7960 100644
--- a/networkx/classes/tests/test_special.py
+++ b/networkx/classes/tests/test_special.py
@@ -1,5 +1,3 @@
-from collections import OrderedDict
-
 import networkx as nx
 
 from .test_digraph import BaseDiGraphTester
@@ -58,19 +56,6 @@ class TestSpecialGraph(_TestGraph):
         self.Graph = nx.Graph
 
 
-class TestOrderedGraph(_TestGraph):
-    def setup_method(self):
-        _TestGraph.setup_method(self)
-
-        class MyGraph(nx.Graph):
-            node_dict_factory = OrderedDict
-            adjlist_outer_dict_factory = OrderedDict
-            adjlist_inner_dict_factory = OrderedDict
-            edge_attr_dict_factory = OrderedDict
-
-        self.Graph = MyGraph
-
-
 class TestThinGraph(BaseGraphTester):
     def setup_method(self):
         all_edge_dict = {"weight": 1}
@@ -99,19 +84,6 @@ class TestSpecialDiGraph(_TestDiGraph):
         self.Graph = nx.DiGraph
 
 
-class TestOrderedDiGraph(_TestDiGraph):
-    def setup_method(self):
-        _TestDiGraph.setup_method(self)
-
-        class MyGraph(nx.DiGraph):
-            node_dict_factory = OrderedDict
-            adjlist_outer_dict_factory = OrderedDict
-            adjlist_inner_dict_factory = OrderedDict
-            edge_attr_dict_factory = OrderedDict
-
-        self.Graph = MyGraph
-
-
 class TestThinDiGraph(BaseDiGraphTester):
     def setup_method(self):
         all_edge_dict = {"weight": 1}
@@ -153,35 +125,7 @@ class TestSpecialMultiGraph(_TestMultiGraph):
         self.Graph = nx.MultiGraph
 
 
-class TestOrderedMultiGraph(_TestMultiGraph):
-    def setup_method(self):
-        _TestMultiGraph.setup_method(self)
-
-        class MyGraph(nx.MultiGraph):
-            node_dict_factory = OrderedDict
-            adjlist_outer_dict_factory = OrderedDict
-            adjlist_inner_dict_factory = OrderedDict
-            edge_key_dict_factory = OrderedDict
-            edge_attr_dict_factory = OrderedDict
-
-        self.Graph = MyGraph
-
-
 class TestSpecialMultiDiGraph(_TestMultiDiGraph):
     def setup_method(self):
         _TestMultiDiGraph.setup_method(self)
         self.Graph = nx.MultiDiGraph
-
-
-class TestOrderedMultiDiGraph(_TestMultiDiGraph):
-    def setup_method(self):
-        _TestMultiDiGraph.setup_method(self)
-
-        class MyGraph(nx.MultiDiGraph):
-            node_dict_factory = OrderedDict
-            adjlist_outer_dict_factory = OrderedDict
-            adjlist_inner_dict_factory = OrderedDict
-            edge_key_dict_factory = OrderedDict
-            edge_attr_dict_factory = OrderedDict
-
-        self.Graph = MyGraph
diff --git a/networkx/conftest.py b/networkx/conftest.py
index 0306530..8da660b 100644
--- a/networkx/conftest.py
+++ b/networkx/conftest.py
@@ -30,6 +30,10 @@ def pytest_configure(config):
 
 
 def pytest_collection_modifyitems(config, items):
+    # Allow pluggable backends to add markers to tests when
+    # running in auto-conversion test mode
+    networkx.classes.backends._mark_tests(items)
+
     if config.getoption("--runslow"):
         # --runslow given in cli: do not skip slow tests
         return
@@ -42,15 +46,6 @@ def pytest_collection_modifyitems(config, items):
 # TODO: The warnings below need to be dealt with, but for now we silence them.
 @pytest.fixture(autouse=True)
 def set_warnings():
-    warnings.filterwarnings(
-        "ignore", category=DeprecationWarning, message="k_nearest_neighbors"
-    )
-    warnings.filterwarnings(
-        "ignore", category=DeprecationWarning, message="numeric_mixing_matrix"
-    )
-    warnings.filterwarnings(
-        "ignore", category=DeprecationWarning, message=r"Ordered.* is deprecated"
-    )
     warnings.filterwarnings(
         "ignore",
         category=DeprecationWarning,
@@ -61,192 +56,30 @@ def set_warnings():
         category=DeprecationWarning,
         message="literal_destringizer is deprecated",
     )
+    # create_using for scale_free_graph
     warnings.filterwarnings(
-        "ignore", category=DeprecationWarning, message="is_string_like is deprecated"
-    )
-    warnings.filterwarnings(
-        "ignore", category=DeprecationWarning, message="\nauthority_matrix"
-    )
-    warnings.filterwarnings(
-        "ignore", category=DeprecationWarning, message="\nhub_matrix"
-    )
-    warnings.filterwarnings(
-        "ignore", category=DeprecationWarning, message="default_opener is deprecated"
-    )
-    warnings.filterwarnings(
-        "ignore", category=DeprecationWarning, message="empty_generator is deprecated"
-    )
-    warnings.filterwarnings(
-        "ignore", category=DeprecationWarning, message="make_str is deprecated"
+        "ignore", category=DeprecationWarning, message="The create_using argument"
     )
     warnings.filterwarnings(
-        "ignore",
-        category=DeprecationWarning,
-        message="generate_unique_node is deprecated",
+        "ignore", category=DeprecationWarning, message="nx.nx_pydot"
     )
     warnings.filterwarnings(
         "ignore",
         category=DeprecationWarning,
-        message="context manager reversed is deprecated",
+        message="\n\nThe `attrs` keyword argument of node_link",
     )
     warnings.filterwarnings(
         "ignore",
         category=DeprecationWarning,
-        message="This will return a generator in 3.0*",
-    )
-    warnings.filterwarnings(
-        "ignore", category=DeprecationWarning, message="betweenness_centrality_source"
-    )
-    warnings.filterwarnings(
-        "ignore", category=DeprecationWarning, message="edge_betweeness"
-    )
-    warnings.filterwarnings(
-        "ignore", category=PendingDeprecationWarning, message="the matrix subclass"
-    )
-    warnings.filterwarnings(
-        "ignore", category=DeprecationWarning, message="to_numpy_matrix"
-    )
-    warnings.filterwarnings(
-        "ignore", category=DeprecationWarning, message="from_numpy_matrix"
-    )
-    warnings.filterwarnings(
-        "ignore", category=DeprecationWarning, message="networkx.pagerank_numpy"
-    )
-    warnings.filterwarnings(
-        "ignore", category=DeprecationWarning, message="networkx.pagerank_scipy"
-    )
-    warnings.filterwarnings(
-        "ignore", category=DeprecationWarning, message="write_gpickle"
-    )
-    warnings.filterwarnings(
-        "ignore", category=DeprecationWarning, message="read_gpickle"
-    )
-    warnings.filterwarnings("ignore", category=DeprecationWarning, message="write_shp")
-    warnings.filterwarnings("ignore", category=DeprecationWarning, message="read_shp")
-    warnings.filterwarnings(
-        "ignore", category=DeprecationWarning, message="edges_from_line"
-    )
-    warnings.filterwarnings("ignore", category=DeprecationWarning, message="write_yaml")
-    warnings.filterwarnings("ignore", category=DeprecationWarning, message="read_yaml")
-    warnings.filterwarnings(
-        "ignore", category=DeprecationWarning, message="FilterAtlas.copy"
-    )
-    warnings.filterwarnings(
-        "ignore", category=DeprecationWarning, message="FilterAdjacency.copy"
-    )
-    warnings.filterwarnings(
-        "ignore", category=DeprecationWarning, message="FilterMultiAdjacency.copy"
-    )
-    warnings.filterwarnings(
-        "ignore", category=DeprecationWarning, message="FilterMultiInner.copy"
-    )
-    warnings.filterwarnings("ignore", category=DeprecationWarning, message="jit_data")
-    warnings.filterwarnings("ignore", category=DeprecationWarning, message="jit_graph")
-    warnings.filterwarnings("ignore", category=DeprecationWarning, message="consume")
-    warnings.filterwarnings(
-        "ignore", category=DeprecationWarning, message="iterable is deprecated"
-    )
-    warnings.filterwarnings(
-        "ignore",
-        category=FutureWarning,
-        message="\nThe function signature for cytoscape",
-    )
-    warnings.filterwarnings(
-        "ignore", category=DeprecationWarning, message="\nThe `attrs` keyword"
-    )
-    warnings.filterwarnings(
-        "ignore", category=DeprecationWarning, message="preserve_random_state"
-    )
-    warnings.filterwarnings(
-        "ignore", category=DeprecationWarning, message="`almost_equal`"
-    )
-    warnings.filterwarnings(
-        "ignore", category=DeprecationWarning, message="`assert_nodes_equal`"
-    )
-    warnings.filterwarnings(
-        "ignore", category=DeprecationWarning, message="`assert_edges_equal`"
-    )
-    warnings.filterwarnings(
-        "ignore", category=DeprecationWarning, message="`assert_graphs_equal`"
-    )
-    warnings.filterwarnings(
-        "ignore", category=DeprecationWarning, message="networkx.hits_scipy"
-    )
-    warnings.filterwarnings(
-        "ignore", category=DeprecationWarning, message="networkx.hits_numpy"
-    )
-    warnings.filterwarnings(
-        "ignore", category=DeprecationWarning, message="preserve_random_state"
-    )
-    warnings.filterwarnings(
-        "ignore",
-        category=FutureWarning,
-        message="google_matrix will return an np.ndarray instead of a np.matrix",
-    )
-    ### Future warnings from scipy.sparse array transition
-    warnings.filterwarnings(
-        "ignore", category=FutureWarning, message="biadjacency_matrix"
-    )
-    warnings.filterwarnings(
-        "ignore", category=FutureWarning, message="bethe_hessian_matrix"
-    )
-    warnings.filterwarnings(
-        "ignore", category=FutureWarning, message="incidence_matrix"
-    )
-    warnings.filterwarnings(
-        "ignore", category=FutureWarning, message="laplacian_matrix"
-    )
-    warnings.filterwarnings(
-        "ignore", category=FutureWarning, message="normalized_laplacian_matrix"
-    )
-    warnings.filterwarnings(
-        "ignore", category=FutureWarning, message="directed_laplacian_matrix"
-    )
-    warnings.filterwarnings(
-        "ignore",
-        category=FutureWarning,
-        message="directed_combinatorial_laplacian_matrix",
-    )
-    warnings.filterwarnings(
-        "ignore", category=FutureWarning, message="modularity_matrix"
-    )
-    warnings.filterwarnings(
-        "ignore", category=FutureWarning, message="directed_modularity_matrix"
-    )
-    warnings.filterwarnings(
-        "ignore", category=FutureWarning, message="adjacency_matrix"
+        message="single_target_shortest_path_length will",
     )
     warnings.filterwarnings(
         "ignore",
         category=DeprecationWarning,
-        message="\n\nThe scipy.sparse array containers",
-    )
-    warnings.filterwarnings(
-        "ignore", category=DeprecationWarning, message="networkx.project"
-    )
-    warnings.filterwarnings(
-        "ignore", category=DeprecationWarning, message="\nfind_cores"
-    )
-    warnings.filterwarnings("ignore", category=FutureWarning, message="attr_matrix")
-    warnings.filterwarnings(
-        "ignore", category=DeprecationWarning, message=r"\n\nmake_small_.*"
-    )
-    warnings.filterwarnings(
-        "ignore", category=DeprecationWarning, message="to_numpy_recarray"
-    )
-    warnings.filterwarnings("ignore", category=DeprecationWarning, message="info")
-    warnings.filterwarnings("ignore", category=DeprecationWarning, message="to_tuple")
-    # create_using for scale_free_graph
-    warnings.filterwarnings(
-        "ignore", category=DeprecationWarning, message="The create_using argument"
-    )
-    warnings.filterwarnings(
-        "ignore", category=PendingDeprecationWarning, message="nx.nx_pydot"
+        message="shortest_path for all_pairs",
     )
     warnings.filterwarnings(
-        "ignore",
-        category=DeprecationWarning,
-        message="\n\nThe `attrs` keyword argument of node_link",
+        "ignore", category=DeprecationWarning, message="\nforest_str is deprecated"
     )
 
 
@@ -292,13 +125,6 @@ try:
 except ImportError:
     has_pygraphviz = False
 
-try:
-    import yaml
-
-    has_yaml = True
-except ImportError:
-    has_yaml = False
-
 try:
     import pydot
 
@@ -306,13 +132,6 @@ try:
 except ImportError:
     has_pydot = False
 
-try:
-    import ogr
-
-    has_ogr = True
-except ImportError:
-    has_ogr = False
-
 try:
     import sympy
 
@@ -328,12 +147,13 @@ collect_ignore = []
 needs_numpy = [
     "algorithms/approximation/traveling_salesman.py",
     "algorithms/centrality/current_flow_closeness.py",
-    "algorithms/node_classification/__init__.py",
+    "algorithms/node_classification.py",
     "algorithms/non_randomness.py",
     "algorithms/shortest_paths/dense.py",
     "linalg/bethehessianmatrix.py",
     "linalg/laplacianmatrix.py",
     "utils/misc.py",
+    "algorithms/centrality/laplacian.py",
 ]
 needs_scipy = [
     "algorithms/approximation/traveling_salesman.py",
@@ -351,9 +171,7 @@ needs_scipy = [
     "algorithms/communicability_alg.py",
     "algorithms/link_analysis/hits_alg.py",
     "algorithms/link_analysis/pagerank_alg.py",
-    "algorithms/node_classification/__init__.py",
-    "algorithms/node_classification/hmn.py",
-    "algorithms/node_classification/lgc.py",
+    "algorithms/node_classification.py",
     "algorithms/similarity.py",
     "convert_matrix.py",
     "drawing/layout.py",
@@ -365,13 +183,12 @@ needs_scipy = [
     "linalg/modularitymatrix.py",
     "linalg/spectrum.py",
     "utils/rcm.py",
+    "algorithms/centrality/laplacian.py",
 ]
 needs_matplotlib = ["drawing/nx_pylab.py"]
 needs_pandas = ["convert_matrix.py"]
-needs_yaml = ["readwrite/nx_yaml.py"]
 needs_pygraphviz = ["drawing/nx_agraph.py"]
 needs_pydot = ["drawing/nx_pydot.py"]
-needs_ogr = ["readwrite/nx_shp.py"]
 needs_sympy = ["algorithms/polynomials.py"]
 
 if not has_numpy:
@@ -382,13 +199,9 @@ if not has_matplotlib:
     collect_ignore += needs_matplotlib
 if not has_pandas:
     collect_ignore += needs_pandas
-if not has_yaml:
-    collect_ignore += needs_yaml
 if not has_pygraphviz:
     collect_ignore += needs_pygraphviz
 if not has_pydot:
     collect_ignore += needs_pydot
-if not has_ogr:
-    collect_ignore += needs_ogr
 if not has_sympy:
     collect_ignore += needs_sympy
diff --git a/networkx/convert.py b/networkx/convert.py
index 3356dd0..7ed668f 100644
--- a/networkx/convert.py
+++ b/networkx/convert.py
@@ -57,7 +57,7 @@ def to_networkx_graph(data, create_using=None, multigraph_input=False):
          generator of edges
          Pandas DataFrame (row per edge)
          2D numpy array
-         scipy sparse matrix
+         scipy sparse array
          pygraphviz agraph
 
     create_using : NetworkX graph constructor, optional (default=nx.Graph)
@@ -135,7 +135,7 @@ def to_networkx_graph(data, create_using=None, multigraph_input=False):
     except ImportError:
         warnings.warn("pandas not found, skipping conversion test.", ImportWarning)
 
-    # numpy matrix or ndarray
+    # numpy array
     try:
         import numpy as np
 
@@ -149,16 +149,16 @@ def to_networkx_graph(data, create_using=None, multigraph_input=False):
     except ImportError:
         warnings.warn("numpy not found, skipping conversion test.", ImportWarning)
 
-    # scipy sparse matrix - any format
+    # scipy sparse array - any format
     try:
         import scipy
 
         if hasattr(data, "format"):
             try:
-                return nx.from_scipy_sparse_matrix(data, create_using=create_using)
+                return nx.from_scipy_sparse_array(data, create_using=create_using)
             except Exception as err:
                 raise nx.NetworkXError(
-                    "Input is not a correct scipy sparse matrix type."
+                    "Input is not a correct scipy sparse array type."
                 ) from err
     except ImportError:
         warnings.warn("scipy not found, skipping conversion test.", ImportWarning)
diff --git a/networkx/convert_matrix.py b/networkx/convert_matrix.py
index fecc8ca..b595522 100644
--- a/networkx/convert_matrix.py
+++ b/networkx/convert_matrix.py
@@ -26,24 +26,18 @@ nx_agraph, nx_pydot
 """
 
 import itertools
-import warnings
 from collections import defaultdict
 
 import networkx as nx
 from networkx.utils import not_implemented_for
 
 __all__ = [
-    "from_numpy_matrix",
-    "to_numpy_matrix",
     "from_pandas_adjacency",
     "to_pandas_adjacency",
     "from_pandas_edgelist",
     "to_pandas_edgelist",
-    "to_numpy_recarray",
     "from_scipy_sparse_array",
-    "from_scipy_sparse_matrix",
     "to_scipy_sparse_array",
-    "to_scipy_sparse_matrix",
     "from_numpy_array",
     "to_numpy_array",
 ]
@@ -197,7 +191,7 @@ def from_pandas_adjacency(df, create_using=None):
     1  2  1
     >>> G = nx.from_pandas_adjacency(df)
     >>> G.name = "Graph from pandas adjacency matrix"
-    >>> print(nx.info(G))
+    >>> print(G)
     Graph named 'Graph from pandas adjacency matrix' with 2 nodes and 3 edges
     """
 
@@ -221,7 +215,6 @@ def to_pandas_edgelist(
     target="target",
     nodelist=None,
     dtype=None,
-    order=None,
     edge_key=None,
 ):
     """Returns the graph edge list as a Pandas DataFrame.
@@ -246,12 +239,6 @@ def to_pandas_edgelist(
         Use to create the DataFrame. Data type to force.
         Only a single dtype is allowed. If None, infer.
 
-    order : None
-        An unused parameter mistakenly included in the function.
-
-        .. deprecated:: 2.6
-            This is deprecated and will be removed in NetworkX v3.0.
-
     edge_key : str or int or None, optional (default=None)
         A valid column name (string or integer) for the edge keys (for the
         multigraph case). If None, edge keys are not stored in the DataFrame.
@@ -476,323 +463,6 @@ def from_pandas_edgelist(
     return g
 
 
-def to_numpy_matrix(
-    G,
-    nodelist=None,
-    dtype=None,
-    order=None,
-    multigraph_weight=sum,
-    weight="weight",
-    nonedge=0.0,
-):
-    """Returns the graph adjacency matrix as a NumPy matrix.
-
-    Parameters
-    ----------
-    G : graph
-        The NetworkX graph used to construct the NumPy matrix.
-
-    nodelist : list, optional
-        The rows and columns are ordered according to the nodes in `nodelist`.
-        If `nodelist` is None, then the ordering is produced by G.nodes().
-
-    dtype : NumPy data type, optional
-        A valid single NumPy data type used to initialize the array.
-        This must be a simple type such as int or numpy.float64 and
-        not a compound data type (see to_numpy_recarray)
-        If None, then the NumPy default is used.
-
-    order : {'C', 'F'}, optional
-        Whether to store multidimensional data in C- or Fortran-contiguous
-        (row- or column-wise) order in memory. If None, then the NumPy default
-        is used.
-
-    multigraph_weight : {sum, min, max}, optional
-        An operator that determines how weights in multigraphs are handled.
-        The default is to sum the weights of the multiple edges.
-
-    weight : string or None optional (default = 'weight')
-        The edge attribute that holds the numerical value used for
-        the edge weight. If an edge does not have that attribute, then the
-        value 1 is used instead.
-
-    nonedge : float (default = 0.0)
-        The matrix values corresponding to nonedges are typically set to zero.
-        However, this could be undesirable if there are matrix values
-        corresponding to actual edges that also have the value zero. If so,
-        one might prefer nonedges to have some other value, such as nan.
-
-    Returns
-    -------
-    M : NumPy matrix
-        Graph adjacency matrix
-
-    See Also
-    --------
-    to_numpy_recarray
-
-    Notes
-    -----
-    For directed graphs, entry i,j corresponds to an edge from i to j.
-
-    The matrix entries are assigned to the weight edge attribute. When
-    an edge does not have a weight attribute, the value of the entry is set to
-    the number 1.  For multiple (parallel) edges, the values of the entries
-    are determined by the `multigraph_weight` parameter.  The default is to
-    sum the weight attributes for each of the parallel edges.
-
-    When `nodelist` does not contain every node in `G`, the matrix is built
-    from the subgraph of `G` that is induced by the nodes in `nodelist`.
-
-    The convention used for self-loop edges in graphs is to assign the
-    diagonal matrix entry value to the weight attribute of the edge
-    (or the number 1 if the edge has no weight attribute).  If the
-    alternate convention of doubling the edge weight is desired the
-    resulting Numpy matrix can be modified as follows:
-
-    >>> import numpy as np
-    >>> G = nx.Graph([(1, 1)])
-    >>> A = nx.to_numpy_matrix(G)
-    >>> A
-    matrix([[1.]])
-    >>> A[np.diag_indices_from(A)] *= 2
-    >>> A
-    matrix([[2.]])
-
-    Examples
-    --------
-    >>> G = nx.MultiDiGraph()
-    >>> G.add_edge(0, 1, weight=2)
-    0
-    >>> G.add_edge(1, 0)
-    0
-    >>> G.add_edge(2, 2, weight=3)
-    0
-    >>> G.add_edge(2, 2)
-    1
-    >>> nx.to_numpy_matrix(G, nodelist=[0, 1, 2])
-    matrix([[0., 2., 0.],
-            [1., 0., 0.],
-            [0., 0., 4.]])
-
-    """
-    warnings.warn(
-        (
-            "to_numpy_matrix is deprecated and will be removed in NetworkX 3.0.\n"
-            "Use to_numpy_array instead, e.g. np.asmatrix(to_numpy_array(G, **kwargs))"
-        ),
-        DeprecationWarning,
-    )
-
-    import numpy as np
-
-    A = to_numpy_array(
-        G,
-        nodelist=nodelist,
-        dtype=dtype,
-        order=order,
-        multigraph_weight=multigraph_weight,
-        weight=weight,
-        nonedge=nonedge,
-    )
-    M = np.asmatrix(A, dtype=dtype)
-    return M
-
-
-def from_numpy_matrix(A, parallel_edges=False, create_using=None):
-    """Returns a graph from numpy matrix.
-
-    The numpy matrix is interpreted as an adjacency matrix for the graph.
-
-    Parameters
-    ----------
-    A : numpy matrix
-        An adjacency matrix representation of a graph
-
-    parallel_edges : Boolean
-        If True, `create_using` is a multigraph, and `A` is an
-        integer matrix, then entry *(i, j)* in the matrix is interpreted as the
-        number of parallel edges joining vertices *i* and *j* in the graph.
-        If False, then the entries in the adjacency matrix are interpreted as
-        the weight of a single edge joining the vertices.
-
-    create_using : NetworkX graph constructor, optional (default=nx.Graph)
-       Graph type to create. If graph instance, then cleared before populated.
-
-    Notes
-    -----
-    For directed graphs, explicitly mention create_using=nx.DiGraph,
-    and entry i,j of A corresponds to an edge from i to j.
-
-    If `create_using` is :class:`networkx.MultiGraph` or
-    :class:`networkx.MultiDiGraph`, `parallel_edges` is True, and the
-    entries of `A` are of type :class:`int`, then this function returns a
-    multigraph (constructed from `create_using`) with parallel edges.
-
-    If `create_using` indicates an undirected multigraph, then only the edges
-    indicated by the upper triangle of the matrix `A` will be added to the
-    graph.
-
-    If the numpy matrix has a single data type for each matrix entry it
-    will be converted to an appropriate Python data type.
-
-    If the numpy matrix has a user-specified compound data type the names
-    of the data fields will be used as attribute keys in the resulting
-    NetworkX graph.
-
-    See Also
-    --------
-    to_numpy_recarray
-
-    Examples
-    --------
-    Simple integer weights on edges:
-
-    >>> import numpy as np
-    >>> A = np.array([[1, 1], [2, 1]])
-    >>> G = nx.from_numpy_matrix(A)
-
-    If `create_using` indicates a multigraph and the matrix has only integer
-    entries and `parallel_edges` is False, then the entries will be treated
-    as weights for edges joining the nodes (without creating parallel edges):
-
-    >>> A = np.array([[1, 1], [1, 2]])
-    >>> G = nx.from_numpy_matrix(A, create_using=nx.MultiGraph)
-    >>> G[1][1]
-    AtlasView({0: {'weight': 2}})
-
-    If `create_using` indicates a multigraph and the matrix has only integer
-    entries and `parallel_edges` is True, then the entries will be treated
-    as the number of parallel edges joining those two vertices:
-
-    >>> A = np.array([[1, 1], [1, 2]])
-    >>> temp = nx.MultiGraph()
-    >>> G = nx.from_numpy_matrix(A, parallel_edges=True, create_using=temp)
-    >>> G[1][1]
-    AtlasView({0: {'weight': 1}, 1: {'weight': 1}})
-
-    User defined compound data type on edges:
-
-    >>> dt = [("weight", float), ("cost", int)]
-    >>> A = np.array([[(1.0, 2)]], dtype=dt)
-    >>> G = nx.from_numpy_matrix(A)
-    >>> list(G.edges())
-    [(0, 0)]
-    >>> G[0][0]["cost"]
-    2
-    >>> G[0][0]["weight"]
-    1.0
-
-    """
-    warnings.warn(
-        (
-            "from_numpy_matrix is deprecated and will be removed in NetworkX 3.0.\n"
-            "Use from_numpy_array instead, e.g. from_numpy_array(A, **kwargs)"
-        ),
-        DeprecationWarning,
-    )
-    return from_numpy_array(A, parallel_edges=parallel_edges, create_using=create_using)
-
-
-@not_implemented_for("multigraph")
-def to_numpy_recarray(G, nodelist=None, dtype=None, order=None):
-    """Returns the graph adjacency matrix as a NumPy recarray.
-
-    .. deprecated:: 2.7
-
-       ``to_numpy_recarray`` is deprecated and will be removed in NetworkX 3.0.
-       Use ``nx.to_numpy_array(G, dtype=dtype, weight=None).view(np.recarray)``
-       instead.
-
-    Parameters
-    ----------
-    G : graph
-        The NetworkX graph used to construct the NumPy recarray.
-
-    nodelist : list, optional
-       The rows and columns are ordered according to the nodes in `nodelist`.
-       If `nodelist` is None, then the ordering is produced by G.nodes().
-
-    dtype : NumPy data-type, optional
-        A valid NumPy named dtype used to initialize the NumPy recarray.
-        The data type names are assumed to be keys in the graph edge attribute
-        dictionary. The default is ``dtype([("weight", float)])``.
-
-    order : {'C', 'F'}, optional
-        Whether to store multidimensional data in C- or Fortran-contiguous
-        (row- or column-wise) order in memory. If None, then the NumPy default
-        is used.
-
-    Returns
-    -------
-    M : NumPy recarray
-       The graph with specified edge data as a Numpy recarray
-
-    Notes
-    -----
-    When `nodelist` does not contain every node in `G`, the adjacency
-    matrix is built from the subgraph of `G` that is induced by the nodes in
-    `nodelist`.
-
-    Examples
-    --------
-    >>> G = nx.Graph()
-    >>> G.add_edge(1, 2, weight=7.0, cost=5)
-    >>> A = nx.to_numpy_recarray(G, dtype=[("weight", float), ("cost", int)])
-    >>> print(A.weight)
-    [[0. 7.]
-     [7. 0.]]
-    >>> print(A.cost)
-    [[0 5]
-     [5 0]]
-
-    """
-    import warnings
-
-    import numpy as np
-
-    warnings.warn(
-        (
-            "to_numpy_recarray is deprecated and will be removed in version 3.0.\n"
-            "Use to_numpy_array instead::\n\n"
-            "    nx.to_numpy_array(G, dtype=dtype, weight=None).view(np.recarray)"
-        ),
-        DeprecationWarning,
-        stacklevel=2,
-    )
-
-    if dtype is None:
-        dtype = [("weight", float)]
-
-    if nodelist is None:
-        nodelist = list(G)
-        nodeset = G
-        nlen = len(G)
-    else:
-        nlen = len(nodelist)
-        nodeset = set(G.nbunch_iter(nodelist))
-        if nlen != len(nodeset):
-            for n in nodelist:
-                if n not in G:
-                    raise nx.NetworkXError(f"Node {n} in nodelist is not in G")
-            raise nx.NetworkXError("nodelist contains duplicates.")
-
-    undirected = not G.is_directed()
-    index = dict(zip(nodelist, range(nlen)))
-    M = np.zeros((nlen, nlen), dtype=dtype, order=order)
-
-    names = M.dtype.names
-    for u, v, attrs in G.edges(data=True):
-        if (u in nodeset) and (v in nodeset):
-            i, j = index[u], index[v]
-            values = tuple(attrs[n] for n in names)
-            M[i, j] = values
-            if undirected:
-                M[j, i] = M[i, j]
-
-    return M.view(np.recarray)
-
-
 def to_scipy_sparse_array(G, nodelist=None, dtype=None, weight="weight", format="csr"):
     """Returns the graph adjacency matrix as a SciPy sparse array.
 
@@ -841,7 +511,7 @@ def to_scipy_sparse_array(G, nodelist=None, dtype=None, weight="weight", format=
     diagonal matrix entry value to the weight attribute of the edge
     (or the number 1 if the edge has no weight attribute).  If the
     alternate convention of doubling the edge weight is desired the
-    resulting Scipy sparse matrix can be modified as follows:
+    resulting SciPy sparse array can be modified as follows:
 
     >>> G = nx.Graph([(1, 1)])
     >>> A = nx.to_scipy_sparse_array(G)
@@ -927,189 +597,8 @@ def to_scipy_sparse_array(G, nodelist=None, dtype=None, weight="weight", format=
         raise nx.NetworkXError(f"Unknown sparse matrix format: {format}") from err
 
 
-def to_scipy_sparse_matrix(G, nodelist=None, dtype=None, weight="weight", format="csr"):
-    """Returns the graph adjacency matrix as a SciPy sparse matrix.
-
-    Parameters
-    ----------
-    G : graph
-        The NetworkX graph used to construct the sparse matrix.
-
-    nodelist : list, optional
-       The rows and columns are ordered according to the nodes in `nodelist`.
-       If `nodelist` is None, then the ordering is produced by G.nodes().
-
-    dtype : NumPy data-type, optional
-        A valid NumPy dtype used to initialize the array. If None, then the
-        NumPy default is used.
-
-    weight : string or None   optional (default='weight')
-        The edge attribute that holds the numerical value used for
-        the edge weight.  If None then all edge weights are 1.
-
-    format : str in {'bsr', 'csr', 'csc', 'coo', 'lil', 'dia', 'dok'}
-        The type of the matrix to be returned (default 'csr').  For
-        some algorithms different implementations of sparse matrices
-        can perform better.  See [1]_ for details.
-
-    Returns
-    -------
-    A : SciPy sparse matrix
-       Graph adjacency matrix.
-
-    Notes
-    -----
-    For directed graphs, matrix entry i,j corresponds to an edge from i to j.
-
-    The matrix entries are populated using the edge attribute held in
-    parameter weight. When an edge does not have that attribute, the
-    value of the entry is 1.
-
-    For multiple edges the matrix values are the sums of the edge weights.
-
-    When `nodelist` does not contain every node in `G`, the adjacency matrix
-    is built from the subgraph of `G` that is induced by the nodes in
-    `nodelist`.
-
-    The convention used for self-loop edges in graphs is to assign the
-    diagonal matrix entry value to the weight attribute of the edge
-    (or the number 1 if the edge has no weight attribute).  If the
-    alternate convention of doubling the edge weight is desired the
-    resulting Scipy sparse matrix can be modified as follows:
-
-    >>> G = nx.Graph([(1, 1)])
-    >>> A = nx.to_scipy_sparse_matrix(G)
-    >>> print(A.todense())
-    [[1]]
-    >>> A.setdiag(A.diagonal() * 2)
-    >>> print(A.todense())
-    [[2]]
-
-    Examples
-    --------
-    >>> G = nx.MultiDiGraph()
-    >>> G.add_edge(0, 1, weight=2)
-    0
-    >>> G.add_edge(1, 0)
-    0
-    >>> G.add_edge(2, 2, weight=3)
-    0
-    >>> G.add_edge(2, 2)
-    1
-    >>> S = nx.to_scipy_sparse_matrix(G, nodelist=[0, 1, 2])
-    >>> print(S.todense())
-    [[0 2 0]
-     [1 0 0]
-     [0 0 4]]
-
-    References
-    ----------
-    .. [1] Scipy Dev. References, "Sparse Matrices",
-       https://docs.scipy.org/doc/scipy/reference/sparse.html
-    """
-    import scipy as sp
-    import scipy.sparse
-
-    warnings.warn(
-        (
-            "\n\nThe scipy.sparse array containers will be used instead of matrices\n"
-            "in Networkx 3.0. Use `to_scipy_sparse_array` instead."
-        ),
-        DeprecationWarning,
-        stacklevel=2,
-    )
-    A = to_scipy_sparse_array(
-        G, nodelist=nodelist, dtype=dtype, weight=weight, format=format
-    )
-    return sp.sparse.csr_matrix(A).asformat(format)
-
-
-def from_scipy_sparse_matrix(
-    A, parallel_edges=False, create_using=None, edge_attribute="weight"
-):
-    """Creates a new graph from an adjacency matrix given as a SciPy sparse
-    matrix.
-
-    Parameters
-    ----------
-    A: scipy sparse matrix
-      An adjacency matrix representation of a graph
-
-    parallel_edges : Boolean
-      If this is True, `create_using` is a multigraph, and `A` is an
-      integer matrix, then entry *(i, j)* in the matrix is interpreted as the
-      number of parallel edges joining vertices *i* and *j* in the graph.
-      If it is False, then the entries in the matrix are interpreted as
-      the weight of a single edge joining the vertices.
-
-    create_using : NetworkX graph constructor, optional (default=nx.Graph)
-       Graph type to create. If graph instance, then cleared before populated.
-
-    edge_attribute: string
-       Name of edge attribute to store matrix numeric value. The data will
-       have the same type as the matrix entry (int, float, (real,imag)).
-
-    Notes
-    -----
-    For directed graphs, explicitly mention create_using=nx.DiGraph,
-    and entry i,j of A corresponds to an edge from i to j.
-
-    If `create_using` is :class:`networkx.MultiGraph` or
-    :class:`networkx.MultiDiGraph`, `parallel_edges` is True, and the
-    entries of `A` are of type :class:`int`, then this function returns a
-    multigraph (constructed from `create_using`) with parallel edges.
-    In this case, `edge_attribute` will be ignored.
-
-    If `create_using` indicates an undirected multigraph, then only the edges
-    indicated by the upper triangle of the matrix `A` will be added to the
-    graph.
-
-    Examples
-    --------
-    >>> import scipy as sp
-    >>> import scipy.sparse  # call as sp.sparse
-    >>> A = sp.sparse.eye(2, 2, 1)
-    >>> G = nx.from_scipy_sparse_matrix(A)
-
-    If `create_using` indicates a multigraph and the matrix has only integer
-    entries and `parallel_edges` is False, then the entries will be treated
-    as weights for edges joining the nodes (without creating parallel edges):
-
-    >>> A = sp.sparse.csr_matrix([[1, 1], [1, 2]])
-    >>> G = nx.from_scipy_sparse_matrix(A, create_using=nx.MultiGraph)
-    >>> G[1][1]
-    AtlasView({0: {'weight': 2}})
-
-    If `create_using` indicates a multigraph and the matrix has only integer
-    entries and `parallel_edges` is True, then the entries will be treated
-    as the number of parallel edges joining those two vertices:
-
-    >>> A = sp.sparse.csr_matrix([[1, 1], [1, 2]])
-    >>> G = nx.from_scipy_sparse_matrix(
-    ...     A, parallel_edges=True, create_using=nx.MultiGraph
-    ... )
-    >>> G[1][1]
-    AtlasView({0: {'weight': 1}, 1: {'weight': 1}})
-
-    """
-    warnings.warn(
-        (
-            "\n\nThe scipy.sparse array containers will be used instead of matrices\n"
-            "in Networkx 3.0. Use `from_scipy_sparse_array` instead."
-        ),
-        DeprecationWarning,
-        stacklevel=2,
-    )
-    return from_scipy_sparse_array(
-        A,
-        parallel_edges=parallel_edges,
-        create_using=create_using,
-        edge_attribute=edge_attribute,
-    )
-
-
 def _csr_gen_triples(A):
-    """Converts a SciPy sparse matrix in **Compressed Sparse Row** format to
+    """Converts a SciPy sparse array in **Compressed Sparse Row** format to
     an iterable of weighted edge triples.
 
     """
@@ -1121,7 +610,7 @@ def _csr_gen_triples(A):
 
 
 def _csc_gen_triples(A):
-    """Converts a SciPy sparse matrix in **Compressed Sparse Column** format to
+    """Converts a SciPy sparse array in **Compressed Sparse Column** format to
     an iterable of weighted edge triples.
 
     """
@@ -1133,7 +622,7 @@ def _csc_gen_triples(A):
 
 
 def _coo_gen_triples(A):
-    """Converts a SciPy sparse matrix in **Coordinate** format to an iterable
+    """Converts a SciPy sparse array in **Coordinate** format to an iterable
     of weighted edge triples.
 
     """
@@ -1142,7 +631,7 @@ def _coo_gen_triples(A):
 
 
 def _dok_gen_triples(A):
-    """Converts a SciPy sparse matrix in **Dictionary of Keys** format to an
+    """Converts a SciPy sparse array in **Dictionary of Keys** format to an
     iterable of weighted edge triples.
 
     """
@@ -1154,7 +643,7 @@ def _generate_weighted_edges(A):
     """Returns an iterable over (u, v, w) triples, where u and v are adjacent
     vertices and w is the weight of the edge joining u and v.
 
-    `A` is a SciPy sparse matrix (in any format).
+    `A` is a SciPy sparse array (in any format).
 
     """
     if A.format == "csr":
@@ -1658,7 +1147,7 @@ def from_numpy_array(A, parallel_edges=False, create_using=None):
             ((u, v, {"weight": 1}) for d in range(A[u, v])) for (u, v) in edges
         )
     else:  # basic data type
-        triples = ((u, v, dict(weight=python_type(A[u, v]))) for u, v in edges)
+        triples = ((u, v, {"weight": python_type(A[u, v])}) for u, v in edges)
     # If we are creating an undirected multigraph, only add the edges from the
     # upper triangle of the matrix. Otherwise, add all the edges. This relies
     # on the fact that the vertices created in the
diff --git a/networkx/drawing/__init__.py b/networkx/drawing/__init__.py
index 1e8542f..0f53309 100644
--- a/networkx/drawing/__init__.py
+++ b/networkx/drawing/__init__.py
@@ -1,6 +1,7 @@
 # graph drawing and interface to graphviz
 
 from .layout import *
+from .nx_latex import *
 from .nx_pylab import *
 from . import nx_agraph
 from . import nx_pydot
diff --git a/networkx/drawing/layout.py b/networkx/drawing/layout.py
index b6d2afe..6200b3e 100644
--- a/networkx/drawing/layout.py
+++ b/networkx/drawing/layout.py
@@ -32,6 +32,7 @@ __all__ = [
     "fruchterman_reingold_layout",
     "spiral_layout",
     "multipartite_layout",
+    "arf_layout",
 ]
 
 
@@ -703,7 +704,7 @@ def kamada_kawai_layout(
         elif dim == 2:
             pos = circular_layout(G, dim=dim)
         else:
-            pos = {n: pt for n, pt in zip(G, np.linspace(0, 1, len(G)))}
+            pos = dict(zip(G, np.linspace(0, 1, len(G))))
     pos_arr = np.array([pos[n] for n in G])
 
     pos = _kamada_kawai_solve(dist_mtx, pos_arr, dim)
@@ -1110,6 +1111,118 @@ def multipartite_layout(G, subset_key="subset", align="vertical", scale=1, cente
     return pos
 
 
+def arf_layout(
+    G,
+    pos=None,
+    scaling=1,
+    a=1.1,
+    etol=1e-6,
+    dt=1e-3,
+    max_iter=1000,
+):
+    """Arf layout for networkx
+
+    The attractive and repulsive forces (arf) layout [1]
+    improves the spring layout in three ways. First, it
+    prevents congestion of highly connected nodes due to
+    strong forcing between nodes. Second, it utilizes the
+    layout space more effectively by preventing large gaps
+    that spring layout tends to create. Lastly, the arf
+    layout represents symmetries in the layout better than
+    the default spring layout.
+
+    Parameters
+    ----------
+    G : nx.Graph or nx.DiGraph
+        Networkx graph.
+    pos : dict
+        Initial position of the nodes. If set to None a
+        random layout will be used.
+    scaling : float
+        Scales the radius of the circular layout space.
+    a : float
+        Strength of springs between connected nodes. Should be larger than 1. The greater a, the clearer the separation of unconnected sub clusters.
+    etol : float
+        Gradient sum of spring forces must be larger than `etol` before successful termination.
+    dt : float
+        Time step for force differential equation simulations.
+    max_iter : int
+        Max iterations before termination of the algorithm.
+
+    References
+    .. [1] "Self-Organization Applied to Dynamic Network Layout", M. Geipel,
+            International Journal of Modern Physics C, 2007, Vol 18, No 10, pp. 1537-1549.
+            https://doi.org/10.1142/S0129183107011558 https://arxiv.org/abs/0704.1748
+
+    Returns
+    -------
+    pos : dict
+        A dictionary of positions keyed by node.
+
+    Examples
+    --------
+    >>> G = nx.grid_graph((5, 5))
+    >>> pos = nx.arf_layout(G)
+
+    """
+    import warnings
+
+    import numpy as np
+
+    if a <= 1:
+        msg = "The parameter a should be larger than 1"
+        raise ValueError(msg)
+
+    pos_tmp = nx.random_layout(G)
+    if pos is None:
+        pos = pos_tmp
+    else:
+        for node in G.nodes():
+            if node not in pos:
+                pos[node] = pos_tmp[node].copy()
+
+    # Initialize spring constant matrix
+    N = len(G)
+    # No nodes no computation
+    if N == 0:
+        return pos
+
+    # init force of springs
+    K = np.ones((N, N)) - np.eye(N)
+    node_order = {node: i for i, node in enumerate(G)}
+    for x, y in G.edges():
+        if x != y:
+            idx, jdx = (node_order[i] for i in (x, y))
+            K[idx, jdx] = a
+
+    # vectorize values
+    p = np.asarray(list(pos.values()))
+
+    # equation 10 in [1]
+    rho = scaling * np.sqrt(N)
+
+    # looping variables
+    error = etol + 1
+    n_iter = 0
+    while error > etol:
+        diff = p[:, np.newaxis] - p[np.newaxis]
+        A = np.linalg.norm(diff, axis=-1)[..., np.newaxis]
+        # attraction_force - repulsions force
+        # suppress nans due to division; caused by diagonal set to zero.
+        # Does not affect the computation due to nansum
+        with warnings.catch_warnings():
+            warnings.simplefilter("ignore")
+            change = K[..., np.newaxis] * diff - rho / A * diff
+        change = np.nansum(change, axis=0)
+        p += change * dt
+
+        error = np.linalg.norm(change, axis=-1).sum()
+        if n_iter > max_iter:
+            break
+        n_iter += 1
+    return dict(zip(G.nodes(), p))
+
+
 def rescale_layout(pos, scale=1):
     """Returns scaled position array to (-scale, scale) in all axes.
 
diff --git a/networkx/drawing/nx_agraph.py b/networkx/drawing/nx_agraph.py
index eeb9cf8..2ffa21f 100644
--- a/networkx/drawing/nx_agraph.py
+++ b/networkx/drawing/nx_agraph.py
@@ -324,7 +324,7 @@ def view_pygraphviz(
     G : NetworkX graph
         The machine to draw.
     edgelabel : str, callable, None
-        If a string, then it specifes the edge attribute to be displayed
+        If a string, then it specifies the edge attribute to be displayed
         on the edge labels. If a callable, then it is called for each
         edge and it should return the string to be displayed on the edges.
         The function signature of `edgelabel` should be edgelabel(data),
@@ -458,52 +458,3 @@ def view_pygraphviz(
         Image.open(path.name).show()
 
     return path.name, A
-
-
-def display_pygraphviz(graph, path, format=None, prog=None, args=""):
-    """Internal function to display a graph in OS dependent manner.
-
-    Parameters
-    ----------
-    graph : PyGraphviz graph
-        A PyGraphviz AGraph instance.
-    path :  file object
-        An already opened file object that will be closed.
-    format : str, None
-        An attempt is made to guess the output format based on the extension
-        of the filename. If that fails, the value of `format` is used.
-    prog : string
-        Name of Graphviz layout program.
-    args : str
-        Additional arguments to pass to the Graphviz layout program.
-
-    Notes
-    -----
-    If this function is called in succession too quickly, sometimes the
-    image is not displayed. So you might consider time.sleep(.5) between
-    calls if you experience problems.
-
-    """
-    import warnings
-
-    from PIL import Image
-
-    warnings.warn(
-        "display_pygraphviz is deprecated and will be removed in NetworkX 3.0. "
-        "To view a graph G using pygraphviz, use nx.nx_agraph.view_pygraphviz(G). "
-        "To view a graph from file, consider an image processing libary like "
-        "`Pillow`, e.g. ``PIL.Image.open(path.name).show()``",
-        DeprecationWarning,
-    )
-    if format is None:
-        filename = path.name
-        format = os.path.splitext(filename)[1].lower()[1:]
-    if not format:
-        # Let the draw() function use its default
-        format = None
-
-    # Save to a file and display in the default viewer.
-    # We must close the file before viewing it.
-    graph.draw(path, format, prog, args)
-    path.close()
-    Image.open(filename).show()
diff --git a/networkx/drawing/nx_latex.py b/networkx/drawing/nx_latex.py
new file mode 100644
index 0000000..6312f71
--- /dev/null
+++ b/networkx/drawing/nx_latex.py
@@ -0,0 +1,571 @@
+r"""
+*****
+LaTeX
+*****
+
+Export NetworkX graphs in LaTeX format using the TikZ library within TeX/LaTeX.
+Usually, you will want the drawing to appear in a figure environment so
+you use ``to_latex(G, caption="A caption")``. If you want the raw
+drawing commands without a figure environment use :func:`to_latex_raw`.
+And if you want to write to a file instead of just returning the latex
+code as a string, use ``write_latex(G, "filename.tex", caption="A caption")``.
+
+To construct a figure with subfigures for each graph to be shown, provide
+``to_latex`` or ``write_latex`` a list of graphs, a list of subcaptions,
+and a number of rows of subfigures inside the figure.
+
+To be able to refer to the figures or subfigures in latex using ``\\ref``,
+the keyword ``latex_label`` is available for figures and `sub_labels` for
+a list of labels, one for each subfigure.
+
+We intend to eventually provide an interface to the TikZ Graph
+features which include e.g. layout algorithms.
+
+Let us know via github what you'd like to see available, or better yet
+give us some code to do it, or even better make a github pull request
+to add the feature.
+
+The TikZ approach
+=================
+Drawing options can be stored on the graph as node/edge attributes, or
+can be provided as dicts keyed by node/edge to a string of the options
+for that node/edge. Similarly a label can be shown for each node/edge
+by specifying the labels as graph node/edge attributes or by providing
+a dict keyed by node/edge to the text to be written for that node/edge.
+
+Options for the tikzpicture environment (e.g. "[scale=2]") can be provided
+via a keyword argument. Similarly default node and edge options can be
+provided through keywords arguments. The default node options are applied
+to the single TikZ "path" that draws all nodes (and no edges). The default edge
+options are applied to a TikZ "scope" which contains a path for each edge.
+
+Examples
+========
+>>> G = nx.path_graph(3)
+>>> nx.write_latex(G, "just_my_figure.tex", as_document=True)
+>>> nx.write_latex(G, "my_figure.tex", caption="A path graph", latex_label="fig1")
+>>> latex_code = nx.to_latex(G)  # a string rather than a file
+
+You can change many features of the nodes and edges.
+
+>>> G = nx.path_graph(4, create_using=nx.DiGraph)
+>>> pos = {n: (n, n) for n in G}  # nodes set on a line
+
+>>> G.nodes[0]["style"] = "blue"
+>>> G.nodes[2]["style"] = "line width=3,draw"
+>>> G.nodes[3]["label"] = "Stop"
+>>> G.edges[(0, 1)]["label"] = "1st Step"
+>>> G.edges[(0, 1)]["label_opts"] = "near start"
+>>> G.edges[(1, 2)]["style"] = "line width=3"
+>>> G.edges[(1, 2)]["label"] = "2nd Step"
+>>> G.edges[(2, 3)]["style"] = "green"
+>>> G.edges[(2, 3)]["label"] = "3rd Step"
+>>> G.edges[(2, 3)]["label_opts"] = "near end"
+
+>>> nx.write_latex(G, "latex_graph.tex", pos=pos, as_document=True)
+
+Then compile the LaTeX using something like ``pdflatex latex_graph.tex``
+and view the pdf file created: ``latex_graph.pdf``.
+
+If you want **subfigures** each containing one graph, you can input a list of graphs.
+
+>>> H1 = nx.path_graph(4)
+>>> H2 = nx.complete_graph(4)
+>>> H3 = nx.path_graph(8)
+>>> H4 = nx.complete_graph(8)
+>>> graphs = [H1, H2, H3, H4]
+>>> caps = ["Path 4", "Complete graph 4", "Path 8", "Complete graph 8"]
+>>> lbls = ["fig2a", "fig2b", "fig2c", "fig2d"]
+>>> nx.write_latex(graphs, "subfigs.tex", n_rows=2, sub_captions=caps, sub_labels=lbls)
+>>> latex_code = nx.to_latex(graphs, n_rows=2, sub_captions=caps, sub_labels=lbls)
+
+>>> node_color = {0: "red", 1: "orange", 2: "blue", 3: "gray!90"}
+>>> edge_width = {e: "line width=1.5" for e in H3.edges}
+>>> pos = nx.circular_layout(H3)
+>>> latex_code = nx.to_latex(H3, pos, node_options=node_color, edge_options=edge_width)
+>>> print(latex_code)
+\documentclass{report}
+\usepackage{tikz}
+\usepackage{subcaption}
+<BLANKLINE>
+\begin{document}
+\begin{figure}
+  \begin{tikzpicture}
+      \draw
+        (1.0, 0.0) node[red] (0){0}
+        (0.707, 0.707) node[orange] (1){1}
+        (-0.0, 1.0) node[blue] (2){2}
+        (-0.707, 0.707) node[gray!90] (3){3}
+        (-1.0, -0.0) node (4){4}
+        (-0.707, -0.707) node (5){5}
+        (0.0, -1.0) node (6){6}
+        (0.707, -0.707) node (7){7};
+      \begin{scope}[-]
+        \draw[line width=1.5] (0) to (1);
+        \draw[line width=1.5] (1) to (2);
+        \draw[line width=1.5] (2) to (3);
+        \draw[line width=1.5] (3) to (4);
+        \draw[line width=1.5] (4) to (5);
+        \draw[line width=1.5] (5) to (6);
+        \draw[line width=1.5] (6) to (7);
+      \end{scope}
+    \end{tikzpicture}
+\end{figure}
+\end{document}
+
+Notes
+-----
+If you want to change the preamble/postamble of the figure/document/subfigure
+environment, use the keyword arguments: `figure_wrapper`, `document_wrapper`,
+`subfigure_wrapper`. The default values are stored in private variables
+e.g. ``nx.nx_layout._DOCUMENT_WRAPPER``
+
+References
+----------
+TikZ:          https://tikz.dev/
+
+TikZ options details:   https://tikz.dev/tikz-actions
+"""
+import numbers
+import os
+
+import networkx as nx
+
+__all__ = [
+    "to_latex_raw",
+    "to_latex",
+    "write_latex",
+]
+
+
+@nx.utils.not_implemented_for("multigraph")
+def to_latex_raw(
+    G,
+    pos="pos",
+    tikz_options="",
+    default_node_options="",
+    node_options="node_options",
+    node_label="label",
+    default_edge_options="",
+    edge_options="edge_options",
+    edge_label="label",
+    edge_label_options="edge_label_options",
+):
+    """Return a string of the LaTeX/TikZ code to draw `G`
+
+    This function produces just the code for the tikzpicture
+    without any enclosing environment.
+
+    Parameters
+    ==========
+    G : NetworkX graph
+        The NetworkX graph to be drawn
+    pos : string or dict (default "pos")
+        The name of the node attribute on `G` that holds the position of each node.
+        Positions can be sequences of length 2 with numbers for (x,y) coordinates.
+        They can also be strings to denote positions in TikZ style, such as (x, y)
+        or (angle:radius).
+        If a dict, it should be keyed by node to a position.
+        If an empty dict, a circular layout is computed by TikZ.
+    tikz_options : string
+        The tikzpicture options description defining the options for the picture.
+        Often large scale options like `[scale=2]`.
+    default_node_options : string
+        The draw options for a path of nodes. Individual node options override these.
+    node_options : string or dict
+        The name of the node attribute on `G` that holds the options for each node.
+        Or a dict keyed by node to a string holding the options for that node.
+    node_label : string or dict
+        The name of the node attribute on `G` that holds the node label (text)
+        displayed for each node. If the attribute is "" or not present, the node
+        itself is drawn as a string. LaTeX processing such as ``"$A_1$"`` is allowed.
+        Or a dict keyed by node to a string holding the label for that node.
+    default_edge_options : string
+        The options for the scope drawing all edges. The default is "[-]" for
+        undirected graphs and "[->]" for directed graphs.
+    edge_options : string or dict
+        The name of the edge attribute on `G` that holds the options for each edge.
+        If the edge is a self-loop and ``"loop" not in edge_options`` the option
+        "loop," is added to the options for the self-loop edge. Hence you can
+        use "[loop above]" explicitly, but the default is "[loop]".
+        Or a dict keyed by edge to a string holding the options for that edge.
+    edge_label : string or dict
+        The name of the edge attribute on `G` that holds the edge label (text)
+        displayed for each edge. If the attribute is "" or not present, no edge
+        label is drawn.
+        Or a dict keyed by edge to a string holding the label for that edge.
+    edge_label_options : string or dict
+        The name of the edge attribute on `G` that holds the label options for
+        each edge. For example, "[sloped,above,blue]". The default is no options.
+        Or a dict keyed by edge to a string holding the label options for that edge.
+
+    Returns
+    =======
+    latex_code : string
+       The text string which draws the desired graph(s) when compiled by LaTeX.
+
+    See Also
+    ========
+    to_latex
+    write_latex
+    """
+    i4 = "\n    "
+    i8 = "\n        "
+
+    # set up position dict
+    # TODO allow pos to be None and use a nice TikZ default
+    if not isinstance(pos, dict):
+        pos = nx.get_node_attributes(G, pos)
+    if not pos:
+        # circular layout with radius 2
+        pos = {n: f"({round(360.0 * i / len(G), 3)}:2)" for i, n in enumerate(G)}
+    for node in G:
+        if node not in pos:
+            raise nx.NetworkXError(f"node {node} has no specified pos {pos}")
+        posnode = pos[node]
+        if not isinstance(posnode, str):
+            try:
+                posx, posy = posnode
+                pos[node] = f"({round(posx, 3)}, {round(posy, 3)})"
+            except (TypeError, ValueError):
+                msg = f"position pos[{node}] is not 2-tuple or a string: {posnode}"
+                raise nx.NetworkXError(msg)
+
+    # set up all the dicts
+    if not isinstance(node_options, dict):
+        node_options = nx.get_node_attributes(G, node_options)
+    if not isinstance(node_label, dict):
+        node_label = nx.get_node_attributes(G, node_label)
+    if not isinstance(edge_options, dict):
+        edge_options = nx.get_edge_attributes(G, edge_options)
+    if not isinstance(edge_label, dict):
+        edge_label = nx.get_edge_attributes(G, edge_label)
+    if not isinstance(edge_label_options, dict):
+        edge_label_options = nx.get_edge_attributes(G, edge_label_options)
+
+    # process default options (add brackets or not)
+    topts = "" if tikz_options == "" else f"[{tikz_options.strip('[]')}]"
+    defn = "" if default_node_options == "" else f"[{default_node_options.strip('[]')}]"
+    linestyle = f"{'->' if G.is_directed() else '-'}"
+    if default_edge_options == "":
+        defe = "[" + linestyle + "]"
+    elif "-" in default_edge_options:
+        defe = default_edge_options
+    else:
+        defe = f"[{linestyle},{default_edge_options.strip('[]')}]"
+
+    # Construct the string line by line
+    result = "  \\begin{tikzpicture}" + topts
+    result += i4 + "  \\draw" + defn
+    # load the nodes
+    for n in G:
+        # node options goes inside square brackets
+        nopts = f"[{node_options[n].strip('[]')}]" if n in node_options else ""
+        # node text goes inside curly brackets {}
+        ntext = f"{{{node_label[n]}}}" if n in node_label else f"{{{n}}}"
+
+        result += i8 + f"{pos[n]} node{nopts} ({n}){ntext}"
+    result += ";\n"
+
+    # load the edges
+    result += "      \\begin{scope}" + defe
+    for edge in G.edges:
+        u, v = edge[:2]
+        e_opts = f"{edge_options[edge]}".strip("[]") if edge in edge_options else ""
+        # add loop options for selfloops if not present
+        if u == v and "loop" not in e_opts:
+            e_opts = "loop," + e_opts
+        e_opts = f"[{e_opts}]" if e_opts != "" else ""
+        # TODO -- handle bending of multiedges
+
+        els = edge_label_options[edge] if edge in edge_label_options else ""
+        # edge label options goes inside square brackets []
+        els = f"[{els.strip('[]')}]"
+        # edge text is drawn using the TikZ node command inside curly brackets {}
+        e_label = f" node{els} {{{edge_label[edge]}}}" if edge in edge_label else ""
+
+        result += i8 + f"\\draw{e_opts} ({u}) to{e_label} ({v});"
+
+    result += "\n      \\end{scope}\n    \\end{tikzpicture}\n"
+    return result
+
+
+_DOC_WRAPPER_TIKZ = r"""\documentclass{{report}}
+\usepackage{{tikz}}
+\usepackage{{subcaption}}
+
+\begin{{document}}
+{content}
+\end{{document}}"""
+
+
+_FIG_WRAPPER = r"""\begin{{figure}}
+{content}{caption}{label}
+\end{{figure}}"""
+
+
+_SUBFIG_WRAPPER = r"""  \begin{{subfigure}}{{{size}\textwidth}}
+{content}{caption}{label}
+  \end{{subfigure}}"""
+
+
+def to_latex(
+    Gbunch,
+    pos="pos",
+    tikz_options="",
+    default_node_options="",
+    node_options="node_options",
+    node_label="node_label",
+    default_edge_options="",
+    edge_options="edge_options",
+    edge_label="edge_label",
+    edge_label_options="edge_label_options",
+    caption="",
+    latex_label="",
+    sub_captions=None,
+    sub_labels=None,
+    n_rows=1,
+    as_document=True,
+    document_wrapper=_DOC_WRAPPER_TIKZ,
+    figure_wrapper=_FIG_WRAPPER,
+    subfigure_wrapper=_SUBFIG_WRAPPER,
+):
+    """Return latex code to draw the graph(s) in `Gbunch`
+
+    The TikZ drawing utility in LaTeX is used to draw the graph(s).
+    If `Gbunch` is a graph, it is drawn in a figure environment.
+    If `Gbunch` is an iterable of graphs, each is drawn in a subfigure environment
+    within a single figure environment.
+
+    If `as_document` is True, the figure is wrapped inside a document environment
+    so that the resulting string is ready to be compiled by LaTeX. Otherwise,
+    the string is ready for inclusion in a larger tex document using ``\\include``
+    or ``\\input`` statements.
+
+    Parameters
+    ==========
+    Gbunch : NetworkX graph or iterable of NetworkX graphs
+        The NetworkX graph to be drawn or an iterable of graphs
+        to be drawn inside subfigures of a single figure.
+    pos : string or list of strings
+        The name of the node attribute on `G` that holds the position of each node.
+        Positions can be sequences of length 2 with numbers for (x,y) coordinates.
+        They can also be strings to denote positions in TikZ style, such as (x, y)
+        or (angle:radius).
+        If a dict, it should be keyed by node to a position.
+        If an empty dict, a circular layout is computed by TikZ.
+        If you are drawing many graphs in subfigures, use a list of position dicts.
+    tikz_options : string
+        The tikzpicture options description defining the options for the picture.
+        Often large scale options like `[scale=2]`.
+    default_node_options : string
+        The draw options for a path of nodes. Individual node options override these.
+    node_options : string or dict
+        The name of the node attribute on `G` that holds the options for each node.
+        Or a dict keyed by node to a string holding the options for that node.
+    node_label : string or dict
+        The name of the node attribute on `G` that holds the node label (text)
+        displayed for each node. If the attribute is "" or not present, the node
+        itself is drawn as a string. LaTeX processing such as ``"$A_1$"`` is allowed.
+        Or a dict keyed by node to a string holding the label for that node.
+    default_edge_options : string
+        The options for the scope drawing all edges. The default is "[-]" for
+        undirected graphs and "[->]" for directed graphs.
+    edge_options : string or dict
+        The name of the edge attribute on `G` that holds the options for each edge.
+        If the edge is a self-loop and ``"loop" not in edge_options`` the option
+        "loop," is added to the options for the self-loop edge. Hence you can
+        use "[loop above]" explicitly, but the default is "[loop]".
+        Or a dict keyed by edge to a string holding the options for that edge.
+    edge_label : string or dict
+        The name of the edge attribute on `G` that holds the edge label (text)
+        displayed for each edge. If the attribute is "" or not present, no edge
+        label is drawn.
+        Or a dict keyed by edge to a string holding the label for that edge.
+    edge_label_options : string or dict
+        The name of the edge attribute on `G` that holds the label options for
+        each edge. For example, "[sloped,above,blue]". The default is no options.
+        Or a dict keyed by edge to a string holding the label options for that edge.
+    caption : string
+        The caption string for the figure environment
+    latex_label : string
+        The latex label used for the figure for easy referral from the main text
+    sub_captions : list of strings
+        The sub_caption string for each subfigure in the figure
+    sub_labels : list of strings
+        The latex label for each subfigure in the figure
+    n_rows : int
+        The number of rows of subfigures to arrange for multiple graphs
+    as_document : bool
+        Whether to wrap the latex code in a document environment for compiling
+    document_wrapper : formatted text string with variable ``content``.
+        This text is called to evaluate the content embedded in a document
+        environment with a preamble setting up TikZ.
+    figure_wrapper : formatted text string
+        This text is evaluated with variables ``content``, ``caption`` and ``label``.
+        It wraps the content and if a caption is provided, adds the latex code for
+        that caption, and if a label is provided, adds the latex code for a label.
+    subfigure_wrapper : formatted text string
+        This text evaluates variables ``size``, ``content``, ``caption`` and ``label``.
+        It wraps the content and if a caption is provided, adds the latex code for
+        that caption, and if a label is provided, adds the latex code for a label.
+        The size is the vertical size of each row of subfigures as a fraction.
+
+    Returns
+    =======
+    latex_code : string
+        The text string which draws the desired graph(s) when compiled by LaTeX.
+
+    See Also
+    ========
+    write_latex
+    to_latex_raw
+    """
+    if hasattr(Gbunch, "adj"):
+        raw = to_latex_raw(
+            Gbunch,
+            pos,
+            tikz_options,
+            default_node_options,
+            node_options,
+            node_label,
+            default_edge_options,
+            edge_options,
+            edge_label,
+            edge_label_options,
+        )
+    else:  # iterator of graphs
+        sbf = subfigure_wrapper
+        size = 1 / n_rows
+
+        N = len(Gbunch)
+        if isinstance(pos, (str, dict)):
+            pos = [pos] * N
+        if sub_captions is None:
+            sub_captions = [""] * N
+        if sub_labels is None:
+            sub_labels = [""] * N
+        if not (len(Gbunch) == len(pos) == len(sub_captions) == len(sub_labels)):
+            raise nx.NetworkXError(
+                "length of Gbunch, sub_captions and sub_figures must agree"
+            )
+
+        raw = ""
+        for G, pos, subcap, sublbl in zip(Gbunch, pos, sub_captions, sub_labels):
+            subraw = to_latex_raw(
+                G,
+                pos,
+                tikz_options,
+                default_node_options,
+                node_options,
+                node_label,
+                default_edge_options,
+                edge_options,
+                edge_label,
+                edge_label_options,
+            )
+            cap = f"    \\caption{{{subcap}}}" if subcap else ""
+            lbl = f"\\label{{{sublbl}}}" if sublbl else ""
+            raw += sbf.format(size=size, content=subraw, caption=cap, label=lbl)
+            raw += "\n"
+
+    # put raw latex code into a figure environment and optionally into a document
+    raw = raw[:-1]
+    cap = f"\n  \\caption{{{caption}}}" if caption else ""
+    lbl = f"\\label{{{latex_label}}}" if latex_label else ""
+    fig = figure_wrapper.format(content=raw, caption=cap, label=lbl)
+    if as_document:
+        return document_wrapper.format(content=fig)
+    return fig
+
+
+@nx.utils.open_file(1, mode="w")
+def write_latex(Gbunch, path, **options):
+    """Write the latex code to draw the graph(s) onto `path`.
+
+    This convenience function creates the latex drawing code as a string
+    and writes that to a file ready to be compiled when `as_document` is True
+    or ready to be ``import`` ed or ``include`` ed into your main LaTeX document.
+
+    The `path` argument can be a string filename or a file handle to write to.
+
+    Parameters
+    ----------
+    Gbunch : NetworkX graph or iterable of NetworkX graphs
+        If Gbunch is a graph, it is drawn in a figure environment.
+        If Gbunch is an iterable of graphs, each is drawn in a subfigure
+        environment within a single figure environment.
+    path : filename
+        Filename or file handle to write to
+    options : dict
+        By default, TikZ is used with options: (others are ignored)::
+
+            pos : string or dict or list
+                The name of the node attribute on `G` that holds the position of each node.
+                Positions can be sequences of length 2 with numbers for (x,y) coordinates.
+                They can also be strings to denote positions in TikZ style, such as (x, y)
+                or (angle:radius).
+                If a dict, it should be keyed by node to a position.
+                If an empty dict, a circular layout is computed by TikZ.
+                If you are drawing many graphs in subfigures, use a list of position dicts.
+            tikz_options : string
+                The tikzpicture options description defining the options for the picture.
+                Often large scale options like `[scale=2]`.
+            default_node_options : string
+                The draw options for a path of nodes. Individual node options override these.
+            node_options : string or dict
+                The name of the node attribute on `G` that holds the options for each node.
+                Or a dict keyed by node to a string holding the options for that node.
+            node_label : string or dict
+                The name of the node attribute on `G` that holds the node label (text)
+                displayed for each node. If the attribute is "" or not present, the node
+                itself is drawn as a string. LaTeX processing such as ``"$A_1$"`` is allowed.
+                Or a dict keyed by node to a string holding the label for that node.
+            default_edge_options : string
+                The options for the scope drawing all edges. The default is "[-]" for
+                undirected graphs and "[->]" for directed graphs.
+            edge_options : string or dict
+                The name of the edge attribute on `G` that holds the options for each edge.
+                If the edge is a self-loop and ``"loop" not in edge_options`` the option
+                "loop," is added to the options for the self-loop edge. Hence you can
+                use "[loop above]" explicitly, but the default is "[loop]".
+                Or a dict keyed by edge to a string holding the options for that edge.
+            edge_label : string or dict
+                The name of the edge attribute on `G` that holds the edge label (text)
+                displayed for each edge. If the attribute is "" or not present, no edge
+                label is drawn.
+                Or a dict keyed by edge to a string holding the label for that edge.
+            edge_label_options : string or dict
+                The name of the edge attribute on `G` that holds the label options for
+                each edge. For example, "[sloped,above,blue]". The default is no options.
+                Or a dict keyed by edge to a string holding the label options for that edge.
+            caption : string
+                The caption string for the figure environment
+            latex_label : string
+                The latex label used for the figure for easy referral from the main text
+            sub_captions : list of strings
+                The sub_caption string for each subfigure in the figure
+            sub_labels : list of strings
+                The latex label for each subfigure in the figure
+            n_rows : int
+                The number of rows of subfigures to arrange for multiple graphs
+            as_document : bool
+                Whether to wrap the latex code in a document environment for compiling
+            document_wrapper : formatted text string with variable ``content``.
+                This text is called to evaluate the content embedded in a document
+                environment with a preamble setting up the TikZ syntax.
+            figure_wrapper : formatted text string
+                This text is evaluated with variables ``content``, ``caption`` and ``label``.
+                It wraps the content and if a caption is provided, adds the latex code for
+                that caption, and if a label is provided, adds the latex code for a label.
+            subfigure_wrapper : formatted text string
+                This text evaluates variables ``size``, ``content``, ``caption`` and ``label``.
+                It wraps the content and if a caption is provided, adds the latex code for
+                that caption, and if a label is provided, adds the latex code for a label.
+                The size is the vertical size of each row of subfigures as a fraction.
+
+    See Also
+    ========
+    to_latex
+    """
+    path.write(to_latex(Gbunch, **options))
diff --git a/networkx/drawing/nx_pydot.py b/networkx/drawing/nx_pydot.py
index 2055eb3..93513b0 100644
--- a/networkx/drawing/nx_pydot.py
+++ b/networkx/drawing/nx_pydot.py
@@ -47,7 +47,7 @@ def write_dot(G, path):
         "nx.nx_agraph.write_dot instead.\n\n"
         "See https://github.com/networkx/networkx/issues/5723"
     )
-    warnings.warn(msg, PendingDeprecationWarning, stacklevel=2)
+    warnings.warn(msg, DeprecationWarning, stacklevel=2)
     P = to_pydot(G)
     path.write(P.to_string())
     return
@@ -84,7 +84,7 @@ def read_dot(path):
         "nx.nx_agraph.read_dot instead.\n\n"
         "See https://github.com/networkx/networkx/issues/5723"
     )
-    warnings.warn(msg, PendingDeprecationWarning, stacklevel=2)
+    warnings.warn(msg, DeprecationWarning, stacklevel=2)
 
     data = path.read()
 
@@ -123,7 +123,7 @@ def from_pydot(P):
         "known issues and is not actively maintained.\n\n"
         "See https://github.com/networkx/networkx/issues/5723"
     )
-    warnings.warn(msg, PendingDeprecationWarning, stacklevel=2)
+    warnings.warn(msg, DeprecationWarning, stacklevel=2)
 
     if P.get_strict(None):  # pydot bug: get_strict() shouldn't take argument
         multiedges = False
@@ -223,7 +223,7 @@ def to_pydot(N):
         "known issues and is not actively maintained.\n\n"
         "See https://github.com/networkx/networkx/issues/5723"
     )
-    warnings.warn(msg, PendingDeprecationWarning, stacklevel=2)
+    warnings.warn(msg, DeprecationWarning, stacklevel=2)
 
     # set Graphviz graph type
     if N.is_directed():
@@ -352,7 +352,7 @@ def graphviz_layout(G, prog="neato", root=None):
         "nx.nx_agraph.graphviz_layout instead.\n\n"
         "See https://github.com/networkx/networkx/issues/5723"
     )
-    warnings.warn(msg, PendingDeprecationWarning, stacklevel=2)
+    warnings.warn(msg, DeprecationWarning, stacklevel=2)
 
     return pydot_layout(G=G, prog=prog, root=root)
 
@@ -402,7 +402,7 @@ def pydot_layout(G, prog="neato", root=None):
         "known issues and is not actively maintained.\n\n"
         "See https://github.com/networkx/networkx/issues/5723"
     )
-    warnings.warn(msg, PendingDeprecationWarning, stacklevel=2)
+    warnings.warn(msg, DeprecationWarning, stacklevel=2)
     P = to_pydot(G)
     if root is not None:
         P.set("root", str(root))
diff --git a/networkx/drawing/nx_pylab.py b/networkx/drawing/nx_pylab.py
index 09e5ed3..5a7fc54 100644
--- a/networkx/drawing/nx_pylab.py
+++ b/networkx/drawing/nx_pylab.py
@@ -289,7 +289,7 @@ def draw_networkx(G, pos=None, arrows=None, with_labels=True, **kwds):
         "with_labels",
     }
 
-    if any([k not in valid_kwds for k in kwds]):
+    if any(k not in valid_kwds for k in kwds):
         invalid_args = ", ".join([k for k in kwds if k not in valid_kwds])
         raise ValueError(f"Received invalid argument(s): {invalid_args}")
 
@@ -522,8 +522,11 @@ def draw_networkx_edges(
         Also, `(offset, onoffseq)` tuples can be used as style instead of a strings.
         (See `matplotlib.patches.FancyArrowPatch`: `linestyle`)
 
-    alpha : float or None (default=None)
-        The edge transparency
+    alpha : float or array of floats (default=None)
+        The edge transparency.  This can be a single alpha value,
+        in which case it will be applied to all specified edges. Otherwise,
+        if it is an array, the elements of alpha will be applied to the colors
+        in order (cycling through alpha multiple times if necessary).
 
     edge_cmap : Matplotlib colormap, optional
         Colormap for mapping intensities of edges
@@ -575,14 +578,14 @@ def draw_networkx_edges(
         Label for legend
 
     min_source_margin : int (default=0)
-        The minimum margin (gap) at the begining of the edge at the source.
+        The minimum margin (gap) at the beginning of the edge at the source.
 
     min_target_margin : int (default=0)
         The minimum margin (gap) at the end of the edge at the target.
 
     Returns
     -------
-     matplotlib.colections.LineCollection or a list of matplotlib.patches.FancyArrowPatch
+     matplotlib.collections.LineCollection or a list of matplotlib.patches.FancyArrowPatch
         If ``arrows=True``, a list of FancyArrowPatches is returned.
         If ``arrows=False``, a LineCollection is returned.
         If ``arrows=None`` (the default), then a LineCollection is returned if
@@ -650,6 +653,42 @@ def draw_networkx_edges(
     # undirected graphs (for performance reasons) and use FancyArrowPatches
     # for directed graphs.
     # The `arrows` keyword can be used to override the default behavior
+    use_linecollection = not G.is_directed()
+    if arrows in (True, False):
+        use_linecollection = not arrows
+
+    # Some kwargs only apply to FancyArrowPatches. Warn users when they use
+    # non-default values for these kwargs when LineCollection is being used
+    # instead of silently ignoring the specified option
+    if use_linecollection and any(
+        [
+            arrowstyle is not None,
+            arrowsize != 10,
+            connectionstyle != "arc3",
+            min_source_margin != 0,
+            min_target_margin != 0,
+        ]
+    ):
+        import warnings
+
+        msg = (
+            "\n\nThe {0} keyword argument is not applicable when drawing edges\n"
+            "with LineCollection.\n\n"
+            "To make this warning go away, either specify `arrows=True` to\n"
+            "force FancyArrowPatches or use the default value for {0}.\n"
+            "Note that using FancyArrowPatches may be slow for large graphs.\n"
+        )
+        if arrowstyle is not None:
+            msg = msg.format("arrowstyle")
+        if arrowsize != 10:
+            msg = msg.format("arrowsize")
+        if connectionstyle != "arc3":
+            msg = msg.format("connectionstyle")
+        if min_source_margin != 0:
+            msg = msg.format("min_source_margin")
+        if min_target_margin != 0:
+            msg = msg.format("min_target_margin")
+        warnings.warn(msg, category=UserWarning, stacklevel=2)
 
     if arrowstyle == None:
         if G.is_directed():
@@ -657,10 +696,6 @@ def draw_networkx_edges(
         else:
             arrowstyle = "-"
 
-    use_linecollection = not G.is_directed()
-    if arrows in (True, False):
-        use_linecollection = not arrows
-
     if ax is None:
         ax = plt.gca()
 
@@ -749,7 +784,7 @@ def draw_networkx_edges(
                 # is 0, e.g. for a single node. In this case, fall back to scaling
                 # by the maximum node size
                 selfloop_ht = 0.005 * max_nodesize if h == 0 else h
-                # this is called with _screen space_ values so covert back
+                # this is called with _screen space_ values so convert back
                 # to data space
                 data_loc = ax.transData.inverted().transform(posA)
                 v_shift = 0.1 * selfloop_ht
@@ -1077,7 +1112,7 @@ def draw_networkx_edge_labels(
     ax : Matplotlib Axes object, optional
         Draw the graph in the specified Matplotlib axes.
 
-    rotate : bool (deafult=True)
+    rotate : bool (default=True)
         Rotate edge labels to lie parallel to edges
 
     clip_on : bool (default=True)
@@ -1149,7 +1184,7 @@ def draw_networkx_edge_labels(
             trans_angle = 0.0
         # use default box of white with white border
         if bbox is None:
-            bbox = dict(boxstyle="round", ec=(1.0, 1.0, 1.0), fc=(1.0, 1.0, 1.0))
+            bbox = {"boxstyle": "round", "ec": (1.0, 1.0, 1.0), "fc": (1.0, 1.0, 1.0)}
         if not isinstance(label, str):
             label = str(label)  # this makes "1" and 1 labeled the same
 
diff --git a/networkx/drawing/tests/test_latex.py b/networkx/drawing/tests/test_latex.py
new file mode 100644
index 0000000..14ab542
--- /dev/null
+++ b/networkx/drawing/tests/test_latex.py
@@ -0,0 +1,292 @@
+import pytest
+
+import networkx as nx
+
+
+def test_tikz_attributes():
+    G = nx.path_graph(4, create_using=nx.DiGraph)
+    pos = {n: (n, n) for n in G}
+
+    G.add_edge(0, 0)
+    G.edges[(0, 0)]["label"] = "Loop"
+    G.edges[(0, 0)]["label_options"] = "midway"
+
+    G.nodes[0]["style"] = "blue"
+    G.nodes[1]["style"] = "line width=3,draw"
+    G.nodes[2]["style"] = "circle,draw,blue!50"
+    G.nodes[3]["label"] = "Stop"
+    G.edges[(0, 1)]["label"] = "1st Step"
+    G.edges[(0, 1)]["label_options"] = "near end"
+    G.edges[(2, 3)]["label"] = "3rd Step"
+    G.edges[(2, 3)]["label_options"] = "near start"
+    G.edges[(2, 3)]["style"] = "bend left,green"
+    G.edges[(1, 2)]["label"] = "2nd"
+    G.edges[(1, 2)]["label_options"] = "pos=0.5"
+    G.edges[(1, 2)]["style"] = ">->,bend right,line width=3,green!90"
+
+    output_tex = nx.to_latex(
+        G,
+        pos=pos,
+        as_document=False,
+        tikz_options="[scale=3]",
+        node_options="style",
+        edge_options="style",
+        node_label="label",
+        edge_label="label",
+        edge_label_options="label_options",
+    )
+    expected_tex = r"""\begin{figure}
+  \begin{tikzpicture}[scale=3]
+      \draw
+        (0, 0) node[blue] (0){0}
+        (1, 1) node[line width=3,draw] (1){1}
+        (2, 2) node[circle,draw,blue!50] (2){2}
+        (3, 3) node (3){Stop};
+      \begin{scope}[->]
+        \draw (0) to node[near end] {1st Step} (1);
+        \draw[loop,] (0) to node[midway] {Loop} (0);
+        \draw[>->,bend right,line width=3,green!90] (1) to node[pos=0.5] {2nd} (2);
+        \draw[bend left,green] (2) to node[near start] {3rd Step} (3);
+      \end{scope}
+    \end{tikzpicture}
+\end{figure}"""
+
+    assert output_tex == expected_tex
+    # print(output_tex)
+    # # Pretty way to assert that A.to_document() == expected_tex
+    # content_same = True
+    # for aa, bb in zip(expected_tex.split("\n"), output_tex.split("\n")):
+    #     if aa != bb:
+    #         content_same = False
+    #         print(f"-{aa}|\n+{bb}|")
+    # assert content_same
+
+
+def test_basic_multiple_graphs():
+    H1 = nx.path_graph(4)
+    H2 = nx.complete_graph(4)
+    H3 = nx.path_graph(8)
+    H4 = nx.complete_graph(8)
+    captions = [
+        "Path on 4 nodes",
+        "Complete graph on 4 nodes",
+        "Path on 8 nodes",
+        "Complete graph on 8 nodes",
+    ]
+    labels = ["fig2a", "fig2b", "fig2c", "fig2d"]
+    latex_code = nx.to_latex(
+        [H1, H2, H3, H4],
+        n_rows=2,
+        sub_captions=captions,
+        sub_labels=labels,
+    )
+    # print(latex_code)
+    assert "begin{document}" in latex_code
+    assert "begin{figure}" in latex_code
+    assert latex_code.count("begin{subfigure}") == 4
+    assert latex_code.count("tikzpicture") == 8
+    assert latex_code.count("[-]") == 4
+
+
+def test_basic_tikz():
+    expected_tex = r"""\documentclass{report}
+\usepackage{tikz}
+\usepackage{subcaption}
+
+\begin{document}
+\begin{figure}
+  \begin{subfigure}{0.5\textwidth}
+  \begin{tikzpicture}[scale=2]
+      \draw[gray!90]
+        (0.749, 0.702) node[red!90] (0){0}
+        (1.0, -0.014) node[red!90] (1){1}
+        (-0.777, -0.705) node (2){2}
+        (-0.984, 0.042) node (3){3}
+        (-0.028, 0.375) node[cyan!90] (4){4}
+        (-0.412, 0.888) node (5){5}
+        (0.448, -0.856) node (6){6}
+        (0.003, -0.431) node[cyan!90] (7){7};
+      \begin{scope}[->,gray!90]
+        \draw (0) to (4);
+        \draw (0) to (5);
+        \draw (0) to (6);
+        \draw (0) to (7);
+        \draw (1) to (4);
+        \draw (1) to (5);
+        \draw (1) to (6);
+        \draw (1) to (7);
+        \draw (2) to (4);
+        \draw (2) to (5);
+        \draw (2) to (6);
+        \draw (2) to (7);
+        \draw (3) to (4);
+        \draw (3) to (5);
+        \draw (3) to (6);
+        \draw (3) to (7);
+      \end{scope}
+    \end{tikzpicture}
+    \caption{My tikz number 1 of 2}\label{tikz_1_2}
+  \end{subfigure}
+  \begin{subfigure}{0.5\textwidth}
+  \begin{tikzpicture}[scale=2]
+      \draw[gray!90]
+        (0.749, 0.702) node[green!90] (0){0}
+        (1.0, -0.014) node[green!90] (1){1}
+        (-0.777, -0.705) node (2){2}
+        (-0.984, 0.042) node (3){3}
+        (-0.028, 0.375) node[purple!90] (4){4}
+        (-0.412, 0.888) node (5){5}
+        (0.448, -0.856) node (6){6}
+        (0.003, -0.431) node[purple!90] (7){7};
+      \begin{scope}[->,gray!90]
+        \draw (0) to (4);
+        \draw (0) to (5);
+        \draw (0) to (6);
+        \draw (0) to (7);
+        \draw (1) to (4);
+        \draw (1) to (5);
+        \draw (1) to (6);
+        \draw (1) to (7);
+        \draw (2) to (4);
+        \draw (2) to (5);
+        \draw (2) to (6);
+        \draw (2) to (7);
+        \draw (3) to (4);
+        \draw (3) to (5);
+        \draw (3) to (6);
+        \draw (3) to (7);
+      \end{scope}
+    \end{tikzpicture}
+    \caption{My tikz number 2 of 2}\label{tikz_2_2}
+  \end{subfigure}
+  \caption{A graph generated with python and latex.}
+\end{figure}
+\end{document}"""
+
+    edges = [
+        (0, 4),
+        (0, 5),
+        (0, 6),
+        (0, 7),
+        (1, 4),
+        (1, 5),
+        (1, 6),
+        (1, 7),
+        (2, 4),
+        (2, 5),
+        (2, 6),
+        (2, 7),
+        (3, 4),
+        (3, 5),
+        (3, 6),
+        (3, 7),
+    ]
+    G = nx.DiGraph()
+    G.add_nodes_from(range(8))
+    G.add_edges_from(edges)
+    pos = {
+        0: (0.7490296171687696, 0.702353520257394),
+        1: (1.0, -0.014221357723796535),
+        2: (-0.7765783344161441, -0.7054170966808919),
+        3: (-0.9842690223417624, 0.04177547602465483),
+        4: (-0.02768523817180917, 0.3745724439551441),
+        5: (-0.41154855146767433, 0.8880106515525136),
+        6: (0.44780153389148264, -0.8561492709269164),
+        7: (0.0032499953371383505, -0.43092436645809945),
+    }
+
+    rc_node_color = {0: "red!90", 1: "red!90", 4: "cyan!90", 7: "cyan!90"}
+    gp_node_color = {0: "green!90", 1: "green!90", 4: "purple!90", 7: "purple!90"}
+
+    H = G.copy()
+    nx.set_node_attributes(G, rc_node_color, "color")
+    nx.set_node_attributes(H, gp_node_color, "color")
+
+    sub_captions = ["My tikz number 1 of 2", "My tikz number 2 of 2"]
+    sub_labels = ["tikz_1_2", "tikz_2_2"]
+
+    output_tex = nx.to_latex(
+        [G, H],
+        [pos, pos],
+        tikz_options="[scale=2]",
+        default_node_options="gray!90",
+        default_edge_options="gray!90",
+        node_options="color",
+        sub_captions=sub_captions,
+        sub_labels=sub_labels,
+        caption="A graph generated with python and latex.",
+        n_rows=2,
+        as_document=True,
+    )
+
+    assert output_tex == expected_tex
+    # print(output_tex)
+    # # Pretty way to assert that A.to_document() == expected_tex
+    # content_same = True
+    # for aa, bb in zip(expected_tex.split("\n"), output_tex.split("\n")):
+    #     if aa != bb:
+    #         content_same = False
+    #         print(f"-{aa}|\n+{bb}|")
+    # assert content_same
+
+
+def test_exception_pos_single_graph(to_latex=nx.to_latex):
+    # smoke test that pos can be a string
+    G = nx.path_graph(4)
+    to_latex(G, pos="pos")
+
+    # must include all nodes
+    pos = {0: (1, 2), 1: (0, 1), 2: (2, 1)}
+    with pytest.raises(nx.NetworkXError):
+        to_latex(G, pos)
+
+    # must have 2 values
+    pos[3] = (1, 2, 3)
+    with pytest.raises(nx.NetworkXError):
+        to_latex(G, pos)
+    pos[3] = 2
+    with pytest.raises(nx.NetworkXError):
+        to_latex(G, pos)
+
+    # check that passes with 2 values
+    pos[3] = (3, 2)
+    to_latex(G, pos)
+
+
+def test_exception_multiple_graphs(to_latex=nx.to_latex):
+    G = nx.path_graph(3)
+    pos_bad = {0: (1, 2), 1: (0, 1)}
+    pos_OK = {0: (1, 2), 1: (0, 1), 2: (2, 1)}
+    fourG = [G, G, G, G]
+    fourpos = [pos_OK, pos_OK, pos_OK, pos_OK]
+
+    # input single dict to use for all graphs
+    to_latex(fourG, pos_OK)
+    with pytest.raises(nx.NetworkXError):
+        to_latex(fourG, pos_bad)
+
+    # input list of dicts to use for all graphs
+    to_latex(fourG, fourpos)
+    with pytest.raises(nx.NetworkXError):
+        to_latex(fourG, [pos_bad, pos_bad, pos_bad, pos_bad])
+
+    # every pos dict must include all nodes
+    with pytest.raises(nx.NetworkXError):
+        to_latex(fourG, [pos_OK, pos_OK, pos_bad, pos_OK])
+
+    # test sub_captions and sub_labels (len must match Gbunch)
+    with pytest.raises(nx.NetworkXError):
+        to_latex(fourG, fourpos, sub_captions=["hi", "hi"])
+
+    with pytest.raises(nx.NetworkXError):
+        to_latex(fourG, fourpos, sub_labels=["hi", "hi"])
+
+    # all pass
+    to_latex(fourG, fourpos, sub_captions=["hi"] * 4, sub_labels=["lbl"] * 4)
+
+
+def test_exception_multigraph():
+    G = nx.path_graph(4, create_using=nx.MultiGraph)
+    G.add_edge(1, 2)
+    with pytest.raises(nx.NetworkXNotImplemented):
+        nx.to_latex(G)
diff --git a/networkx/drawing/tests/test_layout.py b/networkx/drawing/tests/test_layout.py
index f24d003..48d0e6d 100644
--- a/networkx/drawing/tests/test_layout.py
+++ b/networkx/drawing/tests/test_layout.py
@@ -66,6 +66,7 @@ class TestLayout:
         nx.kamada_kawai_layout(G)
         nx.kamada_kawai_layout(G, dim=1)
         nx.kamada_kawai_layout(G, dim=3)
+        nx.arf_layout(G)
 
     def test_smoke_string(self):
         G = self.Gs
@@ -80,6 +81,7 @@ class TestLayout:
         nx.kamada_kawai_layout(G)
         nx.kamada_kawai_layout(G, dim=1)
         nx.kamada_kawai_layout(G, dim=3)
+        nx.arf_layout(G)
 
     def check_scale_and_center(self, pos, scale, center):
         center = np.array(center)
@@ -175,6 +177,10 @@ class TestLayout:
         pos = nx.circular_layout(self.Gi)
         npos = nx.fruchterman_reingold_layout(self.Gi, pos=pos)
 
+    def test_smoke_initial_pos_arf(self):
+        pos = nx.circular_layout(self.Gi)
+        npos = nx.arf_layout(self.Gi, pos=pos)
+
     def test_fixed_node_fruchterman_reingold(self):
         # Dense version (numpy based)
         pos = nx.circular_layout(self.Gi)
@@ -242,6 +248,8 @@ class TestLayout:
         assert vpos == {}
         vpos = nx.kamada_kawai_layout(G, center=(1, 1))
         assert vpos == {}
+        vpos = nx.arf_layout(G)
+        assert vpos == {}
 
     def test_bipartite_layout(self):
         G = nx.complete_bipartite_graph(3, 5)
@@ -349,7 +357,6 @@ class TestLayout:
         self.check_kamada_kawai_costfn(pos, invdist, meanwt, 3)
 
     def test_spiral_layout(self):
-
         G = self.Gs
 
         # a lower value of resolution should result in a more compact layout
@@ -402,7 +409,6 @@ class TestLayout:
         for k, v in expectation.items():
             assert (s_vpos[k] == v).all()
         s_vpos = nx.rescale_layout_dict(vpos, scale=2)
-
         expectation = {
             0: np.array((-2, -2)),
             1: np.array((2, 2)),
@@ -411,6 +417,24 @@ class TestLayout:
         for k, v in expectation.items():
             assert (s_vpos[k] == v).all()
 
+    def test_arf_layout_partial_input_test(self):
+        """
+        Checks whether partial pos input still returns a proper position.
+        """
+        G = self.Gs
+        node = nx.utils.arbitrary_element(G)
+        pos = nx.circular_layout(G)
+        del pos[node]
+        pos = nx.arf_layout(G, pos=pos)
+        assert len(pos) == len(G)
+
+    def test_arf_layout_negative_a_check(self):
+        """
+        Checks input parameters correctly raises errors. For example,  `a` should be larger than 1
+        """
+        G = self.Gs
+        pytest.raises(ValueError, nx.arf_layout, G=G, a=-1)
+
 
 def test_multipartite_layout_nonnumeric_partition_labels():
     """See gh-5123."""
diff --git a/networkx/drawing/tests/test_pylab.py b/networkx/drawing/tests/test_pylab.py
index f642dcc..668a627 100644
--- a/networkx/drawing/tests/test_pylab.py
+++ b/networkx/drawing/tests/test_pylab.py
@@ -1,6 +1,7 @@
 """Unit tests for matplotlib drawing functions."""
 import itertools
 import os
+import warnings
 
 import pytest
 
@@ -196,7 +197,7 @@ def test_more_edge_colors_than_num_edges_directed():
         assert mpl.colors.same_color(fap.get_edgecolor(), expected)
 
 
-def test_edge_color_string_with_gloabl_alpha_undirected():
+def test_edge_color_string_with_global_alpha_undirected():
     edge_collection = nx.draw_networkx_edges(
         barbell,
         pos=nx.random_layout(barbell),
@@ -396,6 +397,7 @@ def test_labels_and_colors():
         G,
         pos,
         edgelist=[(4, 5), (5, 6), (6, 7), (7, 4)],
+        arrows=True,
         min_source_margin=0.5,
         min_target_margin=0.75,
         width=8,
@@ -752,3 +754,38 @@ def test_draw_networkx_edges_undirected_selfloop_colors():
     for fap, clr, slp in zip(ax.patches, edge_colors[-3:], sl_points):
         assert fap.get_path().contains_point(slp)
         assert mpl.colors.same_color(fap.get_edgecolor(), clr)
+    plt.delaxes(ax)
+
+
+@pytest.mark.parametrize(
+    "fap_only_kwarg",  # Non-default values for kwargs that only apply to FAPs
+    (
+        {"arrowstyle": "-"},
+        {"arrowsize": 20},
+        {"connectionstyle": "arc3,rad=0.2"},
+        {"min_source_margin": 10},
+        {"min_target_margin": 10},
+    ),
+)
+def test_user_warnings_for_unused_edge_drawing_kwargs(fap_only_kwarg):
+    """Users should get a warning when they specify a non-default value for
+    one of the kwargs that applies only to edges drawn with FancyArrowPatches,
+    but FancyArrowPatches aren't being used under the hood."""
+    G = nx.path_graph(3)
+    pos = {n: (n, n) for n in G}
+    fig, ax = plt.subplots()
+    # By default, an undirected graph will use LineCollection to represent
+    # the edges
+    kwarg_name = list(fap_only_kwarg.keys())[0]
+    with pytest.warns(
+        UserWarning, match=f"\n\nThe {kwarg_name} keyword argument is not applicable"
+    ):
+        nx.draw_networkx_edges(G, pos, ax=ax, **fap_only_kwarg)
+    # FancyArrowPatches are always used when `arrows=True` is specified.
+    # Check that warnings are *not* raised in this case
+    with warnings.catch_warnings():
+        # Escalate warnings -> errors so tests fail if warnings are raised
+        warnings.simplefilter("error")
+        nx.draw_networkx_edges(G, pos, ax=ax, arrows=True, **fap_only_kwarg)
+
+    plt.delaxes(ax)
diff --git a/networkx/generators/classic.py b/networkx/generators/classic.py
index d83062a..c6dffde 100644
--- a/networkx/generators/classic.py
+++ b/networkx/generators/classic.py
@@ -145,7 +145,26 @@ def balanced_tree(r, h, create_using=None):
 def barbell_graph(m1, m2, create_using=None):
     """Returns the Barbell Graph: two complete graphs connected by a path.
 
-    For $m1 > 1$ and $m2 >= 0$.
+    Parameters
+    ----------
+    m1 : int
+        Size of the left and right barbells, must be greater than 2.
+
+    m2 : int
+        Length of the path connecting the barbells.
+
+    create_using : NetworkX graph constructor, optional (default=nx.Graph)
+       Graph type to create. If graph instance, then cleared before populated.
+       Only undirected Graphs are supported.
+
+    Returns
+    -------
+    G : NetworkX graph
+        A barbell graph.
+
+    Notes
+    -----
+
 
     Two identical complete graphs $K_{m1}$ form the left and right bells,
     and are connected by a path $P_{m2}$.
@@ -177,14 +196,17 @@ def barbell_graph(m1, m2, create_using=None):
     G.add_nodes_from(range(m1, m1 + m2 - 1))
     if m2 > 1:
         G.add_edges_from(pairwise(range(m1, m1 + m2)))
+
     # right barbell
     G.add_edges_from(
         (u, v) for u in range(m1 + m2, 2 * m1 + m2) for v in range(u + 1, 2 * m1 + m2)
     )
+
     # connect it up
     G.add_edge(m1 - 1, m1)
     if m2 > 0:
         G.add_edge(m1 + m2 - 1, m1 + m2)
+
     return G
 
 
@@ -233,6 +255,8 @@ def complete_graph(n, create_using=None):
     n : int or iterable container of nodes
         If n is an integer, nodes are from range(n).
         If n is a container of nodes, those nodes appear in the graph.
+        Warning: n is not checked for duplicates and if present the
+        resulting graph may not be as desired. Make sure you have no duplicates.
     create_using : NetworkX graph constructor, optional (default=nx.Graph)
        Graph type to create. If graph instance, then cleared before populated.
 
@@ -360,6 +384,8 @@ def cycle_graph(n, create_using=None):
     n : int or iterable container of nodes
         If n is an integer, nodes are from `range(n)`.
         If n is a container of nodes, those nodes appear in the graph.
+        Warning: n is not checked for duplicates and if present the
+        resulting graph may not be as desired. Make sure you have no duplicates.
     create_using : NetworkX graph constructor, optional (default=nx.Graph)
        Graph type to create. If graph instance, then cleared before populated.
 
@@ -377,9 +403,38 @@ def cycle_graph(n, create_using=None):
 def dorogovtsev_goltsev_mendes_graph(n, create_using=None):
     """Returns the hierarchically constructed Dorogovtsev-Goltsev-Mendes graph.
 
-    n is the generation.
-    See: arXiv:/cond-mat/0112143 by Dorogovtsev, Goltsev and Mendes.
+    The Dorogovtsev-Goltsev-Mendes [1]_ procedure produces a scale-free graph
+    deterministically with the following properties for a given `n`:
+    - Total number of nodes = ``3 * (3**n + 1) / 2``
+    - Total number of edges = ``3 ** (n + 1)``
 
+    Parameters
+    ----------
+    n : integer
+       The generation number.
+
+    create_using : NetworkX Graph, optional
+       Graph type to be returned. Directed graphs and multi graphs are not
+       supported.
+
+    Returns
+    -------
+    G : NetworkX Graph
+
+    Examples
+    --------
+    >>> G = nx.dorogovtsev_goltsev_mendes_graph(3)
+    >>> G.number_of_nodes()
+    15
+    >>> G.number_of_edges()
+    27
+    >>> nx.is_planar(G)
+    True
+
+    References
+    ----------
+    .. [1] Dorogotsev S.N., Goltsev A.V., and Mendes J.F.F "Pseudofractal
+       Scale-free Web". arXiv:cond-mat/0112143
     """
     G = empty_graph(0, create_using)
     if G.is_directed():
@@ -481,7 +536,7 @@ def empty_graph(n=0, create_using=None, default=Graph):
     """
     if create_using is None:
         G = default()
-    elif type(create_using) is type:
+    elif isinstance(create_using, type):
         G = create_using()
     elif not hasattr(create_using, "adj"):
         raise TypeError("create_using is not a valid NetworkX graph type or instance")
@@ -523,7 +578,9 @@ def lollipop_graph(m, n, create_using=None):
     ----------
     m, n : int or iterable container of nodes (default = 0)
         If an integer, nodes are from `range(m)` and `range(m,m+n)`.
-        If a container, the entries are the coordinate of the node.
+        If a container of nodes, those nodes appear in the graph.
+        Warning: m and n are not checked for duplicates and if present the
+        resulting graph may not be as desired. Make sure you have no duplicates.
 
         The nodes for m appear in the complete graph $K_m$ and the nodes
         for n appear in the path $P_n$
@@ -587,6 +644,8 @@ def path_graph(n, create_using=None):
     n : int or iterable
         If an integer, nodes are 0 to n - 1.
         If an iterable of nodes, in the order they appear in the path.
+        Warning: n is not checked for duplicates and if present the
+        resulting graph may not be as desired. Make sure you have no duplicates.
     create_using : NetworkX graph constructor, optional (default=nx.Graph)
        Graph type to create. If graph instance, then cleared before populated.
 
@@ -608,6 +667,8 @@ def star_graph(n, create_using=None):
     n : int or iterable
         If an integer, node labels are 0 to n with center 0.
         If an iterable of nodes, the center is the first.
+        Warning: n is not checked for duplicates and if present the
+        resulting graph may not be as desired. Make sure you have no duplicates.
     create_using : NetworkX graph constructor, optional (default=nx.Graph)
        Graph type to create. If graph instance, then cleared before populated.
 
@@ -679,6 +740,8 @@ def wheel_graph(n, create_using=None):
     n : int or iterable
         If an integer, node labels are 0 to n with center 0.
         If an iterable of nodes, the center is the first.
+        Warning: n is not checked for duplicates and if present the
+        resulting graph may not be as desired. Make sure you have no duplicates.
     create_using : NetworkX graph constructor, optional (default=nx.Graph)
        Graph type to create. If graph instance, then cleared before populated.
 
@@ -768,7 +831,7 @@ def complete_multipartite_graph(*subset_sizes):
     # add nodes with subset attribute
     # while checking that ints are not mixed with iterables
     try:
-        for (i, subset) in enumerate(subsets):
+        for i, subset in enumerate(subsets):
             G.add_nodes_from(subset, subset=i)
     except TypeError as err:
         raise NetworkXError("Arguments must be all ints or all iterables") from err
diff --git a/networkx/generators/cographs.py b/networkx/generators/cographs.py
index e876358..9f2e35b 100644
--- a/networkx/generators/cographs.py
+++ b/networkx/generators/cographs.py
@@ -25,7 +25,7 @@ def random_cograph(n, seed=None):
     Cographs or $P_4$-free graphs can be obtained from a single vertex
     by disjoint union and complementation operations.
 
-    This generator starts off from a single vertex and performes disjoint
+    This generator starts off from a single vertex and performs disjoint
     union and full join operations on itself.
     The decision on which operation will take place is random.
 
diff --git a/networkx/generators/community.py b/networkx/generators/community.py
index f1f4d67..32b1901 100644
--- a/networkx/generators/community.py
+++ b/networkx/generators/community.py
@@ -132,7 +132,7 @@ def relaxed_caveman_graph(l, k, p, seed=None):
     k : int
       Size of cliques
     p : float
-      Probabilty of rewiring each edge.
+      Probability of rewiring each edge.
     seed : integer, random_state, or None (default)
         Indicator of random number generation state.
         See :ref:`Randomness<randomness>`.
@@ -159,7 +159,7 @@ def relaxed_caveman_graph(l, k, p, seed=None):
     """
     G = nx.caveman_graph(l, k)
     nodes = list(G)
-    for (u, v) in G.edges():
+    for u, v in G.edges():
         if seed.random() < p:  # rewire the edge
             x = seed.choice(nodes)
             if G.has_edge(u, x):
@@ -320,7 +320,7 @@ def gaussian_random_partition_graph(n, s, v, p_in, p_out, directed=False, seed=N
     v : float
       Shape parameter. The variance of cluster size distribution is s/v.
     p_in : float
-      Probabilty of intra cluster connection.
+      Probability of intra cluster connection.
     p_out : float
       Probability of inter cluster connection.
     directed : boolean, optional default=False
diff --git a/networkx/generators/directed.py b/networkx/generators/directed.py
index 21afab4..af3514d 100644
--- a/networkx/generators/directed.py
+++ b/networkx/generators/directed.py
@@ -94,7 +94,7 @@ def gnr_graph(n, p, create_using=None, seed=None):
 
     The GNR graph is built by adding nodes one at a time with a link to one
     previously added node.  The previous target node is chosen uniformly at
-    random.  With probabiliy `p` the link is instead "redirected" to the
+    random.  With probability `p` the link is instead "redirected" to the
     successor node of the target.
 
     The graph is always a (directed) tree.
diff --git a/networkx/generators/ego.py b/networkx/generators/ego.py
index cca7dfa..d1c126f 100644
--- a/networkx/generators/ego.py
+++ b/networkx/generators/ego.py
@@ -6,6 +6,7 @@ __all__ = ["ego_graph"]
 import networkx as nx
 
 
+@nx._dispatch
 def ego_graph(G, n, radius=1, center=True, undirected=False, distance=None):
     """Returns induced subgraph of neighbors centered at node n within
     a given radius.
diff --git a/networkx/generators/expanders.py b/networkx/generators/expanders.py
index 30b6702..aac8c10 100644
--- a/networkx/generators/expanders.py
+++ b/networkx/generators/expanders.py
@@ -70,8 +70,8 @@ def margulis_gabber_galil_graph(n, create_using=None):
         msg = "`create_using` must be an undirected multigraph."
         raise nx.NetworkXError(msg)
 
-    for (x, y) in itertools.product(range(n), repeat=2):
-        for (u, v) in (
+    for x, y in itertools.product(range(n), repeat=2):
+        for u, v in (
             ((x + 2 * y) % n, y),
             ((x + (2 * y + 1)) % n, y),
             (x, (y + 2 * x) % n),
@@ -146,22 +146,22 @@ def chordal_cycle_graph(p, create_using=None):
 
 
 def paley_graph(p, create_using=None):
-    """Returns the Paley (p-1)/2-regular graph on p nodes.
+    r"""Returns the Paley $\frac{(p-1)}{2}$ -regular graph on $p$ nodes.
 
-    The returned graph is a graph on Z/pZ with edges between x and y
-    if and only if x-y is a nonzero square in Z/pZ.
+    The returned graph is a graph on $\mathbb{Z}/p\mathbb{Z}$ with edges between $x$ and $y$
+    if and only if $x-y$ is a nonzero square in $\mathbb{Z}/p\mathbb{Z}$.
 
-    If p = 1 mod 4, -1 is a square in Z/pZ and therefore x-y is a square if and
-    only if y-x is also a square, i.e the edges in the Paley graph are symmetric.
+    If $p \equiv 1  \pmod 4$, $-1$ is a square in $\mathbb{Z}/p\mathbb{Z}$ and therefore $x-y$ is a square if and
+    only if $y-x$ is also a square, i.e the edges in the Paley graph are symmetric.
 
-    If p = 3 mod 4, -1 is not a square in Z/pZ and therefore either x-y or y-x
-    is a square in Z/pZ but not both.
+    If $p \equiv 3 \pmod 4$, $-1$ is not a square in $\mathbb{Z}/p\mathbb{Z}$ and therefore either $x-y$ or $y-x$
+    is a square in $\mathbb{Z}/p\mathbb{Z}$ but not both.
 
     Note that a more general definition of Paley graphs extends this construction
-    to graphs over q=p^n vertices, by using the finite field F_q instead of Z/pZ.
+    to graphs over $q=p^n$ vertices, by using the finite field $F_q$ instead of $\mathbb{Z}/p\mathbb{Z}$.
     This construction requires to compute squares in general finite fields and is
-    not what is implemented here (i.e paley_graph(25) does not return the true
-    Paley graph associated with 5^2).
+    not what is implemented here (i.e `paley_graph(25)` does not return the true
+    Paley graph associated with $5^2$).
 
     Parameters
     ----------
diff --git a/networkx/generators/geometric.py b/networkx/generators/geometric.py
index de5fbd4..dcc0512 100644
--- a/networkx/generators/geometric.py
+++ b/networkx/generators/geometric.py
@@ -19,24 +19,6 @@ __all__ = [
 ]
 
 
-def euclidean(x, y):
-    """Returns the Euclidean distance between the vectors ``x`` and ``y``.
-
-    Each of ``x`` and ``y`` can be any iterable of numbers. The
-    iterables must be of the same length.
-
-     .. deprecated:: 2.7
-    """
-    import warnings
-
-    msg = (
-        "euclidean is deprecated and will be removed in 3.0."
-        "Use math.dist(x, y) instead."
-    )
-    warnings.warn(msg, DeprecationWarning, stacklevel=2)
-    return math.dist(x, y)
-
-
 def geometric_edges(G, radius, p=2):
     """Returns edge list of node pairs within `radius` of each other.
 
diff --git a/networkx/generators/internet_as_graphs.py b/networkx/generators/internet_as_graphs.py
index c3c1278..6bc7738 100644
--- a/networkx/generators/internet_as_graphs.py
+++ b/networkx/generators/internet_as_graphs.py
@@ -272,7 +272,7 @@ class AS_graph_generator:
     def add_cp_peering_link(self, cp, to_kind):
         """Add a peering link to a content provider (CP) node.
 
-        Target node j can be CP or M and it is drawn uniformely among the nodes
+        Target node j can be CP or M and it is drawn uniformly among the nodes
         belonging to the same region as cp.
 
         Parameters
@@ -376,7 +376,7 @@ class AS_graph_generator:
         self.nodes = {"T": set(), "M": set(), "CP": set(), "C": set()}
 
         self.t_graph()
-        self.nodes["T"] = set(list(self.G.nodes()))
+        self.nodes["T"] = set(self.G.nodes())
 
         i = len(self.nodes["T"])
         for _ in range(self.n_m):
diff --git a/networkx/generators/joint_degree_seq.py b/networkx/generators/joint_degree_seq.py
index 7ab7c76..1a1c265 100644
--- a/networkx/generators/joint_degree_seq.py
+++ b/networkx/generators/joint_degree_seq.py
@@ -227,13 +227,11 @@ def joint_degree_graph(joint_degrees, seed=None):
     # for each pair
     for k in joint_degrees:
         for l in joint_degrees[k]:
-
             # n_edges_add is the number of edges to add for the
             # degree pair (k,l)
             n_edges_add = joint_degrees[k][l]
 
             if (n_edges_add > 0) and (k >= l):
-
                 # number of nodes with degree k and l
                 k_size = degree_count[k]
                 l_size = degree_count[l]
@@ -253,14 +251,12 @@ def joint_degree_graph(joint_degrees, seed=None):
                     n_edges_add = joint_degrees[k][l] // 2
 
                 while n_edges_add > 0:
-
                     # randomly pick nodes v and w that have degrees k and l
                     v = k_nodes[seed.randrange(k_size)]
                     w = l_nodes[seed.randrange(l_size)]
 
                     # if nodes v and w are disconnected then attempt to connect
                     if not G.has_edge(v, w) and (v != w):
-
                         # if node v has no free stubs then do neighbor switch
                         if h_node_residual[v] == 0:
                             _neighbor_switch(G, v, k_unsat, h_node_residual)
@@ -351,13 +347,7 @@ def is_valid_directed_joint_degree(in_degrees, out_degrees, nkk):
                 if val + forbidden.get((k, l), 0) > V[(k, 1)] * V[(l, 0)]:
                     return False
 
-    for s in S:
-        if not S[s] / s[0] == V[s]:  # condition 2
-            return False
-
-    # if all conditions abive have been satisfied then the input nkk is
-    # realizable as a simple graph.
-    return True
+    return all(S[s] / s[0] == V[s] for s in S)
 
 
 def _directed_neighbor_switch(
diff --git a/networkx/generators/line.py b/networkx/generators/line.py
index 7432c2f..305ad3c 100644
--- a/networkx/generators/line.py
+++ b/networkx/generators/line.py
@@ -243,10 +243,10 @@ def inverse_line_graph(G):
 
     Notes
     -----
-    This is an implementation of the Roussopoulos algorithm.
+    This is an implementation of the Roussopoulos algorithm[1]_.
 
     If G consists of multiple components, then the algorithm doesn't work.
-    You should invert every component seperately:
+    You should invert every component separately:
 
     >>> K5 = nx.complete_graph(5)
     >>> P4 = nx.Graph([("a", "b"), ("b", "c"), ("c", "d")])
@@ -259,8 +259,9 @@ def inverse_line_graph(G):
 
     References
     ----------
-    * Roussopolous, N, "A max {m, n} algorithm for determining the graph H from
-      its line graph G", Information Processing Letters 2, (1973), 108--112.
+    .. [1] Roussopoulos, N.D. , "A max {m, n} algorithm for determining the graph H from
+       its line graph G", Information Processing Letters 2, (1973), 108--112, ISSN 0020-0190,
+       `DOI link <https://doi.org/10.1016/0020-0190(73)90029-X>`_
 
     """
     if G.number_of_nodes() == 0:
@@ -274,7 +275,14 @@ def inverse_line_graph(G):
     elif G.number_of_nodes() > 1 and G.number_of_edges() == 0:
         msg = (
             "inverse_line_graph() doesn't work on an edgeless graph. "
-            "Please use this function on each component seperately."
+            "Please use this function on each component separately."
+        )
+        raise nx.NetworkXError(msg)
+
+    if nx.number_of_selfloops(G) != 0:
+        msg = (
+            "A line graph as generated by NetworkX has no selfloops, so G has no "
+            "inverse line graph. Please remove the selfloops from G and try again."
         )
         raise nx.NetworkXError(msg)
 
@@ -350,10 +358,7 @@ def _odd_triangle(G, T):
         for v in G[t]:
             if v not in T:
                 T_neighbors[v] += 1
-    for v in T_neighbors:
-        if T_neighbors[v] in [1, 3]:
-            return True
-    return False
+    return any(T_neighbors[v] in [1, 3] for v in T_neighbors)
 
 
 def _find_partition(G, starting_cell):
@@ -380,13 +385,9 @@ def _find_partition(G, starting_cell):
     partitioned_vertices = list(starting_cell)
     while G_partition.number_of_edges() > 0:
         # there are still edges left and so more cells to be made
-        u = partitioned_vertices[-1]
+        u = partitioned_vertices.pop()
         deg_u = len(G_partition[u])
-        if deg_u == 0:
-            # if u has no edges left in G_partition then we have found
-            # all of its cells so we do not need to keep looking
-            partitioned_vertices.pop()
-        else:
+        if deg_u != 0:
             # if u still has edges then we need to find its other cell
             # this other cell must be a complete subgraph or else G is
             # not a line graph
@@ -433,6 +434,8 @@ def _select_starting_cell(G, starting_edge=None):
         e = arbitrary_element(G.edges())
     else:
         e = starting_edge
+        if e[0] not in G.nodes():
+            raise nx.NetworkXError(f"Vertex {e[0]} not in graph")
         if e[1] not in G[e[0]]:
             msg = f"starting_edge ({e[0]}, {e[1]}) is not in the Graph"
             raise nx.NetworkXError(msg)
@@ -447,10 +450,10 @@ def _select_starting_cell(G, starting_edge=None):
         T = e_triangles[0]
         a, b, c = T
         # ab was original edge so check the other 2 edges
-        ac_edges = [x for x in _triangles(G, (a, c))]
-        bc_edges = [x for x in _triangles(G, (b, c))]
-        if len(ac_edges) == 1:
-            if len(bc_edges) == 1:
+        ac_edges = len(_triangles(G, (a, c)))
+        bc_edges = len(_triangles(G, (b, c)))
+        if ac_edges == 1:
+            if bc_edges == 1:
                 starting_cell = T
             else:
                 return _select_starting_cell(G, starting_edge=(b, c))
@@ -469,29 +472,22 @@ def _select_starting_cell(G, starting_edge=None):
             starting_cell = T
         elif r - 1 <= s <= r:
             # check if odd triangles containing e form complete subgraph
-            # there must be exactly s+2 of them
-            # and they must all be connected
             triangle_nodes = set()
             for T in odd_triangles:
                 for x in T:
                     triangle_nodes.add(x)
-            if len(triangle_nodes) == s + 2:
-                for u in triangle_nodes:
-                    for v in triangle_nodes:
-                        if u != v and (v not in G[u]):
-                            msg = (
-                                "G is not a line graph (odd triangles "
-                                "do not form complete subgraph)"
-                            )
-                            raise nx.NetworkXError(msg)
-                # otherwise then we can use this as the starting cell
-                starting_cell = tuple(triangle_nodes)
-            else:
-                msg = (
-                    "G is not a line graph (odd triangles "
-                    "do not form complete subgraph)"
-                )
-                raise nx.NetworkXError(msg)
+
+            for u in triangle_nodes:
+                for v in triangle_nodes:
+                    if u != v and (v not in G[u]):
+                        msg = (
+                            "G is not a line graph (odd triangles "
+                            "do not form complete subgraph)"
+                        )
+                        raise nx.NetworkXError(msg)
+            # otherwise then we can use this as the starting cell
+            starting_cell = tuple(triangle_nodes)
+
         else:
             msg = (
                 "G is not a line graph (incorrect number of "
diff --git a/networkx/generators/random_graphs.py b/networkx/generators/random_graphs.py
index d75bbd3..dfca7d1 100644
--- a/networkx/generators/random_graphs.py
+++ b/networkx/generators/random_graphs.py
@@ -343,7 +343,7 @@ def newman_watts_strogatz_graph(n, k, p, seed=None):
     # for each edge u-v, with probability p, randomly select existing
     # node w and add new edge u-w
     e = list(G.edges())
-    for (u, v) in e:
+    for u, v in e:
         if seed.random() < p:
             w = seed.choice(nlist)
             # no self-loops and reject if edge u-w exists
@@ -487,6 +487,8 @@ def connected_watts_strogatz_graph(n, k, p, tries=100, seed=None):
 def random_regular_graph(d, n, seed=None):
     r"""Returns a random $d$-regular graph on $n$ nodes.
 
+    A regular graph is a graph where each node has the same number of neighbors.
+
     The resulting graph has no self-loops or parallel edges.
 
     Parameters
@@ -518,7 +520,7 @@ def random_regular_graph(d, n, seed=None):
     .. [1] A. Steger and N. Wormald,
        Generating random regular graphs quickly,
        Probability and Computing 8 (1999), 377-396, 1999.
-       http://citeseer.ist.psu.edu/steger99generating.html
+       https://doi.org/10.1017/S0963548399003867
 
     .. [2] Jeong Han Kim and Van H. Vu,
        Generating random regular graphs,
@@ -1015,7 +1017,7 @@ def powerlaw_cluster_graph(n, m, p, seed=None):
                 neighborhood = [
                     nbr
                     for nbr in G.neighbors(target)
-                    if not G.has_edge(source, nbr) and not nbr == source
+                    if not G.has_edge(source, nbr) and nbr != source
                 ]
                 if neighborhood:  # if there is a neighbor without a link
                     nbr = seed.choice(neighborhood)
@@ -1065,7 +1067,7 @@ def random_lobster(n, p1, p2, seed=None):
         If `p1` or `p2` parameters are >= 1 because the while loops would never finish.
     """
     p1, p2 = abs(p1), abs(p2)
-    if any([p >= 1 for p in [p1, p2]]):
+    if any(p >= 1 for p in [p1, p2]):
         raise nx.NetworkXError("Probability values for `p1` and `p2` must both be < 1.")
 
     # a necessary ingredient in any self-respecting graph library
@@ -1114,7 +1116,7 @@ def random_shell_graph(constructor, seed=None):
     intra_edges = []
     nnodes = 0
     # create gnm graphs for each shell
-    for (n, m, d) in constructor:
+    for n, m, d in constructor:
         inter_edges = int(m * d)
         intra_edges.append(m - inter_edges)
         g = nx.convert_node_labels_to_integers(
diff --git a/networkx/generators/small.py b/networkx/generators/small.py
index 7b4dd57..60a4388 100644
--- a/networkx/generators/small.py
+++ b/networkx/generators/small.py
@@ -4,7 +4,6 @@ Various small and named graphs, together with some compact generators.
 """
 
 __all__ = [
-    "make_small_graph",
     "LCF_graph",
     "bull_graph",
     "chvatal_graph",
@@ -60,115 +59,6 @@ def _raise_on_directed(func):
     return wrapper
 
 
-def make_small_undirected_graph(graph_description, create_using=None):
-    """
-    Return a small undirected graph described by graph_description.
-
-    .. deprecated:: 2.7
-
-       make_small_undirected_graph is deprecated and will be removed in
-       version 3.0. If "ltype" == "adjacencylist", convert the list to a dict
-       and use `from_dict_of_lists`. If "ltype" == "edgelist", use
-       `from_edgelist`.
-
-    See make_small_graph.
-    """
-    import warnings
-
-    msg = (
-        "\n\nmake_small_undirected_graph is deprecated and will be removed in "
-        "version 3.0.\n"
-        "If `ltype` == 'adjacencylist', convert `xlist` to a dict and use\n"
-        "`from_dict_of_lists` instead.\n"
-        "If `ltype` == 'edgelist', use `from_edgelist` instead."
-    )
-    warnings.warn(msg, category=DeprecationWarning, stacklevel=2)
-
-    G = empty_graph(0, create_using)
-    if G.is_directed():
-        raise NetworkXError("Directed Graph not supported")
-    return make_small_graph(graph_description, G)
-
-
-def make_small_graph(graph_description, create_using=None):
-    """
-    Return the small graph described by graph_description.
-
-    .. deprecated:: 2.7
-
-       make_small_graph is deprecated and will be removed in
-       version 3.0. If "ltype" == "adjacencylist", convert the list to a dict
-       and use `from_dict_of_lists`. If "ltype" == "edgelist", use
-       `from_edgelist`.
-
-    graph_description is a list of the form [ltype,name,n,xlist]
-
-    Here ltype is one of "adjacencylist" or "edgelist",
-    name is the name of the graph and n the number of nodes.
-    This constructs a graph of n nodes with integer labels 0,..,n-1.
-
-    If ltype="adjacencylist"  then xlist is an adjacency list
-    with exactly n entries, in with the j'th entry (which can be empty)
-    specifies the nodes connected to vertex j.
-    e.g. the "square" graph C_4 can be obtained by
-
-    >>> G = nx.make_small_graph(
-    ...     ["adjacencylist", "C_4", 4, [[2, 4], [1, 3], [2, 4], [1, 3]]]
-    ... )
-
-    or, since we do not need to add edges twice,
-
-    >>> G = nx.make_small_graph(["adjacencylist", "C_4", 4, [[2, 4], [3], [4], []]])
-
-    If ltype="edgelist" then xlist is an edge list
-    written as [[v1,w2],[v2,w2],...,[vk,wk]],
-    where vj and wj integers in the range 1,..,n
-    e.g. the "square" graph C_4 can be obtained by
-
-    >>> G = nx.make_small_graph(
-    ...     ["edgelist", "C_4", 4, [[1, 2], [3, 4], [2, 3], [4, 1]]]
-    ... )
-
-    Use the create_using argument to choose the graph class/type.
-    """
-    import warnings
-
-    msg = (
-        "\n\nmake_small_graph is deprecated and will be removed in version 3.0.\n"
-        "If `ltype` == 'adjacencylist', convert `xlist` to a dict and use\n"
-        "`from_dict_of_lists` instead.\n"
-        "If `ltype` == 'edgelist', use `from_edgelist` instead."
-    )
-    warnings.warn(msg, category=DeprecationWarning, stacklevel=2)
-
-    if graph_description[0] not in ("adjacencylist", "edgelist"):
-        raise NetworkXError("ltype must be either adjacencylist or edgelist")
-
-    ltype = graph_description[0]
-    name = graph_description[1]
-    n = graph_description[2]
-
-    G = empty_graph(n, create_using)
-    nodes = G.nodes()
-
-    if ltype == "adjacencylist":
-        adjlist = graph_description[3]
-        if len(adjlist) != n:
-            raise NetworkXError("invalid graph_description")
-        G.add_edges_from([(u - 1, v) for v in nodes for u in adjlist[v]])
-    elif ltype == "edgelist":
-        edgelist = graph_description[3]
-        for e in edgelist:
-            v1 = e[0] - 1
-            v2 = e[1] - 1
-            if v1 < 0 or v1 > n - 1 or v2 < 0 or v2 > n - 1:
-                raise NetworkXError("invalid graph_description")
-            else:
-                G.add_edge(v1, v2)
-    G.name = name
-    return G
-
-
 def LCF_graph(n, shift_list, repeats, create_using=None):
     """
     Return the cubic graph specified in LCF notation.
@@ -213,7 +103,7 @@ def LCF_graph(n, shift_list, repeats, create_using=None):
     if G.is_directed():
         raise NetworkXError("Directed Graph not supported")
     G.name = "LCF_graph"
-    nodes = sorted(list(G))
+    nodes = sorted(G)
 
     n_extra_edges = repeats * len(shift_list)
     # edges are added n_extra_edges times
diff --git a/networkx/generators/tests/test_classic.py b/networkx/generators/tests/test_classic.py
index 9cb0866..77090e7 100644
--- a/networkx/generators/tests/test_classic.py
+++ b/networkx/generators/tests/test_classic.py
@@ -6,6 +6,7 @@ Generators - Classic
 Unit tests for various classic graph generators in generators/classic.py
 """
 import itertools
+import typing
 
 import pytest
 
@@ -288,6 +289,15 @@ class TestGeneratorClassic:
         assert not H.is_directed()
         assert G is not H
 
+        # test for subclasses that also use typing.Protocol. See gh-6243
+        class Mixin(typing.Protocol):
+            pass
+
+        class MyGraph(Mixin, nx.DiGraph):
+            pass
+
+        G = nx.empty_graph(create_using=MyGraph)
+
     def test_empty_graph(self):
         G = nx.empty_graph()
         assert nx.number_of_nodes(G) == 0
@@ -543,7 +553,7 @@ class TestGeneratorClassic:
                 assert v not in G[u]
                 assert G.nodes[u] == G.nodes[v]
         # Across blocks, all vertices should be adjacent.
-        for (block1, block2) in itertools.combinations(blocks, 2):
+        for block1, block2 in itertools.combinations(blocks, 2):
             for u, v in itertools.product(block1, block2):
                 assert v in G[u]
                 assert G.nodes[u] != G.nodes[v]
diff --git a/networkx/generators/tests/test_community.py b/networkx/generators/tests/test_community.py
index 9f4a2e4..24af7fd 100644
--- a/networkx/generators/tests/test_community.py
+++ b/networkx/generators/tests/test_community.py
@@ -110,6 +110,10 @@ def test_caveman_graph():
     G = nx.caveman_graph(4, 3)
     assert len(G) == 12
 
+    G = nx.caveman_graph(5, 1)
+    E5 = nx.empty_graph(5)
+    assert nx.is_isomorphic(G, E5)
+
     G = nx.caveman_graph(1, 5)
     K5 = nx.complete_graph(5)
     assert nx.is_isomorphic(G, K5)
@@ -133,6 +137,9 @@ def test_gaussian_random_partition_graph():
     pytest.raises(
         nx.NetworkXError, nx.gaussian_random_partition_graph, 100, 101, 10, 1, 0
     )
+    # Test when clusters are likely less than 1
+    G = nx.gaussian_random_partition_graph(10, 0.5, 0.5, 0.5, 0.5, seed=1)
+    assert len(G) == 10
 
 
 def test_ring_of_cliques():
@@ -146,8 +153,14 @@ def test_ring_of_cliques():
                 # the edge that already exists cannot be duplicated
                 expected_num_edges = i * (((j * (j - 1)) // 2) + 1) - 1
             assert G.number_of_edges() == expected_num_edges
-    pytest.raises(nx.NetworkXError, nx.ring_of_cliques, 1, 5)
-    pytest.raises(nx.NetworkXError, nx.ring_of_cliques, 3, 0)
+    with pytest.raises(
+        nx.NetworkXError, match="A ring of cliques must have at least two cliques"
+    ):
+        nx.ring_of_cliques(1, 5)
+    with pytest.raises(
+        nx.NetworkXError, match="The cliques must have at least two nodes"
+    ):
+        nx.ring_of_cliques(3, 0)
 
 
 def test_windmill_graph():
@@ -159,8 +172,14 @@ def test_windmill_graph():
             assert G.degree(0) == G.number_of_nodes() - 1
             for i in range(1, G.number_of_nodes()):
                 assert G.degree(i) == k - 1
-    pytest.raises(nx.NetworkXError, nx.ring_of_cliques, 1, 3)
-    pytest.raises(nx.NetworkXError, nx.ring_of_cliques, 15, 0)
+    with pytest.raises(
+        nx.NetworkXError, match="A windmill graph must have at least two cliques"
+    ):
+        nx.windmill_graph(1, 3)
+    with pytest.raises(
+        nx.NetworkXError, match="The cliques must have at least two nodes"
+    ):
+        nx.windmill_graph(3, 0)
 
 
 def test_stochastic_block_model():
@@ -215,7 +234,7 @@ def test_generator():
 
 
 def test_invalid_tau1():
-    with pytest.raises(nx.NetworkXError):
+    with pytest.raises(nx.NetworkXError, match="tau2 must be greater than one"):
         n = 100
         tau1 = 2
         tau2 = 1
@@ -224,7 +243,7 @@ def test_invalid_tau1():
 
 
 def test_invalid_tau2():
-    with pytest.raises(nx.NetworkXError):
+    with pytest.raises(nx.NetworkXError, match="tau1 must be greater than one"):
         n = 100
         tau1 = 1
         tau2 = 2
@@ -233,7 +252,7 @@ def test_invalid_tau2():
 
 
 def test_mu_too_large():
-    with pytest.raises(nx.NetworkXError):
+    with pytest.raises(nx.NetworkXError, match="mu must be in the interval \\[0, 1\\]"):
         n = 100
         tau1 = 2
         tau2 = 2
@@ -242,7 +261,7 @@ def test_mu_too_large():
 
 
 def test_mu_too_small():
-    with pytest.raises(nx.NetworkXError):
+    with pytest.raises(nx.NetworkXError, match="mu must be in the interval \\[0, 1\\]"):
         n = 100
         tau1 = 2
         tau2 = 2
@@ -251,18 +270,93 @@ def test_mu_too_small():
 
 
 def test_both_degrees_none():
-    with pytest.raises(nx.NetworkXError):
+    with pytest.raises(
+        nx.NetworkXError,
+        match="Must assign exactly one of min_degree and average_degree",
+    ):
         n = 100
         tau1 = 2
         tau2 = 2
-        mu = -1
+        mu = 1
         nx.LFR_benchmark_graph(n, tau1, tau2, mu)
 
 
 def test_neither_degrees_none():
-    with pytest.raises(nx.NetworkXError):
+    with pytest.raises(
+        nx.NetworkXError,
+        match="Must assign exactly one of min_degree and average_degree",
+    ):
         n = 100
         tau1 = 2
         tau2 = 2
-        mu = -1
+        mu = 1
         nx.LFR_benchmark_graph(n, tau1, tau2, mu, min_degree=2, average_degree=5)
+
+
+def test_max_iters_exeded():
+    with pytest.raises(
+        nx.ExceededMaxIterations,
+        match="Could not assign communities; try increasing min_community",
+    ):
+        n = 10
+        tau1 = 2
+        tau2 = 2
+        mu = 0.1
+        nx.LFR_benchmark_graph(n, tau1, tau2, mu, min_degree=2, max_iters=10, seed=1)
+
+
+def test_max_deg_out_of_range():
+    with pytest.raises(
+        nx.NetworkXError, match="max_degree must be in the interval \\(0, n\\]"
+    ):
+        n = 10
+        tau1 = 2
+        tau2 = 2
+        mu = 0.1
+        nx.LFR_benchmark_graph(
+            n, tau1, tau2, mu, max_degree=n + 1, max_iters=10, seed=1
+        )
+
+
+def test_max_community():
+    n = 250
+    tau1 = 3
+    tau2 = 1.5
+    mu = 0.1
+    G = nx.LFR_benchmark_graph(
+        n,
+        tau1,
+        tau2,
+        mu,
+        average_degree=5,
+        max_degree=100,
+        min_community=50,
+        max_community=200,
+        seed=10,
+    )
+    assert len(G) == 250
+    C = {frozenset(G.nodes[v]["community"]) for v in G}
+    assert nx.community.is_partition(G.nodes(), C)
+
+
+def test_powerlaw_iterations_exceeded():
+    with pytest.raises(
+        nx.ExceededMaxIterations, match="Could not create power law sequence"
+    ):
+        n = 100
+        tau1 = 2
+        tau2 = 2
+        mu = 1
+        nx.LFR_benchmark_graph(n, tau1, tau2, mu, min_degree=2, max_iters=0)
+
+
+def test_no_scipy_zeta():
+    zeta2 = 1.6449340668482264
+    assert abs(zeta2 - nx.generators.community._hurwitz_zeta(2, 1, 0.0001)) < 0.01
+
+
+def test_generate_min_degree_itr():
+    with pytest.raises(
+        nx.ExceededMaxIterations, match="Could not match average_degree"
+    ):
+        nx.generators.community._generate_min_degree(2, 2, 1, 0.01, 0)
diff --git a/networkx/generators/tests/test_degree_seq.py b/networkx/generators/tests/test_degree_seq.py
index 70a63b6..39ed59a 100644
--- a/networkx/generators/tests/test_degree_seq.py
+++ b/networkx/generators/tests/test_degree_seq.py
@@ -87,19 +87,19 @@ class TestConfigurationModel:
             nx.configuration_model([1, 2])
 
 
-def test_directed_configuation_raise_unequal():
+def test_directed_configuration_raise_unequal():
     with pytest.raises(nx.NetworkXError):
         zin = [5, 3, 3, 3, 3, 2, 2, 2, 1, 1]
         zout = [5, 3, 3, 3, 3, 2, 2, 2, 1, 2]
         nx.directed_configuration_model(zin, zout)
 
 
-def test_directed_configuation_model():
+def test_directed_configuration_model():
     G = nx.directed_configuration_model([], [], seed=0)
     assert len(G) == 0
 
 
-def test_simple_directed_configuation_model():
+def test_simple_directed_configuration_model():
     G = nx.directed_configuration_model([1, 1], [1, 1], seed=0)
     assert len(G) == 2
 
@@ -169,11 +169,11 @@ def test_directed_havel_hakimi():
     p = 1.0 / r
     for i in range(r):
         G1 = nx.erdos_renyi_graph(n, p * (i + 1), None, True)
-        din1 = list(d for n, d in G1.in_degree())
-        dout1 = list(d for n, d in G1.out_degree())
+        din1 = [d for n, d in G1.in_degree()]
+        dout1 = [d for n, d in G1.out_degree()]
         G2 = nx.directed_havel_hakimi_graph(din1, dout1)
-        din2 = list(d for n, d in G2.in_degree())
-        dout2 = list(d for n, d in G2.out_degree())
+        din2 = [d for n, d in G2.in_degree()]
+        dout2 = [d for n, d in G2.out_degree()]
         assert sorted(din1) == sorted(din2)
         assert sorted(dout1) == sorted(dout2)
 
diff --git a/networkx/generators/tests/test_directed.py b/networkx/generators/tests/test_directed.py
index 157c9ab..2333cd1 100644
--- a/networkx/generators/tests/test_directed.py
+++ b/networkx/generators/tests/test_directed.py
@@ -57,6 +57,41 @@ class TestGeneratorsDirected:
         pytest.raises(ValueError, scale_free_graph, 100, beta=-0.3)
         pytest.raises(ValueError, scale_free_graph, 100, gamma=-0.3)
 
+    def test_parameters(self):
+        G = nx.DiGraph()
+        G.add_node(0)
+
+        def kernel(x):
+            return x
+
+        assert nx.is_isomorphic(gn_graph(1), G)
+        assert nx.is_isomorphic(gn_graph(1, kernel=kernel), G)
+        assert nx.is_isomorphic(gnc_graph(1), G)
+        assert nx.is_isomorphic(gnr_graph(1, 0.5), G)
+
+
+def test_scale_free_graph_create_using_with_initial_graph():
+    G = nx.MultiGraph()
+    with pytest.raises(
+        ValueError,
+        match="Cannot set both create_using and initial_graph. Set create_using=None.",
+    ):
+        scale_free_graph(10, create_using=nx.Graph, initial_graph=G)
+
+
+def test_scale_free_graph_negative_delta():
+    with pytest.raises(ValueError, match="delta_in must be >= 0."):
+        scale_free_graph(10, create_using=None, delta_in=-1)
+    with pytest.raises(ValueError, match="delta_out must be >= 0."):
+        scale_free_graph(10, create_using=None, delta_out=-1)
+
+
+def test_non_numeric_ordering():
+    G = MultiDiGraph([("a", "b"), ("b", "c"), ("c", "a")])
+    s = scale_free_graph(3, initial_graph=G)
+    assert len(s) == 3
+    assert len(s.edges) == 3
+
 
 @pytest.mark.parametrize("ig", (nx.Graph(), nx.DiGraph([(0, 1)])))
 def test_scale_free_graph_initial_graph_kwarg(ig):
@@ -88,6 +123,10 @@ class TestRandomKOutGraph:
         G = random_k_out_graph(n, k, alpha, self_loops=False)
         assert nx.number_of_selfloops(G) == 0
 
+    def test_negative_alpha(self):
+        with pytest.raises(ValueError, match="alpha must be positive"):
+            random_k_out_graph(10, 3, -1)
+
 
 class TestUniformRandomKOutGraph:
     """Unit tests for the
@@ -119,6 +158,11 @@ class TestUniformRandomKOutGraph:
         G = random_uniform_k_out_graph(n, k, with_replacement=True)
         assert G.is_multigraph()
         assert all(d == k for v, d in G.out_degree())
+        n = 10
+        k = 9
+        G = random_uniform_k_out_graph(n, k, with_replacement=False, self_loops=False)
+        assert nx.number_of_selfloops(G) == 0
+        assert all(d == k for v, d in G.out_degree())
 
     def test_without_replacement(self):
         n = 10
diff --git a/networkx/generators/tests/test_expanders.py b/networkx/generators/tests/test_expanders.py
index f84b04a..a2b4bae 100644
--- a/networkx/generators/tests/test_expanders.py
+++ b/networkx/generators/tests/test_expanders.py
@@ -60,7 +60,7 @@ def test_paley_graph(p):
     # If p = 1 mod 4, -1 is a square mod 4 and therefore the
     # edge in the Paley graph are symmetric.
     if p % 4 == 1:
-        for (u, v) in G.edges:
+        for u, v in G.edges:
             assert (v, u) in G.edges
 
 
diff --git a/networkx/generators/tests/test_geometric.py b/networkx/generators/tests/test_geometric.py
index 58490bf..28b1b54 100644
--- a/networkx/generators/tests/test_geometric.py
+++ b/networkx/generators/tests/test_geometric.py
@@ -101,6 +101,7 @@ class TestSoftRandomGeometricGraph:
         generator.
 
         """
+
         # Use the L1 metric.
         def dist(x, y):
             return sum(abs(a - b) for a, b in zip(x, y))
@@ -137,7 +138,7 @@ class TestSoftRandomGeometricGraph:
         assert len(SRGG.edges()) <= len(RGG.edges())
 
     def test_p_dist_zero(self):
-        """Tests if p_dict = 0 returns disconencted graph with 0 edges"""
+        """Tests if p_dict = 0 returns disconnected graph with 0 edges"""
 
         def p_dist(dist):
             return 0
@@ -208,7 +209,7 @@ class TestGeographicalThresholdGraph:
                 assert not join(G, u, v, 10, -2, l1dist)
 
     def test_p_dist_zero(self):
-        """Tests if p_dict = 0 returns disconencted graph with 0 edges"""
+        """Tests if p_dict = 0 returns disconnected graph with 0 edges"""
 
         def p_dist(dist):
             return 0
@@ -287,6 +288,7 @@ class TestThresholdedRandomGeometricGraph:
         generator.
 
         """
+
         # Use the L1 metric.
         def dist(x, y):
             return sum(abs(a - b) for a, b in zip(x, y))
diff --git a/networkx/generators/tests/test_harary_graph.py b/networkx/generators/tests/test_harary_graph.py
index 82647e3..a0bcce5 100644
--- a/networkx/generators/tests/test_harary_graph.py
+++ b/networkx/generators/tests/test_harary_graph.py
@@ -16,7 +16,7 @@ class TestHararyGraph:
     def test_hnm_harary_graph(self):
         # When d is even and r = 0, the hnm_harary_graph(n,m) is
         # the circulant_graph(n, list(range(1,d/2+1)))
-        for (n, m) in [(5, 5), (6, 12), (7, 14)]:
+        for n, m in [(5, 5), (6, 12), (7, 14)]:
             G1 = hnm_harary_graph(n, m)
             d = 2 * m // n
             G2 = nx.circulant_graph(n, list(range(1, d // 2 + 1)))
@@ -25,7 +25,7 @@ class TestHararyGraph:
         # When d is even and r > 0, the hnm_harary_graph(n,m) is
         # the circulant_graph(n, list(range(1,d/2+1)))
         # with r edges added arbitrarily
-        for (n, m) in [(5, 7), (6, 13), (7, 16)]:
+        for n, m in [(5, 7), (6, 13), (7, 16)]:
             G1 = hnm_harary_graph(n, m)
             d = 2 * m // n
             G2 = nx.circulant_graph(n, list(range(1, d // 2 + 1)))
@@ -34,7 +34,7 @@ class TestHararyGraph:
 
         # When d is odd and n is even and r = 0, the hnm_harary_graph(n,m)
         # is the circulant_graph(n, list(range(1,(d+1)/2) plus [n//2])
-        for (n, m) in [(6, 9), (8, 12), (10, 15)]:
+        for n, m in [(6, 9), (8, 12), (10, 15)]:
             G1 = hnm_harary_graph(n, m)
             d = 2 * m // n
             L = list(range(1, (d + 1) // 2))
@@ -45,7 +45,7 @@ class TestHararyGraph:
         # When d is odd and n is even and r > 0, the hnm_harary_graph(n,m)
         # is the circulant_graph(n, list(range(1,(d+1)/2) plus [n//2])
         # with r edges added arbitrarily
-        for (n, m) in [(6, 10), (8, 13), (10, 17)]:
+        for n, m in [(6, 10), (8, 13), (10, 17)]:
             G1 = hnm_harary_graph(n, m)
             d = 2 * m // n
             L = list(range(1, (d + 1) // 2))
@@ -57,7 +57,7 @@ class TestHararyGraph:
         # When d is odd and n is odd, the hnm_harary_graph(n,m) is
         # the circulant_graph(n, list(range(1,(d+1)/2))
         # with m - n*(d-1)/2 edges added arbitrarily
-        for (n, m) in [(5, 4), (7, 12), (9, 14)]:
+        for n, m in [(5, 4), (7, 12), (9, 14)]:
             G1 = hnm_harary_graph(n, m)
             d = 2 * m // n
             L = list(range(1, (d + 1) // 2))
@@ -87,21 +87,21 @@ class TestHararyGraph:
     def test_hkn_harary_graph(self):
         # When k == 1, the hkn_harary_graph(k,n) is
         # the path_graph(n)
-        for (k, n) in [(1, 6), (1, 7)]:
+        for k, n in [(1, 6), (1, 7)]:
             G1 = hkn_harary_graph(k, n)
             G2 = nx.path_graph(n)
             assert is_isomorphic(G1, G2)
 
         # When k is even, the hkn_harary_graph(k,n) is
         # the circulant_graph(n, list(range(1,k/2+1)))
-        for (k, n) in [(2, 6), (2, 7), (4, 6), (4, 7)]:
+        for k, n in [(2, 6), (2, 7), (4, 6), (4, 7)]:
             G1 = hkn_harary_graph(k, n)
             G2 = nx.circulant_graph(n, list(range(1, k // 2 + 1)))
             assert is_isomorphic(G1, G2)
 
         # When k is odd and n is even, the hkn_harary_graph(k,n) is
         # the circulant_graph(n, list(range(1,(k+1)/2)) plus [n/2])
-        for (k, n) in [(3, 6), (5, 8), (7, 10)]:
+        for k, n in [(3, 6), (5, 8), (7, 10)]:
             G1 = hkn_harary_graph(k, n)
             L = list(range(1, (k + 1) // 2))
             L.append(n // 2)
@@ -111,7 +111,7 @@ class TestHararyGraph:
         # When k is odd and n is odd, the hkn_harary_graph(k,n) is
         # the circulant_graph(n, list(range(1,(k+1)/2))) with
         # n//2+1 edges added between node i and node i+n//2+1
-        for (k, n) in [(3, 5), (5, 9), (7, 11)]:
+        for k, n in [(3, 5), (5, 9), (7, 11)]:
             G1 = hkn_harary_graph(k, n)
             G2 = nx.circulant_graph(n, list(range(1, (k + 1) // 2)))
             eSet1 = set(G1.edges)
diff --git a/networkx/generators/tests/test_internet_as_graphs.py b/networkx/generators/tests/test_internet_as_graphs.py
index fdfbf05..0d578b4 100644
--- a/networkx/generators/tests/test_internet_as_graphs.py
+++ b/networkx/generators/tests/test_internet_as_graphs.py
@@ -27,10 +27,7 @@ class TestInternetASTopology:
             elif cls.G.nodes[i]["type"] == "CP":
                 cls.CP.append(i)
             else:
-                raise ValueError(
-                    "Inconsistent data in the graph\
-                        node attributes"
-                )
+                raise ValueError("Inconsistent data in the graph node attributes")
             cls.set_customers(i)
             cls.set_providers(i)
 
@@ -48,8 +45,7 @@ class TestInternetASTopology:
                         cls.customers[i].add(j)
                     elif i != customer:
                         raise ValueError(
-                            "Inconsistent data in the graph\
-                                edge attributes"
+                            "Inconsistent data in the graph edge attributes"
                         )
 
     @classmethod
@@ -66,8 +62,7 @@ class TestInternetASTopology:
                         cls.providers[i].add(j)
                     elif j != customer:
                         raise ValueError(
-                            "Inconsistent data in the graph\
-                                edge attributes"
+                            "Inconsistent data in the graph edge attributes"
                         )
 
     def test_wrong_input(self):
@@ -136,10 +131,7 @@ class TestInternetASTopology:
                 elif j == cust:
                     prov = i
                 else:
-                    raise ValueError(
-                        "Inconsistent data in the graph edge\
-                            attributes"
-                    )
+                    raise ValueError("Inconsistent data in the graph edge attributes")
                 if cust in self.M:
                     d_m += 1
                     if self.G.nodes[prov]["type"] == "T":
@@ -153,10 +145,7 @@ class TestInternetASTopology:
                     if self.G.nodes[prov]["type"] == "T":
                         t_cp += 1
                 else:
-                    raise ValueError(
-                        "Inconsistent data in the graph edge\
-                            attributes"
-                    )
+                    raise ValueError("Inconsistent data in the graph edge attributes")
             elif e["type"] == "peer":
                 if self.G.nodes[i]["type"] == "M" and self.G.nodes[j]["type"] == "M":
                     p_m_m += 1
@@ -170,10 +159,7 @@ class TestInternetASTopology:
                 ):
                     p_cp_m += 1
             else:
-                raise ValueError(
-                    "Unexpected data in the graph edge\
-                        attributes"
-                )
+                raise ValueError("Unexpected data in the graph edge attributes")
 
         assert d_m / len(self.M) == approx((2 + (2.5 * self.n) / 10000), abs=1e-0)
         assert d_cp / len(self.CP) == approx((2 + (1.5 * self.n) / 10000), abs=1e-0)
diff --git a/networkx/generators/tests/test_joint_degree_seq.py b/networkx/generators/tests/test_joint_degree_seq.py
index aa581c5..1bc0df5 100644
--- a/networkx/generators/tests/test_joint_degree_seq.py
+++ b/networkx/generators/tests/test_joint_degree_seq.py
@@ -80,7 +80,6 @@ def test_joint_degree_graph(ntimes=10):
 
 
 def test_is_valid_directed_joint_degree():
-
     in_degrees = [0, 1, 1, 2]
     out_degrees = [1, 1, 1, 1]
     nkk = {1: {1: 2, 2: 2}}
@@ -99,18 +98,17 @@ def test_is_valid_directed_joint_degree():
     nkk = {1: {1: 2, 2: 2}}
     assert not is_valid_directed_joint_degree(in_degrees, out_degrees, nkk)
 
-    # not realizable, degree seqeunces have fewer than required nodes.
+    # not realizable, degree sequences have fewer than required nodes.
     in_degrees = [0, 1, 2]
     assert not is_valid_directed_joint_degree(in_degrees, out_degrees, nkk)
 
 
 def test_directed_joint_degree_graph(n=15, m=100, ntimes=1000):
     for _ in range(ntimes):
-
         # generate gnm random graph and calculate its joint degree.
         g = gnm_random_graph(n, m, None, directed=True)
 
-        # in-degree seqeunce of g as a list of integers.
+        # in-degree sequence of g as a list of integers.
         in_degrees = list(dict(g.in_degree()).values())
         # out-degree sequence of g as a list of integers.
         out_degrees = list(dict(g.out_degree()).values())
diff --git a/networkx/generators/tests/test_lattice.py b/networkx/generators/tests/test_lattice.py
index bb99f62..5012324 100644
--- a/networkx/generators/tests/test_lattice.py
+++ b/networkx/generators/tests/test_lattice.py
@@ -159,7 +159,7 @@ class TestTriangularLatticeGraph:
             G = nx.triangular_lattice_graph(m, n)
             N = (n + 1) // 2
             assert len(G) == (m + 1) * (1 + N) - (n % 2) * ((m + 1) // 2)
-        for (i, j) in G.nodes():
+        for i, j in G.nodes():
             nbrs = G[(i, j)]
             if i < N:
                 assert (i + 1, j) in nbrs
diff --git a/networkx/generators/tests/test_line.py b/networkx/generators/tests/test_line.py
index 96380ac..7f5454e 100644
--- a/networkx/generators/tests/test_line.py
+++ b/networkx/generators/tests/test_line.py
@@ -1,7 +1,7 @@
 import pytest
 
 import networkx as nx
-import networkx.generators.line as line
+from networkx.generators import line
 from networkx.utils import edges_equal
 
 
@@ -167,55 +167,62 @@ class TestGeneratorInverseLine:
         solution = nx.path_graph(2)
         assert nx.is_isomorphic(H, solution)
 
-    def test_claw(self):
-        # This is the simplest non-line graph
-        G = nx.Graph()
-        G_edges = [[0, 1], [0, 2], [0, 3]]
-        G.add_edges_from(G_edges)
+    def test_edgeless_graph(self):
+        G = nx.empty_graph(5)
+        with pytest.raises(nx.NetworkXError, match="edgeless graph"):
+            nx.inverse_line_graph(G)
+
+    def test_selfloops_error(self):
+        G = nx.cycle_graph(4)
+        G.add_edge(0, 0)
         pytest.raises(nx.NetworkXError, nx.inverse_line_graph, G)
 
-    def test_non_line_graph(self):
-        # These are other non-line graphs
+    def test_non_line_graphs(self):
+        # Tests several known non-line graphs for impossibility
+        # Adapted from L.W.Beineke, "Characterizations of derived graphs"
+
+        # claw graph
+        claw = nx.star_graph(3)
+        pytest.raises(nx.NetworkXError, nx.inverse_line_graph, claw)
 
         # wheel graph with 6 nodes
-        G = nx.Graph()
-        G_edges = [
-            [0, 1],
-            [0, 2],
-            [0, 3],
-            [0, 4],
-            [0, 5],
-            [1, 2],
-            [2, 3],
-            [3, 4],
-            [4, 5],
-            [5, 1],
-        ]
-        G.add_edges_from(G_edges)
+        wheel = nx.wheel_graph(6)
+        pytest.raises(nx.NetworkXError, nx.inverse_line_graph, wheel)
+
+        # K5 with one edge removed
+        K5m = nx.complete_graph(5)
+        K5m.remove_edge(0, 1)
+        pytest.raises(nx.NetworkXError, nx.inverse_line_graph, K5m)
+
+        # graph without any odd triangles (contains claw as induced subgraph)
+        G = nx.compose(nx.path_graph(2), nx.complete_bipartite_graph(2, 3))
         pytest.raises(nx.NetworkXError, nx.inverse_line_graph, G)
 
-        #   3---4---5
-        #  / \ / \ /
-        # 0---1---2
-        G = nx.Graph()
-        G_edges = [
-            [0, 1],
-            [1, 2],
-            [3, 4],
-            [4, 5],
-            [0, 3],
-            [1, 3],
-            [1, 4],
-            [2, 4],
-            [2, 5],
-        ]
-        G.add_edges_from(G_edges)
+        ## Variations on a diamond graph
+
+        # Diamond + 2 edges (+ "roof")
+        G = nx.diamond_graph()
+        G.add_edges_from([(4, 0), (5, 3)])
+        pytest.raises(nx.NetworkXError, nx.inverse_line_graph, G)
+        G.add_edge(4, 5)
         pytest.raises(nx.NetworkXError, nx.inverse_line_graph, G)
 
-        # K_5 minus an edge
-        K5me = nx.complete_graph(5)
-        K5me.remove_edge(0, 1)
-        pytest.raises(nx.NetworkXError, nx.inverse_line_graph, K5me)
+        # Diamond + 2 connected edges
+        G = nx.diamond_graph()
+        G.add_edges_from([(4, 0), (4, 3)])
+        pytest.raises(nx.NetworkXError, nx.inverse_line_graph, G)
+
+        # Diamond + K3 + one edge (+ 2*K3)
+        G = nx.diamond_graph()
+        G.add_edges_from([(4, 0), (4, 1), (4, 2), (5, 3)])
+        pytest.raises(nx.NetworkXError, nx.inverse_line_graph, G)
+        G.add_edges_from([(5, 1), (5, 2)])
+        pytest.raises(nx.NetworkXError, nx.inverse_line_graph, G)
+
+        # 4 triangles
+        G = nx.diamond_graph()
+        G.add_edges_from([(4, 0), (4, 1), (5, 2), (5, 3)])
+        pytest.raises(nx.NetworkXError, nx.inverse_line_graph, G)
 
     def test_wrong_graph_type(self):
         G = nx.DiGraph()
@@ -275,3 +282,28 @@ class TestGeneratorInverseLine:
         H = nx.line_graph(G)
         J = nx.inverse_line_graph(H)
         assert nx.is_isomorphic(G, J)
+
+
+class TestGeneratorPrivateFunctions:
+    def test_triangles_error(self):
+        G = nx.diamond_graph()
+        pytest.raises(nx.NetworkXError, line._triangles, G, (4, 0))
+        pytest.raises(nx.NetworkXError, line._triangles, G, (0, 3))
+
+    def test_odd_triangles_error(self):
+        G = nx.diamond_graph()
+        pytest.raises(nx.NetworkXError, line._odd_triangle, G, (0, 1, 4))
+        pytest.raises(nx.NetworkXError, line._odd_triangle, G, (0, 1, 3))
+
+    def test_select_starting_cell_error(self):
+        G = nx.diamond_graph()
+        pytest.raises(nx.NetworkXError, line._select_starting_cell, G, (4, 0))
+        pytest.raises(nx.NetworkXError, line._select_starting_cell, G, (0, 3))
+
+    def test_diamond_graph(self):
+        G = nx.diamond_graph()
+        for edge in G.edges:
+            cell = line._select_starting_cell(G, starting_edge=edge)
+            # Starting cell should always be one of the two triangles
+            assert len(cell) == 3
+            assert all(v in G[u] for u in cell for v in cell if u != v)
diff --git a/networkx/generators/tests/test_random_graphs.py b/networkx/generators/tests/test_random_graphs.py
index c614ded..4edadec 100644
--- a/networkx/generators/tests/test_random_graphs.py
+++ b/networkx/generators/tests/test_random_graphs.py
@@ -65,7 +65,7 @@ def test_gnp_generators_edge_probability(generator, p, directed):
     edge_counts = [[0] * n for _ in range(n)]
     for i in range(runs):
         G = generator(n, p, directed=directed)
-        for (v, w) in G.edges:
+        for v, w in G.edges:
             edge_counts[v][w] += 1
             if not directed:
                 edge_counts[w][v] += 1
@@ -207,7 +207,6 @@ class TestGeneratorsRandom:
         initial_graph = nx.complete_graph(10)
 
         for seed in seeds:
-
             # This should be BA with m = m1
             BA1 = nx.barabasi_albert_graph(100, m1, seed)
             DBA1 = nx.dual_barabasi_albert_graph(100, m1, m2, 1, seed)
diff --git a/networkx/generators/tests/test_small.py b/networkx/generators/tests/test_small.py
index 5f5406f..836cbce 100644
--- a/networkx/generators/tests/test_small.py
+++ b/networkx/generators/tests/test_small.py
@@ -15,15 +15,6 @@ null = nx.null_graph()
 
 
 class TestGeneratorsSmall:
-    def test_make_small_graph(self):
-        d = ["adjacencylist", "Bull Graph", 5, [[2, 3], [1, 3, 4], [1, 2, 5], [2], [3]]]
-        G = nx.make_small_graph(d)
-        assert is_isomorphic(G, nx.bull_graph())
-
-        # Test small graph creation error with wrong ltype
-        d[0] = "erroneouslist"
-        pytest.raises(nx.NetworkXError, nx.make_small_graph, graph_description=d)
-
     def test__LCF_graph(self):
         # If n<=0, then return the null_graph
         G = nx.LCF_graph(-10, [1, 2], 100)
@@ -54,21 +45,21 @@ class TestGeneratorsSmall:
         G = nx.chvatal_graph()
         assert sorted(G) == list(range(12))
         assert G.number_of_edges() == 24
-        assert list(d for n, d in G.degree()) == 12 * [4]
+        assert [d for n, d in G.degree()] == 12 * [4]
         assert nx.diameter(G) == 2
         assert nx.radius(G) == 2
 
         G = nx.cubical_graph()
         assert sorted(G) == list(range(8))
         assert G.number_of_edges() == 12
-        assert list(d for n, d in G.degree()) == 8 * [3]
+        assert [d for n, d in G.degree()] == 8 * [3]
         assert nx.diameter(G) == 3
         assert nx.radius(G) == 3
 
         G = nx.desargues_graph()
         assert sorted(G) == list(range(20))
         assert G.number_of_edges() == 30
-        assert list(d for n, d in G.degree()) == 20 * [3]
+        assert [d for n, d in G.degree()] == 20 * [3]
 
         G = nx.diamond_graph()
         assert sorted(G) == list(range(4))
@@ -79,28 +70,28 @@ class TestGeneratorsSmall:
         G = nx.dodecahedral_graph()
         assert sorted(G) == list(range(20))
         assert G.number_of_edges() == 30
-        assert list(d for n, d in G.degree()) == 20 * [3]
+        assert [d for n, d in G.degree()] == 20 * [3]
         assert nx.diameter(G) == 5
         assert nx.radius(G) == 5
 
         G = nx.frucht_graph()
         assert sorted(G) == list(range(12))
         assert G.number_of_edges() == 18
-        assert list(d for n, d in G.degree()) == 12 * [3]
+        assert [d for n, d in G.degree()] == 12 * [3]
         assert nx.diameter(G) == 4
         assert nx.radius(G) == 3
 
         G = nx.heawood_graph()
         assert sorted(G) == list(range(14))
         assert G.number_of_edges() == 21
-        assert list(d for n, d in G.degree()) == 14 * [3]
+        assert [d for n, d in G.degree()] == 14 * [3]
         assert nx.diameter(G) == 3
         assert nx.radius(G) == 3
 
         G = nx.hoffman_singleton_graph()
         assert sorted(G) == list(range(50))
         assert G.number_of_edges() == 175
-        assert list(d for n, d in G.degree()) == 50 * [7]
+        assert [d for n, d in G.degree()] == 50 * [7]
         assert nx.diameter(G) == 2
         assert nx.radius(G) == 2
 
@@ -121,7 +112,7 @@ class TestGeneratorsSmall:
         G = nx.icosahedral_graph()
         assert sorted(G) == list(range(12))
         assert G.number_of_edges() == 30
-        assert list(d for n, d in G.degree()) == [5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5]
+        assert [d for n, d in G.degree()] == [5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5]
         assert nx.diameter(G) == 3
         assert nx.radius(G) == 3
 
@@ -133,26 +124,26 @@ class TestGeneratorsSmall:
         G = nx.moebius_kantor_graph()
         assert sorted(G) == list(range(16))
         assert G.number_of_edges() == 24
-        assert list(d for n, d in G.degree()) == 16 * [3]
+        assert [d for n, d in G.degree()] == 16 * [3]
         assert nx.diameter(G) == 4
 
         G = nx.octahedral_graph()
         assert sorted(G) == list(range(6))
         assert G.number_of_edges() == 12
-        assert list(d for n, d in G.degree()) == 6 * [4]
+        assert [d for n, d in G.degree()] == 6 * [4]
         assert nx.diameter(G) == 2
         assert nx.radius(G) == 2
 
         G = nx.pappus_graph()
         assert sorted(G) == list(range(18))
         assert G.number_of_edges() == 27
-        assert list(d for n, d in G.degree()) == 18 * [3]
+        assert [d for n, d in G.degree()] == 18 * [3]
         assert nx.diameter(G) == 4
 
         G = nx.petersen_graph()
         assert sorted(G) == list(range(10))
         assert G.number_of_edges() == 15
-        assert list(d for n, d in G.degree()) == 10 * [3]
+        assert [d for n, d in G.degree()] == 10 * [3]
         assert nx.diameter(G) == 2
         assert nx.radius(G) == 2
 
@@ -164,24 +155,24 @@ class TestGeneratorsSmall:
         G = nx.tetrahedral_graph()
         assert sorted(G) == list(range(4))
         assert G.number_of_edges() == 6
-        assert list(d for n, d in G.degree()) == [3, 3, 3, 3]
+        assert [d for n, d in G.degree()] == [3, 3, 3, 3]
         assert nx.diameter(G) == 1
         assert nx.radius(G) == 1
 
         G = nx.truncated_cube_graph()
         assert sorted(G) == list(range(24))
         assert G.number_of_edges() == 36
-        assert list(d for n, d in G.degree()) == 24 * [3]
+        assert [d for n, d in G.degree()] == 24 * [3]
 
         G = nx.truncated_tetrahedron_graph()
         assert sorted(G) == list(range(12))
         assert G.number_of_edges() == 18
-        assert list(d for n, d in G.degree()) == 12 * [3]
+        assert [d for n, d in G.degree()] == 12 * [3]
 
         G = nx.tutte_graph()
         assert sorted(G) == list(range(46))
         assert G.number_of_edges() == 69
-        assert list(d for n, d in G.degree()) == 46 * [3]
+        assert [d for n, d in G.degree()] == 46 * [3]
 
         # Test create_using with directed or multigraphs on small graphs
         pytest.raises(nx.NetworkXError, nx.tutte_graph, create_using=nx.DiGraph)
diff --git a/networkx/generators/tests/test_stochastic.py b/networkx/generators/tests/test_stochastic.py
index d75a6a0..09be4c1 100644
--- a/networkx/generators/tests/test_stochastic.py
+++ b/networkx/generators/tests/test_stochastic.py
@@ -43,7 +43,7 @@ class TestStochasticGraph:
         G = nx.MultiDiGraph()
         G.add_edges_from([(0, 1), (0, 1), (0, 2), (0, 2)])
         S = nx.stochastic_graph(G)
-        d = dict(weight=0.25)
+        d = {"weight": 0.25}
         assert sorted(S.edges(data=True)) == [
             (0, 1, d),
             (0, 1, d),
@@ -51,6 +51,17 @@ class TestStochasticGraph:
             (0, 2, d),
         ]
 
+    def test_zero_weights(self):
+        """Smoke test: ensure ZeroDivisionError is not raised."""
+        G = nx.DiGraph()
+        G.add_edge(0, 1, weight=0)
+        G.add_edge(0, 2, weight=0)
+        S = nx.stochastic_graph(G)
+        assert sorted(S.edges(data=True)) == [
+            (0, 1, {"weight": 0}),
+            (0, 2, {"weight": 0}),
+        ]
+
     def test_graph_disallowed(self):
         with pytest.raises(nx.NetworkXNotImplemented):
             nx.stochastic_graph(nx.Graph())
diff --git a/networkx/generators/trees.py b/networkx/generators/trees.py
index e380826..b512396 100644
--- a/networkx/generators/trees.py
+++ b/networkx/generators/trees.py
@@ -323,29 +323,29 @@ def random_tree(n, seed=None, create_using=None):
     Examples
     --------
     >>> tree = nx.random_tree(n=10, seed=0)
-    >>> print(nx.forest_str(tree, sources=[0]))
+    >>> nx.write_network_text(tree, sources=[0])
     ╙── 0
         ├── 3
         └── 4
             ├── 6
-            │   ├── 1
-            │   ├── 2
-            │   └── 7
-            │       └── 8
-            │           └── 5
+            │   ├── 1
+            │   ├── 2
+            │   └── 7
+            │       └── 8
+            │           └── 5
             └── 9
 
     >>> tree = nx.random_tree(n=10, seed=0, create_using=nx.DiGraph)
-    >>> print(nx.forest_str(tree))
+    >>> nx.write_network_text(tree)
     ╙── 0
         ├─╼ 3
         └─╼ 4
             ├─╼ 6
-            │   ├─╼ 1
-            │   ├─╼ 2
-            │   └─╼ 7
-            │       └─╼ 8
-            │           └─╼ 5
+            │   ├─╼ 1
+            │   ├─╼ 2
+            │   └─╼ 7
+            │       └─╼ 8
+            │           └─╼ 5
             └─╼ 9
     """
     if n == 0:
diff --git a/networkx/lazy_imports.py b/networkx/lazy_imports.py
index 49344db..6201088 100644
--- a/networkx/lazy_imports.py
+++ b/networkx/lazy_imports.py
@@ -93,7 +93,7 @@ class DelayedImportErrorModule(types.ModuleType):
             fd = self.__frame_data
             raise ModuleNotFoundError(
                 f"No module named '{fd['spec']}'\n\n"
-                "This error is lazily reported, having originally occured in\n"
+                "This error is lazily reported, having originally occurred in\n"
                 f'  File {fd["filename"]}, line {fd["lineno"]}, in {fd["function"]}\n\n'
                 f'----> {"".join(fd["code_context"]).strip()}'
             )
diff --git a/networkx/linalg/algebraicconnectivity.py b/networkx/linalg/algebraicconnectivity.py
index b1b9ef6..2e1aca6 100644
--- a/networkx/linalg/algebraicconnectivity.py
+++ b/networkx/linalg/algebraicconnectivity.py
@@ -10,7 +10,12 @@ from networkx.utils import (
     reverse_cuthill_mckee_ordering,
 )
 
-__all__ = ["algebraic_connectivity", "fiedler_vector", "spectral_ordering"]
+__all__ = [
+    "algebraic_connectivity",
+    "fiedler_vector",
+    "spectral_ordering",
+    "spectral_bisection",
+]
 
 
 class _PCGSolver:
@@ -317,7 +322,7 @@ def _get_fiedler_func(method):
 def algebraic_connectivity(
     G, weight="weight", normalized=False, tol=1e-8, method="tracemin_pcg", seed=None
 ):
-    """Returns the algebraic connectivity of an undirected graph.
+    r"""Returns the algebraic connectivity of an undirected graph.
 
     The algebraic connectivity of a connected undirected graph is the second
     smallest eigenvalue of its Laplacian matrix.
@@ -377,6 +382,19 @@ def algebraic_connectivity(
     See Also
     --------
     laplacian_matrix
+
+    Examples
+    --------
+    For undirected graphs algebraic connectivity can tell us if a graph is connected or not
+    `G` is connected iff ``algebraic_connectivity(G) > 0``:
+
+    >>> G = nx.complete_graph(5)
+    >>> nx.algebraic_connectivity(G) > 0
+    True
+    >>> G.add_node(10)  # G is no longer connected
+    >>> nx.algebraic_connectivity(G) > 0
+    False
+
     """
     if len(G) < 2:
         raise nx.NetworkXError("graph has less than two nodes.")
@@ -572,3 +590,70 @@ def spectral_ordering(
             order.extend(component)
 
     return order
+
+
+def spectral_bisection(
+    G, weight="weight", normalized=False, tol=1e-8, method="tracemin_pcg", seed=None
+):
+    """Bisect the graph using the Fiedler vector.
+
+    This method uses the Fiedler vector to bisect a graph.
+    The partition is defined by the nodes which are associated with
+    either positive or negative values in the vector.
+
+    Parameters
+    ----------
+    G : NetworkX Graph
+
+    weight : str, optional (default: weight)
+        The data key used to determine the weight of each edge. If None, then
+        each edge has unit weight.
+
+    normalized : bool, optional (default: False)
+        Whether the normalized Laplacian matrix is used.
+
+    tol : float, optional (default: 1e-8)
+        Tolerance of relative residual in eigenvalue computation.
+
+    method : string, optional (default: 'tracemin_pcg')
+        Method of eigenvalue computation. It must be one of the tracemin
+        options shown below (TraceMIN), 'lanczos' (Lanczos iteration)
+        or 'lobpcg' (LOBPCG).
+
+        The TraceMIN algorithm uses a linear system solver. The following
+        values allow specifying the solver to be used.
+
+        =============== ========================================
+        Value           Solver
+        =============== ========================================
+        'tracemin_pcg'  Preconditioned conjugate gradient method
+        'tracemin_lu'   LU factorization
+        =============== ========================================
+
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness<randomness>`.
+
+    Returns
+    -------
+    bisection : tuple of sets
+        Sets with the bisection of nodes
+
+    Examples
+    --------
+    >>> G = nx.barbell_graph(3, 0)
+    >>> nx.spectral_bisection(G)
+    ({0, 1, 2}, {3, 4, 5})
+
+    References
+    ----------
+    .. [1] M. E. J. Newman 'Networks: An Introduction', pages 364-370
+       Oxford University Press 2011.
+    """
+    import numpy as np
+
+    v = nx.fiedler_vector(G, weight, normalized, tol, method, seed)
+    nodes = np.array(list(G))
+    pos_vals = v >= 0
+
+    return set(nodes[~pos_vals]), set(nodes[pos_vals])
diff --git a/networkx/linalg/attrmatrix.py b/networkx/linalg/attrmatrix.py
index 685d393..d202999 100644
--- a/networkx/linalg/attrmatrix.py
+++ b/networkx/linalg/attrmatrix.py
@@ -150,7 +150,7 @@ def attr_matrix(
     dtype=None,
     order=None,
 ):
-    """Returns a NumPy matrix using attributes from G.
+    """Returns the attribute matrix using attributes from `G` as a numpy array.
 
     If only `G` is passed in, then the adjacency matrix is constructed.
 
@@ -164,12 +164,12 @@ def attr_matrix(
     Parameters
     ----------
     G : graph
-        The NetworkX graph used to construct the NumPy matrix.
+        The NetworkX graph used to construct the attribute matrix.
 
     edge_attr : str, optional
         Each element of the matrix represents a running total of the
         specified edge attribute for edges whose node attributes correspond
-        to the rows/cols of the matirx. The attribute must be present for
+        to the rows/cols of the matrix. The attribute must be present for
         all edges in the graph. If no attribute is specified, then we
         just count the number of edges whose node attributes correspond
         to the matrix element.
@@ -204,11 +204,11 @@ def attr_matrix(
 
     Returns
     -------
-    M : NumPy matrix
+    M : 2D NumPy ndarray
         The attribute matrix.
 
     ordering : list
-        If `rc_order` was specified, then only the matrix is returned.
+        If `rc_order` was specified, then only the attribute matrix is returned.
         However, if `rc_order` was None, then the ordering used to construct
         the matrix is returned as well.
 
@@ -221,16 +221,16 @@ def attr_matrix(
     >>> G.add_edge(0, 2, thickness=2)
     >>> G.add_edge(1, 2, thickness=3)
     >>> nx.attr_matrix(G, rc_order=[0, 1, 2])
-    matrix([[0., 1., 1.],
-            [1., 0., 1.],
-            [1., 1., 0.]])
+    array([[0., 1., 1.],
+           [1., 0., 1.],
+           [1., 1., 0.]])
 
     Alternatively, we can obtain the matrix describing edge thickness.
 
     >>> nx.attr_matrix(G, edge_attr="thickness", rc_order=[0, 1, 2])
-    matrix([[0., 1., 2.],
-            [1., 0., 3.],
-            [2., 3., 0.]])
+    array([[0., 1., 2.],
+           [1., 0., 3.],
+           [2., 3., 0.]])
 
     We can also color the nodes and ask for the probability distribution over
     all edges (u,v) describing:
@@ -242,8 +242,8 @@ def attr_matrix(
     >>> G.nodes[2]["color"] = "blue"
     >>> rc = ["red", "blue"]
     >>> nx.attr_matrix(G, node_attr="color", normalized=True, rc_order=rc)
-    matrix([[0.33333333, 0.66666667],
-            [1.        , 0.        ]])
+    array([[0.33333333, 0.66666667],
+           [1.        , 0.        ]])
 
     For example, the above tells us that for all edges (u,v):
 
@@ -256,8 +256,8 @@ def attr_matrix(
     Finally, we can obtain the total weights listed by the node colors.
 
     >>> nx.attr_matrix(G, edge_attr="weight", node_attr="color", rc_order=rc)
-    matrix([[3., 2.],
-            [2., 0.]])
+    array([[3., 2.],
+           [2., 0.]])
 
     Thus, the total weight over all edges (u,v) with u and v having colors:
 
@@ -298,19 +298,6 @@ def attr_matrix(
     if normalized:
         M /= M.sum(axis=1).reshape((N, 1))
 
-    import warnings
-
-    warnings.warn(
-        (
-            "attr_matrix will return an numpy.ndarray instead of a numpy.matrix "
-            "in NetworkX 3.0."
-        ),
-        category=FutureWarning,
-        stacklevel=2,
-    )
-    # TODO: Remove asmatrix in NetworkX 3.0
-    M = np.asmatrix(M)
-
     if rc_order is None:
         return M, ordering
     else:
@@ -320,7 +307,7 @@ def attr_matrix(
 def attr_sparse_matrix(
     G, edge_attr=None, node_attr=None, normalized=False, rc_order=None, dtype=None
 ):
-    """Returns a SciPy sparse matrix using attributes from G.
+    """Returns a SciPy sparse array using attributes from G.
 
     If only `G` is passed in, then the adjacency matrix is constructed.
 
@@ -339,7 +326,7 @@ def attr_sparse_matrix(
     edge_attr : str, optional
         Each element of the matrix represents a running total of the
         specified edge attribute for edges whose node attributes correspond
-        to the rows/cols of the matirx. The attribute must be present for
+        to the rows/cols of the matrix. The attribute must be present for
         all edges in the graph. If no attribute is specified, then we
         just count the number of edges whose node attributes correspond
         to the matrix element.
@@ -369,7 +356,7 @@ def attr_sparse_matrix(
 
     Returns
     -------
-    M : SciPy sparse matrix
+    M : SciPy sparse array
         The attribute matrix.
 
     ordering : list
diff --git a/networkx/linalg/bethehessianmatrix.py b/networkx/linalg/bethehessianmatrix.py
index c2595e4..443ba5b 100644
--- a/networkx/linalg/bethehessianmatrix.py
+++ b/networkx/linalg/bethehessianmatrix.py
@@ -32,7 +32,7 @@ def bethe_hessian_matrix(G, r=None, nodelist=None):
 
     Returns
     -------
-    H : scipy.sparse.csr_matrix
+    H : scipy.sparse.csr_array
       The Bethe Hessian matrix of `G`, with parameter `r`.
 
     Examples
@@ -75,12 +75,4 @@ def bethe_hessian_matrix(G, r=None, nodelist=None):
     D = sp.sparse.csr_array(sp.sparse.spdiags(A.sum(axis=1), 0, m, n, format="csr"))
     # TODO: Rm csr_array wrapper when eye array creation becomes available
     I = sp.sparse.csr_array(sp.sparse.eye(m, n, format="csr"))
-    import warnings
-
-    warnings.warn(
-        "bethe_hessian_matrix will return a scipy.sparse array instead of a matrix in Networkx 3.0",
-        FutureWarning,
-        stacklevel=2,
-    )
-    # TODO: Remove the csr_matrix wrapper in NetworkX 3.0
-    return sp.sparse.csr_matrix((r**2 - 1) * I - r * A + D)
+    return (r**2 - 1) * I - r * A + D
diff --git a/networkx/linalg/graphmatrix.py b/networkx/linalg/graphmatrix.py
index ad0d2e3..24354e5 100644
--- a/networkx/linalg/graphmatrix.py
+++ b/networkx/linalg/graphmatrix.py
@@ -3,7 +3,7 @@ Adjacency matrix and incidence matrix of graphs.
 """
 import networkx as nx
 
-__all__ = ["incidence_matrix", "adj_matrix", "adjacency_matrix"]
+__all__ = ["incidence_matrix", "adjacency_matrix"]
 
 
 def incidence_matrix(G, nodelist=None, edgelist=None, oriented=False, weight=None):
@@ -40,7 +40,7 @@ def incidence_matrix(G, nodelist=None, edgelist=None, oriented=False, weight=Non
 
     Returns
     -------
-    A : SciPy sparse matrix
+    A : SciPy sparse array
       The incidence matrix of G.
 
     Notes
@@ -93,14 +93,6 @@ def incidence_matrix(G, nodelist=None, edgelist=None, oriented=False, weight=Non
         else:
             A[ui, ei] = wt
             A[vi, ei] = wt
-    import warnings
-
-    warnings.warn(
-        "incidence_matrix will return a scipy.sparse array instead of a matrix in Networkx 3.0.",
-        FutureWarning,
-        stacklevel=2,
-    )
-    # TODO: Rm sp.sparse.csc_matrix in Networkx 3.0
     return A.asformat("csc")
 
 
@@ -126,7 +118,7 @@ def adjacency_matrix(G, nodelist=None, dtype=None, weight="weight"):
 
     Returns
     -------
-    A : SciPy sparse matrix
+    A : SciPy sparse array
       Adjacency matrix representation of G.
 
     Notes
@@ -145,7 +137,7 @@ def adjacency_matrix(G, nodelist=None, dtype=None, weight="weight"):
     diagonal matrix entry value to the edge weight attribute
     (or the number 1 if the edge has no weight attribute).  If the
     alternate convention of doubling the edge weight is desired the
-    resulting Scipy sparse matrix can be modified as follows:
+    resulting SciPy sparse array can be modified as follows:
 
     >>> G = nx.Graph([(1, 1)])
     >>> A = nx.adjacency_matrix(G)
@@ -162,29 +154,4 @@ def adjacency_matrix(G, nodelist=None, dtype=None, weight="weight"):
     to_dict_of_dicts
     adjacency_spectrum
     """
-    import warnings
-
-    warnings.warn(
-        "adjacency_matrix will return a scipy.sparse array instead of a matrix in Networkx 3.0.",
-        FutureWarning,
-        stacklevel=2,
-    )
-    # TODO: Change to `to_scipy_sparse_array` for networkx 3.0
-    return nx.to_scipy_sparse_matrix(G, nodelist=nodelist, dtype=dtype, weight=weight)
-
-
-def _adj_matrix_warning(G, nodelist=None, dtype=None, weight="weight"):
-    import warnings
-
-    warnings.warn(
-        (
-            "adj_matrix is deprecated and will be removed in version 3.0.\n"
-            "Use `adjacency_matrix` instead\n"
-        ),
-        DeprecationWarning,
-        stacklevel=2,
-    )
-    return adjacency_matrix(G, nodelist, dtype, weight)
-
-
-adj_matrix = _adj_matrix_warning
+    return nx.to_scipy_sparse_array(G, nodelist=nodelist, dtype=dtype, weight=weight)
diff --git a/networkx/linalg/laplacianmatrix.py b/networkx/linalg/laplacianmatrix.py
index f1053b7..4448b3e 100644
--- a/networkx/linalg/laplacianmatrix.py
+++ b/networkx/linalg/laplacianmatrix.py
@@ -34,7 +34,7 @@ def laplacian_matrix(G, nodelist=None, weight="weight"):
 
     Returns
     -------
-    L : SciPy sparse matrix
+    L : SciPy sparse array
       The Laplacian matrix of G.
 
     Notes
@@ -46,6 +46,21 @@ def laplacian_matrix(G, nodelist=None, weight="weight"):
     to_numpy_array
     normalized_laplacian_matrix
     laplacian_spectrum
+
+    Examples
+    --------
+    For graphs with multiple connected components, L is permutation-similar
+    to a block diagonal matrix where each block is the respective Laplacian
+    matrix for each component.
+
+    >>> G = nx.Graph([(1, 2), (2, 3), (4, 5)])
+    >>> print(nx.laplacian_matrix(G).toarray())
+    [[ 1 -1  0  0  0]
+     [-1  2 -1  0  0]
+     [ 0 -1  1  0  0]
+     [ 0  0  0  1 -1]
+     [ 0  0  0 -1  1]]
+
     """
     import scipy as sp
     import scipy.sparse  # call as sp.sparse
@@ -56,15 +71,7 @@ def laplacian_matrix(G, nodelist=None, weight="weight"):
     n, m = A.shape
     # TODO: rm csr_array wrapper when spdiags can produce arrays
     D = sp.sparse.csr_array(sp.sparse.spdiags(A.sum(axis=1), 0, m, n, format="csr"))
-    import warnings
-
-    warnings.warn(
-        "laplacian_matrix will return a scipy.sparse array instead of a matrix in Networkx 3.0.",
-        FutureWarning,
-        stacklevel=2,
-    )
-    # TODO: rm sp.sparse.csr_matrix in version 3.0
-    return sp.sparse.csr_matrix(D - A)
+    return D - A
 
 
 @not_implemented_for("directed")
@@ -95,7 +102,7 @@ def normalized_laplacian_matrix(G, nodelist=None, weight="weight"):
 
     Returns
     -------
-    N : Scipy sparse matrix
+    N : SciPy sparse array
       The normalized Laplacian matrix of G.
 
     Notes
@@ -136,15 +143,7 @@ def normalized_laplacian_matrix(G, nodelist=None, weight="weight"):
     diags_sqrt[np.isinf(diags_sqrt)] = 0
     # TODO: rm csr_array wrapper when spdiags can produce arrays
     DH = sp.sparse.csr_array(sp.sparse.spdiags(diags_sqrt, 0, m, n, format="csr"))
-    import warnings
-
-    warnings.warn(
-        "normalized_laplacian_matrix will return a scipy.sparse array instead of a matrix in Networkx 3.0.",
-        FutureWarning,
-        stacklevel=2,
-    )
-    # TODO: rm csr_matrix wrapper for NX 3.0
-    return sp.sparse.csr_matrix(DH @ (L @ DH))
+    return DH @ (L @ DH)
 
 
 def total_spanning_tree_weight(G, weight=None):
@@ -269,15 +268,7 @@ def directed_laplacian_matrix(
     # NOTE: This could be sparsified for the non-pagerank cases
     I = np.identity(len(G))
 
-    import warnings
-
-    warnings.warn(
-        "directed_laplacian_matrix will return a numpy array instead of a matrix in NetworkX 3.0",
-        FutureWarning,
-        stacklevel=2,
-    )
-    # TODO: rm np.asmatrix for networkx 3.0
-    return np.asmatrix(I - (Q + Q.T) / 2.0)
+    return I - (Q + Q.T) / 2.0
 
 
 @not_implemented_for("undirected")
@@ -356,17 +347,7 @@ def directed_combinatorial_laplacian_matrix(
     # TODO: Rm csr_array wrapper when spdiags array creation becomes available
     Phi = sp.sparse.csr_array(sp.sparse.spdiags(p, 0, n, n)).toarray()
 
-    import warnings
-
-    warnings.warn(
-        "directed_combinatorial_laplacian_matrix will return a numpy array instead of a matrix in NetworkX 3.0",
-        FutureWarning,
-        stacklevel=2,
-    )
-    # TODO: Rm np.asmatrix for networkx 3.0
-    import numpy as np
-
-    return np.asmatrix(Phi - (Phi @ P + P.T @ Phi) / 2.0)
+    return Phi - (Phi @ P + P.T @ Phi) / 2.0
 
 
 def _transition_matrix(G, nodelist=None, weight="weight", walk_type=None, alpha=0.95):
diff --git a/networkx/linalg/modularitymatrix.py b/networkx/linalg/modularitymatrix.py
index 978d226..59087b8 100644
--- a/networkx/linalg/modularitymatrix.py
+++ b/networkx/linalg/modularitymatrix.py
@@ -39,7 +39,7 @@ def modularity_matrix(G, nodelist=None, weight=None):
 
     Returns
     -------
-    B : Numpy matrix
+    B : Numpy array
       The modularity matrix of G.
 
     Examples
@@ -71,15 +71,7 @@ def modularity_matrix(G, nodelist=None, weight=None):
     # Expected adjacency matrix
     X = np.outer(k, k) / (2 * m)
 
-    import warnings
-
-    warnings.warn(
-        "modularity_matrix will return a numpy array instead of a matrix in NetworkX 3.0.",
-        FutureWarning,
-        stacklevel=2,
-    )
-    # TODO: rm np.asmatrix for networkx 3.0
-    return np.asmatrix(A - X)
+    return A - X
 
 
 @not_implemented_for("undirected")
@@ -116,7 +108,7 @@ def directed_modularity_matrix(G, nodelist=None, weight=None):
 
     Returns
     -------
-    B : Numpy matrix
+    B : Numpy array
       The modularity matrix of G.
 
     Examples
@@ -169,12 +161,4 @@ def directed_modularity_matrix(G, nodelist=None, weight=None):
     # Expected adjacency matrix
     X = np.outer(k_out, k_in) / m
 
-    import warnings
-
-    warnings.warn(
-        "directed_modularity_matrix will return a numpy array instead of a matrix in NetworkX 3.0.",
-        FutureWarning,
-        stacklevel=2,
-    )
-    # TODO: rm np.asmatrix for networkx 3.0
-    return np.asmatrix(A - X)
+    return A - X
diff --git a/networkx/linalg/spectrum.py b/networkx/linalg/spectrum.py
index caa95fc..66aedbb 100644
--- a/networkx/linalg/spectrum.py
+++ b/networkx/linalg/spectrum.py
@@ -32,11 +32,23 @@ def laplacian_spectrum(G, weight="weight"):
     Notes
     -----
     For MultiGraph/MultiDiGraph, the edges weights are summed.
-    See to_numpy_array for other options.
+    See :func:`~networkx.convert_matrix.to_numpy_array` for other options.
 
     See Also
     --------
     laplacian_matrix
+
+    Examples
+    --------
+    The multiplicity of 0 as an eigenvalue of the laplacian matrix is equal
+    to the number of connected components of G.
+
+    >>> G = nx.Graph()  # Create a graph with 5 nodes and 3 connected components
+    >>> G.add_nodes_from(range(5))
+    >>> G.add_edges_from([(0, 2), (3, 4)])
+    >>> nx.laplacian_spectrum(G)
+    array([0., 0., 0., 2., 2.])
+
     """
     import scipy as sp
     import scipy.linalg  # call as sp.linalg
diff --git a/networkx/linalg/tests/test_algebraic_connectivity.py b/networkx/linalg/tests/test_algebraic_connectivity.py
index 7a86bd0..089d917 100644
--- a/networkx/linalg/tests/test_algebraic_connectivity.py
+++ b/networkx/linalg/tests/test_algebraic_connectivity.py
@@ -46,10 +46,25 @@ def test_fiedler_vector_tracemin_unknown():
         )
 
 
+def test_spectral_bisection():
+    pytest.importorskip("scipy")
+    G = nx.barbell_graph(3, 0)
+    C = nx.spectral_bisection(G)
+    assert C == ({0, 1, 2}, {3, 4, 5})
+
+    mapping = dict(enumerate("badfec"))
+    G = nx.relabel_nodes(G, mapping)
+    C = nx.spectral_bisection(G)
+    assert C == (
+        {mapping[0], mapping[1], mapping[2]},
+        {mapping[3], mapping[4], mapping[5]},
+    )
+
+
 def check_eigenvector(A, l, x):
     nx = np.linalg.norm(x)
     # Check zeroness.
-    assert not nx == pytest.approx(0, abs=1e-7)
+    assert nx != pytest.approx(0, abs=1e-07)
     y = A @ x
     ny = np.linalg.norm(y)
     # Check collinearity.
diff --git a/networkx/readwrite/__init__.py b/networkx/readwrite/__init__.py
index b97724b..f655098 100644
--- a/networkx/readwrite/__init__.py
+++ b/networkx/readwrite/__init__.py
@@ -4,50 +4,9 @@ A package for reading and writing graphs in various formats.
 """
 
 
-def __getattr__(name):
-    """Remove functions and provide informative error messages."""
-    if name == "nx_yaml":
-        raise ImportError(
-            "\nThe nx_yaml module has been removed from NetworkX.\n"
-            "Please use the `yaml` package directly for working with yaml data.\n"
-            "For example, a networkx.Graph `G` can be written to and loaded\n"
-            "from a yaml file with:\n\n"
-            "    import yaml\n\n"
-            "    with open('path_to_yaml_file', 'w') as fh:\n"
-            "        yaml.dump(G, fh)\n"
-            "    with open('path_to_yaml_file', 'r') as fh:\n"
-            "        G = yaml.load(fh, Loader=yaml.Loader)\n\n"
-            "Note that yaml.Loader is considered insecure - see the pyyaml\n"
-            "documentation for further details.\n\n"
-            "This message will be removed in NetworkX 3.0."
-        )
-    if name == "read_yaml":
-        raise ImportError(
-            "\nread_yaml has been removed from NetworkX, please use `yaml`\n"
-            "directly:\n\n"
-            "    import yaml\n\n"
-            "    with open('path', 'r') as fh:\n"
-            "        yaml.load(fh, Loader=yaml.Loader)\n\n"
-            "Note that yaml.Loader is considered insecure - see the pyyaml\n"
-            "documentation for further details.\n\n"
-            "This message will be removed in NetworkX 3.0."
-        )
-    if name == "write_yaml":
-        raise ImportError(
-            "\nwrite_yaml has been removed from NetworkX, please use `yaml`\n"
-            "directly:\n\n"
-            "    import yaml\n\n"
-            "    with open('path_for_yaml_output', 'w') as fh:\n"
-            "        yaml.dump(G_to_be_yaml, fh)\n\n"
-            "This message will be removed in NetworkX 3.0."
-        )
-    raise AttributeError(f"module {__name__} has no attribute {name}")
-
-
 from networkx.readwrite.adjlist import *
 from networkx.readwrite.multiline_adjlist import *
 from networkx.readwrite.edgelist import *
-from networkx.readwrite.gpickle import *
 from networkx.readwrite.pajek import *
 from networkx.readwrite.leda import *
 from networkx.readwrite.sparse6 import *
@@ -55,6 +14,5 @@ from networkx.readwrite.graph6 import *
 from networkx.readwrite.gml import *
 from networkx.readwrite.graphml import *
 from networkx.readwrite.gexf import *
-from networkx.readwrite.nx_shp import *
 from networkx.readwrite.json_graph import *
 from networkx.readwrite.text import *
diff --git a/networkx/readwrite/gexf.py b/networkx/readwrite/gexf.py
index b7c5a5a..e7e8a2b 100644
--- a/networkx/readwrite/gexf.py
+++ b/networkx/readwrite/gexf.py
@@ -2,7 +2,7 @@
 
 .. warning::
     This parser uses the standard xml library present in Python, which is
-    insecure - see :doc:`library/xml` for additional information.
+    insecure - see :external+python:mod:`xml` for additional information.
     Only parse GEFX files you trust.
 
 GEXF (Graph Exchange XML Format) is a language for describing complex
@@ -177,27 +177,32 @@ def read_gexf(path, node_type=None, relabel=False, version="1.2draft"):
 
 
 class GEXF:
-    versions = {}
-    d = {
-        "NS_GEXF": "http://www.gexf.net/1.1draft",
-        "NS_VIZ": "http://www.gexf.net/1.1draft/viz",
-        "NS_XSI": "http://www.w3.org/2001/XMLSchema-instance",
-        "SCHEMALOCATION": " ".join(
-            ["http://www.gexf.net/1.1draft", "http://www.gexf.net/1.1draft/gexf.xsd"]
-        ),
-        "VERSION": "1.1",
+    versions = {
+        "1.1draft": {
+            "NS_GEXF": "http://www.gexf.net/1.1draft",
+            "NS_VIZ": "http://www.gexf.net/1.1draft/viz",
+            "NS_XSI": "http://www.w3.org/2001/XMLSchema-instance",
+            "SCHEMALOCATION": " ".join(
+                [
+                    "http://www.gexf.net/1.1draft",
+                    "http://www.gexf.net/1.1draft/gexf.xsd",
+                ]
+            ),
+            "VERSION": "1.1",
+        },
+        "1.2draft": {
+            "NS_GEXF": "http://www.gexf.net/1.2draft",
+            "NS_VIZ": "http://www.gexf.net/1.2draft/viz",
+            "NS_XSI": "http://www.w3.org/2001/XMLSchema-instance",
+            "SCHEMALOCATION": " ".join(
+                [
+                    "http://www.gexf.net/1.2draft",
+                    "http://www.gexf.net/1.2draft/gexf.xsd",
+                ]
+            ),
+            "VERSION": "1.2",
+        },
     }
-    versions["1.1draft"] = d
-    d = {
-        "NS_GEXF": "http://www.gexf.net/1.2draft",
-        "NS_VIZ": "http://www.gexf.net/1.2draft/viz",
-        "NS_XSI": "http://www.w3.org/2001/XMLSchema-instance",
-        "SCHEMALOCATION": " ".join(
-            ["http://www.gexf.net/1.2draft", "http://www.gexf.net/1.2draft/gexf.xsd"]
-        ),
-        "VERSION": "1.2",
-    }
-    versions["1.2draft"] = d
 
     def construct_types(self):
         types = [
@@ -564,7 +569,7 @@ class GEXFWriter(GEXF):
                         r=str(color.get("r")),
                         g=str(color.get("g")),
                         b=str(color.get("b")),
-                        a=str(color.get("a")),
+                        a=str(color.get("a", 1.0)),
                     )
                 element.append(e)
 
diff --git a/networkx/readwrite/gml.py b/networkx/readwrite/gml.py
index 1a14b80..ffa5cb3 100644
--- a/networkx/readwrite/gml.py
+++ b/networkx/readwrite/gml.py
@@ -363,6 +363,11 @@ def parse_gml_lines(lines, label, destringizer):
                         value = destringizer(value)
                     except ValueError:
                         pass
+                # Special handling for empty lists and tuples
+                if value == "()":
+                    value = ()
+                if value == "[]":
+                    value = []
                 curr_token = next(tokens)
             elif category == Pattern.DICT_START:
                 curr_token, value = parse_dict(curr_token)
@@ -381,7 +386,7 @@ def parse_gml_lines(lines, label, destringizer):
                     except Exception:
                         msg = (
                             "an int, float, string, '[' or string"
-                            + " convertable ASCII value for node id or label"
+                            + " convertible ASCII value for node id or label"
                         )
                         unexpected(curr_token, msg)
                 # Special handling for nan and infinity.  Since the gml language
@@ -658,7 +663,7 @@ def generate_gml(G, stringizer=None):
         label "1"
       ]
     ]
-    >>> G = nx.OrderedMultiGraph([("a", "b"), ("a", "b")])
+    >>> G = nx.MultiGraph([("a", "b"), ("a", "b")])
     >>> print("\n".join(nx.generate_gml(G)))
     graph [
       multigraph 1
@@ -728,12 +733,9 @@ def generate_gml(G, stringizer=None):
                 for key, value in value.items():
                     yield from stringize(key, value, (), next_indent)
                 yield indent + "]"
-            elif (
-                isinstance(value, (list, tuple))
-                and key != "label"
-                and value
-                and not in_list
-            ):
+            elif isinstance(value, (list, tuple)) and key != "label" and not in_list:
+                if len(value) == 0:
+                    yield indent + key + " " + f'"{value!r}"'
                 if len(value) == 1:
                     yield indent + key + " " + f'"{LIST_START_VALUE}"'
                 for val in value:
diff --git a/networkx/readwrite/gpickle.py b/networkx/readwrite/gpickle.py
deleted file mode 100644
index 0054afd..0000000
--- a/networkx/readwrite/gpickle.py
+++ /dev/null
@@ -1,109 +0,0 @@
-"""
-**************
-Pickled Graphs
-**************
-Read and write NetworkX graphs as Python pickles.
-
-.. warning::
-    The pickle library is not secure and can be used to create arbitray objects. 
-    Only unpickle data you trust - see :doc:`library/pickle` for additional information.
-
-"The pickle module implements a fundamental, but powerful algorithm
-for serializing and de-serializing a Python object
-structure. "Pickling" is the process whereby a Python object hierarchy
-is converted into a byte stream, and "unpickling" is the inverse
-operation, whereby a byte stream is converted back into an object
-hierarchy."
-
-Note that NetworkX graphs can contain any hashable Python object as
-node (not just integers and strings).  For arbitrary data types it may
-be difficult to represent the data as text.  In that case using Python
-pickles to store the graph data can be used.
-
-Format
-------
-See https://docs.python.org/3/library/pickle.html
-"""
-
-__all__ = ["read_gpickle", "write_gpickle"]
-
-import pickle
-import warnings
-
-from networkx.utils import open_file
-
-
-@open_file(1, mode="wb")
-def write_gpickle(G, path, protocol=pickle.HIGHEST_PROTOCOL):
-    """Write graph in Python pickle format.
-
-    Pickles are a serialized byte stream of a Python object [1]_.
-    This format will preserve Python objects used as nodes or edges.
-
-    Parameters
-    ----------
-    G : graph
-       A NetworkX graph
-
-    path : file or string
-       File or filename to write.
-       Filenames ending in .gz or .bz2 will be compressed.
-
-    protocol : integer
-        Pickling protocol to use. Default value: ``pickle.HIGHEST_PROTOCOL``.
-
-    Examples
-    --------
-    >>> G = nx.path_graph(4)
-    >>> nx.write_gpickle(G, "test.gpickle")
-
-    References
-    ----------
-    .. [1] https://docs.python.org/3/library/pickle.html
-
-    .. deprecated:: 2.6
-    """
-    msg = (
-        "write_gpickle is deprecated and will be removed in 3.0."
-        "Use ``pickle.dump(G, path, protocol)``"
-    )
-    warnings.warn(msg, DeprecationWarning, stacklevel=2)
-    pickle.dump(G, path, protocol)
-
-
-@open_file(0, mode="rb")
-def read_gpickle(path):
-    """Read graph object in Python pickle format.
-
-    Pickles are a serialized byte stream of a Python object [1]_.
-    This format will preserve Python objects used as nodes or edges.
-
-    Parameters
-    ----------
-    path : file or string
-       File or filename to write.
-       Filenames ending in .gz or .bz2 will be uncompressed.
-
-    Returns
-    -------
-    G : graph
-       A NetworkX graph
-
-    Examples
-    --------
-    >>> G = nx.path_graph(4)
-    >>> nx.write_gpickle(G, "test.gpickle")
-    >>> G = nx.read_gpickle("test.gpickle")
-
-    References
-    ----------
-    .. [1] https://docs.python.org/3/library/pickle.html
-
-    .. deprecated:: 2.6
-    """
-    msg = (
-        "read_gpickle is deprecated and will be removed in 3.0."
-        "Use ``pickle.load(path)``"
-    )
-    warnings.warn(msg, DeprecationWarning, stacklevel=2)
-    return pickle.load(path)
diff --git a/networkx/readwrite/graph6.py b/networkx/readwrite/graph6.py
index 140d9fa..ef5345b 100644
--- a/networkx/readwrite/graph6.py
+++ b/networkx/readwrite/graph6.py
@@ -121,7 +121,7 @@ def from_graph6_bytes(bytes_in):
 
     G = nx.Graph()
     G.add_nodes_from(range(n))
-    for (i, j), b in zip([(i, j) for j in range(1, n) for i in range(j)], bits()):
+    for (i, j), b in zip(((i, j) for j in range(1, n) for i in range(j)), bits()):
         if b:
             G.add_edge(i, j)
 
diff --git a/networkx/readwrite/graphml.py b/networkx/readwrite/graphml.py
index 0a17592..e8fb377 100644
--- a/networkx/readwrite/graphml.py
+++ b/networkx/readwrite/graphml.py
@@ -7,7 +7,7 @@ Read and write graphs in GraphML format.
 .. warning::
 
     This parser uses the standard xml library present in Python, which is
-    insecure - see :doc:`library/xml` for additional information.
+    insecure - see :external+python:mod:`xml` for additional information.
     Only parse GraphML files you trust.
 
 This implementation does not support mixed graphs (directed and unidirected
@@ -643,8 +643,8 @@ class GraphMLWriter(GraphML):
         # data that needs to be added to them.
         # We postpone processing in order to do type inference/generalization.
         # See self.attr_type
-        for (xml_obj, data) in self.attributes.items():
-            for (k, v, scope, default) in data:
+        for xml_obj, data in self.attributes.items():
+            for k, v, scope, default in data:
                 xml_obj.append(
                     self.add_data(
                         str(k), self.attr_type(k, scope, v), str(v), scope, default
diff --git a/networkx/readwrite/json_graph/__init__.py b/networkx/readwrite/json_graph/__init__.py
index 7715fbb..2ee9d12 100644
--- a/networkx/readwrite/json_graph/__init__.py
+++ b/networkx/readwrite/json_graph/__init__.py
@@ -15,5 +15,4 @@ The three formats that you can generate with NetworkX are:
 from networkx.readwrite.json_graph.node_link import *
 from networkx.readwrite.json_graph.adjacency import *
 from networkx.readwrite.json_graph.tree import *
-from networkx.readwrite.json_graph.jit import *
 from networkx.readwrite.json_graph.cytoscape import *
diff --git a/networkx/readwrite/json_graph/adjacency.py b/networkx/readwrite/json_graph/adjacency.py
index 7962ea9..0cb866a 100644
--- a/networkx/readwrite/json_graph/adjacency.py
+++ b/networkx/readwrite/json_graph/adjacency.py
@@ -4,7 +4,7 @@ import networkx as nx
 
 __all__ = ["adjacency_data", "adjacency_graph"]
 
-_attrs = dict(id="id", key="key")
+_attrs = {"id": "id", "key": "key"}
 
 
 def adjacency_data(G, attrs=_attrs):
diff --git a/networkx/readwrite/json_graph/cytoscape.py b/networkx/readwrite/json_graph/cytoscape.py
index 296242c..c0c0e3f 100644
--- a/networkx/readwrite/json_graph/cytoscape.py
+++ b/networkx/readwrite/json_graph/cytoscape.py
@@ -3,24 +3,13 @@ import networkx as nx
 __all__ = ["cytoscape_data", "cytoscape_graph"]
 
 
-def cytoscape_data(G, attrs=None, name="name", ident="id"):
+def cytoscape_data(G, name="name", ident="id"):
     """Returns data in Cytoscape JSON format (cyjs).
 
     Parameters
     ----------
     G : NetworkX Graph
         The graph to convert to cytoscape format
-    attrs : dict or None (default=None)
-        A dictionary containing the keys 'name' and 'ident' which are mapped to
-        the 'name' and 'id' node elements in cyjs format. All other keys are
-        ignored. Default is `None` which results in the default mapping
-        ``dict(name="name", ident="id")``.
-
-        .. deprecated:: 2.6
-
-           The `attrs` keyword argument will be replaced with `name` and
-           `ident` in networkx 3.0
-
     name : string
         A string which is mapped to the 'name' node element in cyjs format.
         Must not have the same value as `ident`.
@@ -58,30 +47,6 @@ def cytoscape_data(G, attrs=None, name="name", ident="id"):
        {'data': {'id': '1', 'value': 1, 'name': '1'}}],
       'edges': [{'data': {'source': 0, 'target': 1}}]}}
     """
-    # ------ TODO: Remove between the lines in 3.0 ----- #
-    if attrs is not None:
-        import warnings
-
-        msg = (
-            "\nThe `attrs` keyword argument of cytoscape_data is deprecated\n"
-            "and will be removed in networkx 3.0.\n"
-            "It is replaced with explicit `name` and `ident` keyword\n"
-            "arguments.\n"
-            "To make this warning go away and ensure usage is forward\n"
-            "compatible, replace `attrs` with `name` and `ident`,\n"
-            "for example:\n\n"
-            "   >>> cytoscape_data(G, attrs={'name': 'foo', 'ident': 'bar'})\n\n"
-            "should instead be written as\n\n"
-            "   >>> cytoscape_data(G, name='foo', ident='bar')\n\n"
-            "in networkx 3.0.\n"
-            "The default values of 'name' and 'id' will not change."
-        )
-        warnings.warn(msg, DeprecationWarning, stacklevel=2)
-
-        name = attrs["name"]
-        ident = attrs["ident"]
-    # -------------------------------------------------- #
-
     if name == ident:
         raise nx.NetworkXError("name and ident must be different.")
 
@@ -115,7 +80,7 @@ def cytoscape_data(G, attrs=None, name="name", ident="id"):
     return jsondata
 
 
-def cytoscape_graph(data, attrs=None, name="name", ident="id"):
+def cytoscape_graph(data, name="name", ident="id"):
     """
     Create a NetworkX graph from a dictionary in cytoscape JSON format.
 
@@ -123,17 +88,6 @@ def cytoscape_graph(data, attrs=None, name="name", ident="id"):
     ----------
     data : dict
         A dictionary of data conforming to cytoscape JSON format.
-    attrs : dict or None (default=None)
-        A dictionary containing the keys 'name' and 'ident' which are mapped to
-        the 'name' and 'id' node elements in cyjs format. All other keys are
-        ignored. Default is `None` which results in the default mapping
-        ``dict(name="name", ident="id")``.
-
-        .. deprecated:: 2.6
-
-           The `attrs` keyword argument will be replaced with `name` and
-           `ident` in networkx 3.0
-
     name : string
         A string which is mapped to the 'name' node element in cyjs format.
         Must not have the same value as `ident`.
@@ -181,29 +135,6 @@ def cytoscape_graph(data, attrs=None, name="name", ident="id"):
     >>> G.edges(data=True)
     EdgeDataView([(0, 1, {'source': 0, 'target': 1})])
     """
-    # ------ TODO: Remove between the lines in 3.0 ----- #
-    if attrs is not None:
-        import warnings
-
-        msg = (
-            "\nThe `attrs` keyword argument of cytoscape_data is deprecated\n"
-            "and will be removed in networkx 3.0.\n"
-            "It is replaced with explicit `name` and `ident` keyword\n"
-            "arguments.\n"
-            "To make this warning go away and ensure usage is forward\n"
-            "compatible, replace `attrs` with `name` and `ident`,\n"
-            "for example:\n\n"
-            "   >>> cytoscape_data(G, attrs={'name': 'foo', 'ident': 'bar'})\n\n"
-            "should instead be written as\n\n"
-            "   >>> cytoscape_data(G, name='foo', ident='bar')\n\n"
-            "The default values of 'name' and 'id' will not change."
-        )
-        warnings.warn(msg, DeprecationWarning, stacklevel=2)
-
-        name = attrs["name"]
-        ident = attrs["ident"]
-    # -------------------------------------------------- #
-
     if name == ident:
         raise nx.NetworkXError("name and ident must be different.")
 
diff --git a/networkx/readwrite/json_graph/jit.py b/networkx/readwrite/json_graph/jit.py
deleted file mode 100644
index 043f1a1..0000000
--- a/networkx/readwrite/json_graph/jit.py
+++ /dev/null
@@ -1,118 +0,0 @@
-"""
-Read and write NetworkX graphs as JavaScript InfoVis Toolkit (JIT) format JSON.
-
-See the `JIT documentation`_ for more examples.
-
-Format
-------
-var json = [
-  {
-    "id": "aUniqueIdentifier",
-    "name": "usually a nodes name",
-    "data": {
-      "some key": "some value",
-      "some other key": "some other value"
-     },
-    "adjacencies": [
-    {
-      nodeTo:"aNodeId",
-      data: {} //put whatever you want here
-    },
-    'other adjacencies go here...'
-  },
-
-  'other nodes go here...'
-];
-.. _JIT documentation: http://thejit.org
-"""
-
-import json
-import warnings
-
-import networkx as nx
-from networkx.utils.decorators import not_implemented_for
-
-__all__ = ["jit_graph", "jit_data"]
-
-
-def jit_graph(data, create_using=None):
-    """Read a graph from JIT JSON.
-
-    Parameters
-    ----------
-    data : JSON Graph Object
-
-    create_using : Networkx Graph, optional (default: Graph())
-        Return graph of this type. The provided instance will be cleared.
-
-    Returns
-    -------
-    G : NetworkX Graph built from create_using if provided.
-
-    .. deprecated:: 2.6
-    """
-    warnings.warn(
-        ("jit_graph is deprecated and will be removed in NetworkX 3.0."),
-        DeprecationWarning,
-    )
-
-    if create_using is None:
-        G = nx.Graph()
-    else:
-        G = create_using
-        G.clear()
-
-    if isinstance(data, str):
-        data = json.loads(data)
-
-    for node in data:
-        G.add_node(node["id"], **node["data"])
-        if node.get("adjacencies") is not None:
-            for adj in node["adjacencies"]:
-                G.add_edge(node["id"], adj["nodeTo"], **adj["data"])
-    return G
-
-
-@not_implemented_for("multigraph")
-def jit_data(G, indent=None, default=None):
-    """Returns data in JIT JSON format.
-
-    Parameters
-    ----------
-    G : NetworkX Graph
-
-    indent: optional, default=None
-        If indent is a non-negative integer, then JSON array elements and
-        object members will be pretty-printed with that indent level.
-        An indent level of 0, or negative, will only insert newlines.
-        None (the default) selects the most compact representation.
-
-    default: optional, default=None
-         It will pass the value to the json.dumps function in order to
-         be able to serialize custom objects used as nodes.
-
-    Returns
-    -------
-    data: JIT JSON string
-
-    .. deprecated:: 2.6
-    """
-    warnings.warn(
-        ("jit_data is deprecated and will be removed in NetworkX 3.0."),
-        DeprecationWarning,
-    )
-    json_graph = []
-    for node in G.nodes():
-        json_node = {"id": node, "name": node}
-        # node data
-        json_node["data"] = G.nodes[node]
-        # adjacencies
-        if G[node]:
-            json_node["adjacencies"] = []
-            for neighbour in G[node]:
-                adjacency = {"nodeTo": neighbour}
-                # adjacency data
-                adjacency["data"] = G.edges[node, neighbour]
-                json_node["adjacencies"].append(adjacency)
-        json_graph.append(json_node)
-    return json.dumps(json_graph, indent=indent, default=default)
diff --git a/networkx/readwrite/json_graph/node_link.py b/networkx/readwrite/json_graph/node_link.py
index 7005107..86d3a4c 100644
--- a/networkx/readwrite/json_graph/node_link.py
+++ b/networkx/readwrite/json_graph/node_link.py
@@ -5,7 +5,13 @@ import networkx as nx
 __all__ = ["node_link_data", "node_link_graph"]
 
 
-_attrs = dict(source="source", target="target", name="id", key="key", link="links")
+_attrs = {
+    "source": "source",
+    "target": "target",
+    "name": "id",
+    "key": "key",
+    "link": "links",
+}
 
 
 def _to_tuple(x):
diff --git a/networkx/readwrite/json_graph/tests/test_adjacency.py b/networkx/readwrite/json_graph/tests/test_adjacency.py
index 0f34ef1..3115d77 100644
--- a/networkx/readwrite/json_graph/tests/test_adjacency.py
+++ b/networkx/readwrite/json_graph/tests/test_adjacency.py
@@ -56,5 +56,5 @@ class TestAdjacency:
     def test_exception(self):
         with pytest.raises(nx.NetworkXError):
             G = nx.MultiDiGraph()
-            attrs = dict(id="node", key="node")
+            attrs = {"id": "node", "key": "node"}
             adjacency_data(G, attrs)
diff --git a/networkx/readwrite/json_graph/tests/test_cytoscape.py b/networkx/readwrite/json_graph/tests/test_cytoscape.py
index e92e737..5d47f21 100644
--- a/networkx/readwrite/json_graph/tests/test_cytoscape.py
+++ b/networkx/readwrite/json_graph/tests/test_cytoscape.py
@@ -7,23 +7,6 @@ import networkx as nx
 from networkx.readwrite.json_graph import cytoscape_data, cytoscape_graph
 
 
-# TODO: To be removed when signature change complete in 3.0
-def test_attrs_deprecation(recwarn):
-    G = nx.path_graph(3)
-
-    # No warnings when `attrs` kwarg not used
-    data = cytoscape_data(G)
-    H = cytoscape_graph(data)
-    assert len(recwarn) == 0
-
-    # Future warning raised with `attrs` kwarg
-    attrs = {"name": "foo", "ident": "bar"}
-    with pytest.warns(DeprecationWarning):
-        data = cytoscape_data(G, attrs)
-    with pytest.warns(DeprecationWarning):
-        H = cytoscape_graph(data, attrs)
-
-
 def test_graph():
     G = nx.path_graph(4)
     H = cytoscape_graph(cytoscape_data(G))
diff --git a/networkx/readwrite/json_graph/tests/test_jit.py b/networkx/readwrite/json_graph/tests/test_jit.py
deleted file mode 100644
index 309c405..0000000
--- a/networkx/readwrite/json_graph/tests/test_jit.py
+++ /dev/null
@@ -1,66 +0,0 @@
-import json
-
-import pytest
-
-import networkx as nx
-from networkx.readwrite.json_graph import jit_data, jit_graph
-
-
-class TestJIT:
-    def test_jit(self):
-        G = nx.Graph()
-        G.add_node("Node1", node_data="foobar")
-        G.add_node("Node3", node_data="bar")
-        G.add_node("Node4")
-        G.add_edge("Node1", "Node2", weight=9, something="isSomething")
-        G.add_edge("Node2", "Node3", weight=4, something="isNotSomething")
-        G.add_edge("Node1", "Node2")
-        d = jit_data(G)
-        K = jit_graph(json.loads(d))
-        assert nx.is_isomorphic(G, K)
-
-    def test_jit_2(self):
-        G = nx.Graph()
-        G.add_node(1, node_data=3)
-        G.add_node(3, node_data=0)
-        G.add_edge(1, 2, weight=9, something=0)
-        G.add_edge(2, 3, weight=4, something=3)
-        G.add_edge(1, 2)
-        d = jit_data(G)
-        K = jit_graph(json.loads(d))
-        assert nx.is_isomorphic(G, K)
-
-    def test_jit_directed(self):
-        G = nx.DiGraph()
-        G.add_node(1, node_data=3)
-        G.add_node(3, node_data=0)
-        G.add_edge(1, 2, weight=9, something=0)
-        G.add_edge(2, 3, weight=4, something=3)
-        G.add_edge(1, 2)
-        d = jit_data(G)
-        K = jit_graph(json.loads(d), create_using=nx.DiGraph())
-        assert nx.is_isomorphic(G, K)
-
-    def test_jit_multi_directed(self):
-        G = nx.MultiDiGraph()
-        G.add_node(1, node_data=3)
-        G.add_node(3, node_data=0)
-        G.add_edge(1, 2, weight=9, something=0)
-        G.add_edge(2, 3, weight=4, something=3)
-        G.add_edge(1, 2)
-        pytest.raises(nx.NetworkXNotImplemented, jit_data, G)
-
-        H = nx.DiGraph(G)
-        d = jit_data(H)
-        K = jit_graph(json.loads(d), create_using=nx.MultiDiGraph())
-        assert nx.is_isomorphic(H, K)
-        K.add_edge(1, 2)
-        assert not nx.is_isomorphic(H, K)
-        assert nx.is_isomorphic(G, K)
-
-    def test_jit_round_trip(self):
-        G = nx.Graph()
-        d = nx.jit_data(G)
-        H = jit_graph(json.loads(d))
-        K = jit_graph(d)
-        assert nx.is_isomorphic(H, K)
diff --git a/networkx/readwrite/json_graph/tests/test_node_link.py b/networkx/readwrite/json_graph/tests/test_node_link.py
index 8db06d4..54078c4 100644
--- a/networkx/readwrite/json_graph/tests/test_node_link.py
+++ b/networkx/readwrite/json_graph/tests/test_node_link.py
@@ -16,7 +16,13 @@ def test_attrs_deprecation(recwarn):
     assert len(recwarn) == 0
 
     # Future warning raised with `attrs` kwarg
-    attrs = dict(source="source", target="target", name="id", key="key", link="links")
+    attrs = {
+        "source": "source",
+        "target": "target",
+        "name": "id",
+        "key": "key",
+        "link": "links",
+    }
     data = node_link_data(G, attrs=attrs)
     assert len(recwarn) == 1
 
@@ -26,7 +32,6 @@ def test_attrs_deprecation(recwarn):
 
 
 class TestNodeLink:
-
     # TODO: To be removed when signature change complete
     def test_custom_attrs_dep(self):
         G = nx.path_graph(4)
@@ -35,13 +40,13 @@ class TestNodeLink:
         G.graph[1] = "one"
         G.graph["foo"] = "bar"
 
-        attrs = dict(
-            source="c_source",
-            target="c_target",
-            name="c_id",
-            key="c_key",
-            link="c_links",
-        )
+        attrs = {
+            "source": "c_source",
+            "target": "c_target",
+            "name": "c_id",
+            "key": "c_key",
+            "link": "c_links",
+        }
 
         H = node_link_graph(
             node_link_data(G, attrs=attrs), multigraph=False, attrs=attrs
@@ -53,11 +58,11 @@ class TestNodeLink:
 
         # provide only a partial dictionary of keywords.
         # This is similar to an example in the doc string
-        attrs = dict(
-            link="c_links",
-            source="c_source",
-            target="c_target",
-        )
+        attrs = {
+            "link": "c_links",
+            "source": "c_source",
+            "target": "c_target",
+        }
         H = node_link_graph(
             node_link_data(G, attrs=attrs), multigraph=False, attrs=attrs
         )
@@ -70,7 +75,7 @@ class TestNodeLink:
     def test_exception_dep(self):
         with pytest.raises(nx.NetworkXError):
             G = nx.MultiDiGraph()
-            attrs = dict(name="node", source="node", target="node", key="node")
+            attrs = {"name": "node", "source": "node", "target": "node", "key": "node"}
             node_link_data(G, attrs)
 
     def test_graph(self):
@@ -133,7 +138,7 @@ class TestNodeLink:
     def test_exception(self):
         with pytest.raises(nx.NetworkXError):
             G = nx.MultiDiGraph()
-            attrs = dict(name="node", source="node", target="node", key="node")
+            attrs = {"name": "node", "source": "node", "target": "node", "key": "node"}
             node_link_data(G, **attrs)
 
     def test_string_ids(self):
@@ -155,13 +160,13 @@ class TestNodeLink:
         G.graph[1] = "one"
         G.graph["foo"] = "bar"
 
-        attrs = dict(
-            source="c_source",
-            target="c_target",
-            name="c_id",
-            key="c_key",
-            link="c_links",
-        )
+        attrs = {
+            "source": "c_source",
+            "target": "c_target",
+            "name": "c_id",
+            "key": "c_key",
+            "link": "c_links",
+        }
 
         H = node_link_graph(node_link_data(G, **attrs), multigraph=False, **attrs)
         assert nx.is_isomorphic(G, H)
diff --git a/networkx/readwrite/json_graph/tests/test_tree.py b/networkx/readwrite/json_graph/tests/test_tree.py
index 59a81df..643a14d 100644
--- a/networkx/readwrite/json_graph/tests/test_tree.py
+++ b/networkx/readwrite/json_graph/tests/test_tree.py
@@ -46,20 +46,3 @@ def test_exceptions():
         G = nx.MultiDiGraph()
         G.add_node(0)
         tree_data(G, 0, ident="node", children="node")
-
-
-# NOTE: To be removed when deprecation expires in 3.0
-def test_attrs_deprecation(recwarn):
-    G = nx.path_graph(3, create_using=nx.DiGraph)
-
-    # No warnings when `attrs` kwarg not used
-    data = tree_data(G, 0)
-    H = tree_graph(data)
-    assert len(recwarn) == 0
-
-    # DeprecationWarning issued when `attrs` is used
-    attrs = {"id": "foo", "children": "bar"}
-    with pytest.warns(DeprecationWarning):
-        data = tree_data(G, 0, attrs=attrs)
-    with pytest.warns(DeprecationWarning):
-        H = tree_graph(data, attrs=attrs)
diff --git a/networkx/readwrite/json_graph/tree.py b/networkx/readwrite/json_graph/tree.py
index ab7098c..3e9a4c9 100644
--- a/networkx/readwrite/json_graph/tree.py
+++ b/networkx/readwrite/json_graph/tree.py
@@ -5,8 +5,7 @@ import networkx as nx
 __all__ = ["tree_data", "tree_graph"]
 
 
-# NOTE: Remove attrs from signature in 3.0
-def tree_data(G, root, attrs=None, ident="id", children="children"):
+def tree_data(G, root, ident="id", children="children"):
     """Returns data in tree format that is suitable for JSON serialization
     and use in Javascript documents.
 
@@ -18,20 +17,6 @@ def tree_data(G, root, attrs=None, ident="id", children="children"):
     root : node
        The root of the tree
 
-    attrs : dict
-        A dictionary that contains two keys 'id' and 'children'. The
-        corresponding values provide the attribute names for storing
-        NetworkX-internal graph data. The values should be unique. Default
-        value: :samp:`dict(id='id', children='children')`.
-
-        If some user-defined graph data use these attribute names as data keys,
-        they may be silently dropped.
-
-        .. deprecated:: 2.6
-
-           The `attrs` keyword argument is replaced by `ident` and `children`
-           and will be removed in networkx 3.0
-
     ident : string
         Attribute name for storing NetworkX-internal graph data. `ident` must
         have a different value than `children`. The default is 'id'.
@@ -79,28 +64,6 @@ def tree_data(G, root, attrs=None, ident="id", children="children"):
     if not nx.is_weakly_connected(G):
         raise TypeError("G is not weakly connected.")
 
-    # NOTE: to be removed in 3.0
-    if attrs is not None:
-        import warnings
-
-        msg = (
-            "\nThe `attrs` keyword argument of tree_data is deprecated\n"
-            "and will be removed in networkx 3.0.\n"
-            "It is replaced with explicit `ident` and `children` "
-            "keyword arguments.\n"
-            "To make this warning go away and ensure usage is forward\n"
-            "compatible, replace `attrs` with `ident` and `children,\n"
-            "for example:\n\n"
-            "    >>> tree_data(G, root, attrs={'id': 'foo', 'children': 'bar'})\n\n"
-            "should instead be written as\n\n"
-            "    >>> tree_data(G, root, ident='foo', children='bar')\n\n"
-            "The default values of 'id' and 'children' will not change."
-        )
-        warnings.warn(msg, DeprecationWarning, stacklevel=2)
-
-        ident = attrs["id"]
-        children = attrs["children"]
-
     if ident == children:
         raise nx.NetworkXError("The values for `id` and `children` must be different.")
 
@@ -122,23 +85,13 @@ def tree_data(G, root, attrs=None, ident="id", children="children"):
     return data
 
 
-def tree_graph(data, attrs=None, ident="id", children="children"):
+def tree_graph(data, ident="id", children="children"):
     """Returns graph from tree data format.
 
     Parameters
     ----------
     data : dict
         Tree formatted graph data
-    attrs : dict
-        A dictionary that contains two keys 'id' and 'children'. The
-        corresponding values provide the attribute names for storing
-        NetworkX-internal graph data. The values should be unique. Default
-        value: :samp:`dict(id='id', children='children')`.
-
-        .. deprecated:: 2.6
-
-           The `attrs` keyword argument is replaced by `ident` and `children`
-           and will be removed in networkx 3.0
 
     ident : string
         Attribute name for storing NetworkX-internal graph data. `ident` must
@@ -164,26 +117,6 @@ def tree_graph(data, attrs=None, ident="id", children="children"):
     tree_data, node_link_data, adjacency_data
     """
     graph = nx.DiGraph()
-    if attrs is not None:
-        import warnings
-
-        msg = (
-            "\nThe `attrs` keyword argument of tree_graph is deprecated\n"
-            "and will be removed in networkx 3.0.\n"
-            "It is replaced with explicit `ident` and `children` "
-            "keyword arguments.\n"
-            "To make this warning go away and ensure usage is\n"
-            "forward compatible, replace `attrs` with `ident` and `children,\n"
-            "for example:\n\n"
-            "    >>> tree_graph(data, attrs={'id': 'foo', 'children': 'bar'})\n\n"
-            "should instead be written as\n\n"
-            "    >>> tree_graph(data, ident='foo', children='bar')\n\n"
-            "The default values of 'id' and 'children' will not change."
-        )
-        warnings.warn(msg, DeprecationWarning, stacklevel=2)
-
-        ident = attrs["id"]
-        children = attrs["children"]
 
     def add_children(parent, children_):
         for data in children_:
diff --git a/networkx/readwrite/leda.py b/networkx/readwrite/leda.py
index cde778d..ee7ad64 100644
--- a/networkx/readwrite/leda.py
+++ b/networkx/readwrite/leda.py
@@ -71,7 +71,7 @@ def parse_leda(lines):
         [
             line.rstrip("\n")
             for line in lines
-            if not (line.startswith("#") or line.startswith("\n") or line == "")
+            if not (line.startswith(("#", "\n")) or line == "")
         ]
     )
     for i in range(3):
diff --git a/networkx/readwrite/nx_shp.py b/networkx/readwrite/nx_shp.py
deleted file mode 100644
index dd48712..0000000
--- a/networkx/readwrite/nx_shp.py
+++ /dev/null
@@ -1,350 +0,0 @@
-"""
-*********
-Shapefile
-*********
-
-Generates a networkx.DiGraph from point and line shapefiles.
-
-"The Esri Shapefile or simply a shapefile is a popular geospatial vector
-data format for geographic information systems software. It is developed
-and regulated by Esri as a (mostly) open specification for data
-interoperability among Esri and other software products."
-See https://en.wikipedia.org/wiki/Shapefile for additional information.
-"""
-import warnings
-
-import networkx as nx
-
-__all__ = ["read_shp", "write_shp"]
-
-
-def read_shp(path, simplify=True, geom_attrs=True, strict=True):
-    """Generates a networkx.DiGraph from shapefiles.
-
-    .. deprecated:: 2.6
-
-       read_shp is deprecated and will be removed in NetworkX 3.0.
-       See https://networkx.org/documentation/latest/auto_examples/index.html#geospatial.
-
-    Point geometries are
-    translated into nodes, lines into edges. Coordinate tuples are used as
-    keys. Attributes are preserved, line geometries are simplified into start
-    and end coordinates. Accepts a single shapefile or directory of many
-    shapefiles.
-
-    "The Esri Shapefile or simply a shapefile is a popular geospatial vector
-    data format for geographic information systems software [1]_."
-
-    Parameters
-    ----------
-    path : file or string
-       File, directory, or filename to read.
-
-    simplify:  bool
-        If True, simplify line geometries to start and end coordinates.
-        If False, and line feature geometry has multiple segments, the
-        non-geometric attributes for that feature will be repeated for each
-        edge comprising that feature.
-
-    geom_attrs: bool
-        If True, include the Wkb, Wkt and Json geometry attributes with
-        each edge.
-
-        NOTE:  if these attributes are available, write_shp will use them
-        to write the geometry.  If nodes store the underlying coordinates for
-        the edge geometry as well (as they do when they are read via
-        this method) and they change, your geomety will be out of sync.
-
-    strict: bool
-        If True, raise NetworkXError when feature geometry is missing or
-        GeometryType is not supported.
-        If False, silently ignore missing or unsupported geometry in features.
-
-    Returns
-    -------
-    G : NetworkX graph
-
-    Raises
-    ------
-    ImportError
-       If ogr module is not available.
-
-    RuntimeError
-       If file cannot be open or read.
-
-    NetworkXError
-       If strict=True and feature is missing geometry or GeometryType is
-       not supported.
-
-    Examples
-    --------
-    >>> G = nx.read_shp("test.shp")  # doctest: +SKIP
-
-    References
-    ----------
-    .. [1] https://en.wikipedia.org/wiki/Shapefile
-    """
-    msg = (
-        "read_shp is deprecated and will be removed in 3.0."
-        "See https://networkx.org/documentation/latest/auto_examples/index.html#geospatial."
-    )
-    warnings.warn(msg, DeprecationWarning, stacklevel=2)
-    try:
-        from osgeo import ogr
-    except ImportError as err:
-        raise ImportError("read_shp requires OGR: http://www.gdal.org/") from err
-
-    if not isinstance(path, str):
-        return
-
-    net = nx.DiGraph()
-    shp = ogr.Open(path)
-    if shp is None:
-        raise RuntimeError(f"Unable to open {path}")
-    for lyr in shp:
-        fields = [x.GetName() for x in lyr.schema]
-        for f in lyr:
-            g = f.geometry()
-            if g is None:
-                if strict:
-                    raise nx.NetworkXError("Bad data: feature missing geometry")
-                else:
-                    continue
-            flddata = [f.GetField(f.GetFieldIndex(x)) for x in fields]
-            attributes = dict(zip(fields, flddata))
-            attributes["ShpName"] = lyr.GetName()
-            # Note:  Using layer level geometry type
-            if g.GetGeometryType() == ogr.wkbPoint:
-                net.add_node((g.GetPoint_2D(0)), **attributes)
-            elif g.GetGeometryType() in (ogr.wkbLineString, ogr.wkbMultiLineString):
-                for edge in edges_from_line(g, attributes, simplify, geom_attrs):
-                    e1, e2, attr = edge
-                    net.add_edge(e1, e2)
-                    net[e1][e2].update(attr)
-            else:
-                if strict:
-                    raise nx.NetworkXError(
-                        f"GeometryType {g.GetGeometryType()} not supported"
-                    )
-
-    return net
-
-
-def edges_from_line(geom, attrs, simplify=True, geom_attrs=True):
-    """
-    Generate edges for each line in geom
-    Written as a helper for read_shp
-
-    Parameters
-    ----------
-
-    geom:  ogr line geometry
-        To be converted into an edge or edges
-
-    attrs:  dict
-        Attributes to be associated with all geoms
-
-    simplify:  bool
-        If True, simplify the line as in read_shp
-
-    geom_attrs:  bool
-        If True, add geom attributes to edge as in read_shp
-
-
-    Returns
-    -------
-     edges:  generator of edges
-        each edge is a tuple of form
-        (node1_coord, node2_coord, attribute_dict)
-        suitable for expanding into a networkx Graph add_edge call
-
-    .. deprecated:: 2.6
-    """
-    msg = (
-        "edges_from_line is deprecated and will be removed in 3.0."
-        "See https://networkx.org/documentation/latest/auto_examples/index.html#geospatial."
-    )
-    warnings.warn(msg, DeprecationWarning, stacklevel=2)
-    try:
-        from osgeo import ogr
-    except ImportError as err:
-        raise ImportError(
-            "edges_from_line requires OGR: " "http://www.gdal.org/"
-        ) from err
-
-    if geom.GetGeometryType() == ogr.wkbLineString:
-        if simplify:
-            edge_attrs = attrs.copy()
-            last = geom.GetPointCount() - 1
-            if geom_attrs:
-                edge_attrs["Wkb"] = geom.ExportToWkb()
-                edge_attrs["Wkt"] = geom.ExportToWkt()
-                edge_attrs["Json"] = geom.ExportToJson()
-            yield (geom.GetPoint_2D(0), geom.GetPoint_2D(last), edge_attrs)
-        else:
-            for i in range(0, geom.GetPointCount() - 1):
-                pt1 = geom.GetPoint_2D(i)
-                pt2 = geom.GetPoint_2D(i + 1)
-                edge_attrs = attrs.copy()
-                if geom_attrs:
-                    segment = ogr.Geometry(ogr.wkbLineString)
-                    segment.AddPoint_2D(pt1[0], pt1[1])
-                    segment.AddPoint_2D(pt2[0], pt2[1])
-                    edge_attrs["Wkb"] = segment.ExportToWkb()
-                    edge_attrs["Wkt"] = segment.ExportToWkt()
-                    edge_attrs["Json"] = segment.ExportToJson()
-                    del segment
-                yield (pt1, pt2, edge_attrs)
-
-    elif geom.GetGeometryType() == ogr.wkbMultiLineString:
-        for i in range(geom.GetGeometryCount()):
-            geom_i = geom.GetGeometryRef(i)
-            yield from edges_from_line(geom_i, attrs, simplify, geom_attrs)
-
-
-def write_shp(G, outdir):
-    """Writes a networkx.DiGraph to two shapefiles, edges and nodes.
-
-    .. deprecated:: 2.6
-
-       write_shp is deprecated and will be removed in 3.0.
-       See https://networkx.org/documentation/latest/auto_examples/index.html#geospatial.
-
-    Nodes and edges are expected to have a Well Known Binary (Wkb) or
-    Well Known Text (Wkt) key in order to generate geometries. Also
-    acceptable are nodes with a numeric tuple key (x,y).
-
-    "The Esri Shapefile or simply a shapefile is a popular geospatial vector
-    data format for geographic information systems software [1]_."
-
-    Parameters
-    ----------
-    G : NetworkX graph
-        Directed graph
-    outdir : directory path
-       Output directory for the two shapefiles.
-
-    Returns
-    -------
-    None
-
-    Examples
-    --------
-    nx.write_shp(digraph, '/shapefiles') # doctest +SKIP
-
-    References
-    ----------
-    .. [1] https://en.wikipedia.org/wiki/Shapefile
-    """
-    msg = (
-        "write_shp is deprecated and will be removed in 3.0."
-        "See https://networkx.org/documentation/latest/auto_examples/index.html#geospatial."
-    )
-    warnings.warn(msg, DeprecationWarning, stacklevel=2)
-    try:
-        from osgeo import ogr
-    except ImportError as err:
-        raise ImportError("write_shp requires OGR: http://www.gdal.org/") from err
-    # easier to debug in python if ogr throws exceptions
-    ogr.UseExceptions()
-
-    def netgeometry(key, data):
-        if "Wkb" in data:
-            geom = ogr.CreateGeometryFromWkb(data["Wkb"])
-        elif "Wkt" in data:
-            geom = ogr.CreateGeometryFromWkt(data["Wkt"])
-        elif type(key[0]).__name__ == "tuple":  # edge keys are packed tuples
-            geom = ogr.Geometry(ogr.wkbLineString)
-            _from, _to = key[0], key[1]
-            try:
-                geom.SetPoint(0, *_from)
-                geom.SetPoint(1, *_to)
-            except TypeError:
-                # assume user used tuple of int and choked ogr
-                _ffrom = [float(x) for x in _from]
-                _fto = [float(x) for x in _to]
-                geom.SetPoint(0, *_ffrom)
-                geom.SetPoint(1, *_fto)
-        else:
-            geom = ogr.Geometry(ogr.wkbPoint)
-            try:
-                geom.SetPoint(0, *key)
-            except TypeError:
-                # assume user used tuple of int and choked ogr
-                fkey = [float(x) for x in key]
-                geom.SetPoint(0, *fkey)
-
-        return geom
-
-    # Create_feature with new optional attributes arg (should be dict type)
-    def create_feature(geometry, lyr, attributes=None):
-        feature = ogr.Feature(lyr.GetLayerDefn())
-        feature.SetGeometry(g)
-        if attributes is not None:
-            # Loop through attributes, assigning data to each field
-            for field, data in attributes.items():
-                feature.SetField(field, data)
-        lyr.CreateFeature(feature)
-        feature.Destroy()
-
-    # Conversion dict between python and ogr types
-    OGRTypes = {int: ogr.OFTInteger, str: ogr.OFTString, float: ogr.OFTReal}
-
-    # Check/add fields from attribute data to Shapefile layers
-    def add_fields_to_layer(key, value, fields, layer):
-        # Field not in previous edges so add to dict
-        if type(value) in OGRTypes:
-            fields[key] = OGRTypes[type(value)]
-        else:
-            # Data type not supported, default to string (char 80)
-            fields[key] = ogr.OFTString
-        # Create the new field
-        newfield = ogr.FieldDefn(key, fields[key])
-        layer.CreateField(newfield)
-
-    drv = ogr.GetDriverByName("ESRI Shapefile")
-    shpdir = drv.CreateDataSource(outdir)
-    # delete pre-existing output first otherwise ogr chokes
-    try:
-        shpdir.DeleteLayer("nodes")
-    except:
-        pass
-    nodes = shpdir.CreateLayer("nodes", None, ogr.wkbPoint)
-
-    # Storage for node field names and their data types
-    node_fields = {}
-
-    def create_attributes(data, fields, layer):
-        attributes = {}  # storage for attribute data (indexed by field names)
-        for key, value in data.items():
-            # Reject spatial data not required for attribute table
-            if key != "Json" and key != "Wkt" and key != "Wkb" and key != "ShpName":
-                # Check/add field and data type to fields dict
-                if key not in fields:
-                    add_fields_to_layer(key, value, fields, layer)
-                # Store the data from new field to dict for CreateLayer()
-                attributes[key] = value
-        return attributes, layer
-
-    for n in G:
-        data = G.nodes[n]
-        g = netgeometry(n, data)
-        attributes, nodes = create_attributes(data, node_fields, nodes)
-        create_feature(g, nodes, attributes)
-
-    try:
-        shpdir.DeleteLayer("edges")
-    except:
-        pass
-    edges = shpdir.CreateLayer("edges", None, ogr.wkbLineString)
-
-    # New edge attribute write support merged into edge loop
-    edge_fields = {}  # storage for field names and their data types
-
-    for edge in G.edges(data=True):
-        data = G.get_edge_data(*edge)
-        g = netgeometry(edge, data)
-        attributes, edges = create_attributes(edge[2], edge_fields, edges)
-        create_feature(g, edges, attributes)
-
-    nodes, edges = None, None
diff --git a/networkx/readwrite/nx_yaml.py b/networkx/readwrite/nx_yaml.py
deleted file mode 100644
index b8ed9e5..0000000
--- a/networkx/readwrite/nx_yaml.py
+++ /dev/null
@@ -1,59 +0,0 @@
-"""
-****
-YAML
-****
-Read and write NetworkX graphs in YAML format.
-
-"YAML is a data serialization format designed for human readability
-and interaction with scripting languages."
-See http://www.yaml.org for documentation.
-
-Format
-------
-http://pyyaml.org/wiki/PyYAML
-
-"""
-
-
-def __dir__():
-    return ["read_yaml", "write_yaml"]
-
-
-def __getattr__(name):
-    """Remove functions and provide informative error messages."""
-    if name == "nx_yaml":
-        raise ImportError(
-            "\nThe nx_yaml module has been removed from NetworkX.\n"
-            "Please use the `yaml` package directly for working with yaml data.\n"
-            "For example, a networkx.Graph `G` can be written to and loaded\n"
-            "from a yaml file with:\n\n"
-            "    import yaml\n\n"
-            "    with open('path_to_yaml_file', 'w') as fh:\n"
-            "        yaml.dump(G, fh)\n"
-            "    with open('path_to_yaml_file', 'r') as fh:\n"
-            "        G = yaml.load(fh, Loader=yaml.Loader)\n\n"
-            "Note that yaml.Loader is considered insecure - see the pyyaml\n"
-            "documentation for further details.\n\n"
-            "This message will be removed in NetworkX 3.0."
-        )
-    if name == "read_yaml":
-        raise ImportError(
-            "\nread_yaml has been removed from NetworkX, please use `yaml`\n"
-            "directly:\n\n"
-            "    import yaml\n\n"
-            "    with open('path', 'r') as fh:\n"
-            "        yaml.load(fh, Loader=yaml.Loader)\n\n"
-            "Note that yaml.Loader is considered insecure - see the pyyaml\n"
-            "documentation for further details.\n\n"
-            "This message will be removed in NetworkX 3.0."
-        )
-    if name == "write_yaml":
-        raise ImportError(
-            "\nwrite_yaml has been removed from NetworkX, please use `yaml`\n"
-            "directly:\n\n"
-            "    import yaml\n\n"
-            "    with open('path_for_yaml_output', 'w') as fh:\n"
-            "        yaml.dump(G_to_be_yaml, fh)\n\n"
-            "This message will be removed in NetworkX 3.0."
-        )
-    raise AttributeError(f"module {__name__} has no attribute {name}")
diff --git a/networkx/readwrite/sparse6.py b/networkx/readwrite/sparse6.py
index beae8ac..3b9e814 100644
--- a/networkx/readwrite/sparse6.py
+++ b/networkx/readwrite/sparse6.py
@@ -62,7 +62,7 @@ def _generate_sparse6_bytes(G, nodes, header):
     edges = sorted((max(u, v), min(u, v)) for u, v in G.edges())
     bits = []
     curv = 0
-    for (v, u) in edges:
+    for v, u in edges:
         if v == curv:  # current vertex edge
             bits.append(0)
             bits.extend(enc(u))
diff --git a/networkx/readwrite/tests/test_edgelist.py b/networkx/readwrite/tests/test_edgelist.py
index abd1d3c..18b726f 100644
--- a/networkx/readwrite/tests/test_edgelist.py
+++ b/networkx/readwrite/tests/test_edgelist.py
@@ -183,7 +183,7 @@ class TestEdgelist:
 
     def test_write_edgelist_1(self):
         fh = io.BytesIO()
-        G = nx.OrderedGraph()
+        G = nx.Graph()
         G.add_edges_from([(1, 2), (2, 3)])
         nx.write_edgelist(G, fh, data=False)
         fh.seek(0)
@@ -191,7 +191,7 @@ class TestEdgelist:
 
     def test_write_edgelist_2(self):
         fh = io.BytesIO()
-        G = nx.OrderedGraph()
+        G = nx.Graph()
         G.add_edges_from([(1, 2), (2, 3)])
         nx.write_edgelist(G, fh, data=True)
         fh.seek(0)
@@ -199,7 +199,7 @@ class TestEdgelist:
 
     def test_write_edgelist_3(self):
         fh = io.BytesIO()
-        G = nx.OrderedGraph()
+        G = nx.Graph()
         G.add_edge(1, 2, weight=2.0)
         G.add_edge(2, 3, weight=3.0)
         nx.write_edgelist(G, fh, data=True)
@@ -208,7 +208,7 @@ class TestEdgelist:
 
     def test_write_edgelist_4(self):
         fh = io.BytesIO()
-        G = nx.OrderedGraph()
+        G = nx.Graph()
         G.add_edge(1, 2, weight=2.0)
         G.add_edge(2, 3, weight=3.0)
         nx.write_edgelist(G, fh, data=[("weight")])
diff --git a/networkx/readwrite/tests/test_getattr_nxyaml_removal.py b/networkx/readwrite/tests/test_getattr_nxyaml_removal.py
deleted file mode 100644
index 83fc059..0000000
--- a/networkx/readwrite/tests/test_getattr_nxyaml_removal.py
+++ /dev/null
@@ -1,38 +0,0 @@
-"""Test that informative exception messages are raised when attempting to
-access nx_yaml."""
-
-import pytest
-
-_msg_stub = "\n.* has been removed from NetworkX"
-
-
-def test_access_from_module():
-    with pytest.raises(ImportError, match=_msg_stub):
-        from networkx.readwrite.nx_yaml import read_yaml
-    with pytest.raises(ImportError, match=_msg_stub):
-        from networkx.readwrite.nx_yaml import write_yaml
-
-
-def test_access_from_nx_namespace():
-    import networkx as nx
-
-    with pytest.raises(ImportError, match=_msg_stub):
-        nx.read_yaml
-    with pytest.raises(ImportError, match=_msg_stub):
-        nx.write_yaml
-
-
-def test_access_from_readwrite_pkg():
-    from networkx import readwrite
-
-    with pytest.raises(ImportError, match=_msg_stub):
-        readwrite.read_yaml
-    with pytest.raises(ImportError, match=_msg_stub):
-        readwrite.write_yaml
-
-
-def test_accessing_nx_yaml():
-    import networkx as nx
-
-    with pytest.raises(ImportError, match=_msg_stub):
-        nx.nx_yaml
diff --git a/networkx/readwrite/tests/test_gexf.py b/networkx/readwrite/tests/test_gexf.py
index 7166c09..6ff14c9 100644
--- a/networkx/readwrite/tests/test_gexf.py
+++ b/networkx/readwrite/tests/test_gexf.py
@@ -276,7 +276,7 @@ org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.gexf.net/\
 
     def test_write_with_node_attributes(self):
         # Addresses #673.
-        G = nx.OrderedGraph()
+        G = nx.Graph()
         G.add_edges_from([(0, 1), (1, 2), (2, 3)])
         for i in range(4):
             G.nodes[i]["id"] = i
@@ -462,7 +462,7 @@ gexf.net/1.2draft http://www.gexf.net/1.2draft/gexf.xsd" version="1.2">
         G = nx.MultiGraph()
         G.add_node(0, label="1", color="green")
         G.add_node(1, label="2", color="green")
-        G.add_edge(0, 1, id="0", wight=3, type="undirected", start=0, end=1)
+        G.add_edge(0, 1, id="0", weight=3, type="undirected", start=0, end=1)
         G.add_edge(0, 1, id="1", label="foo", start=0, end=1)
         G.add_edge(0, 1)
         fh = io.BytesIO()
@@ -491,6 +491,16 @@ gexf.net/1.2draft http://www.gexf.net/1.2draft/gexf.xsd" version="1.2">
             sorted(e) for e in H.edges()
         )
 
+        # Test missing alpha value for version >draft1.1 - set default alpha value
+        # to 1.0 instead of `None` when writing for better general compatibility
+        fh = io.BytesIO()
+        # G.nodes[0]["viz"]["color"] does not have an alpha value explicitly defined
+        # so the default is used instead
+        nx.write_gexf(G, fh, version="1.2draft")
+        fh.seek(0)
+        H = nx.read_gexf(fh, node_type=int)
+        assert H.nodes[0]["viz"]["color"]["a"] == 1.0
+
         # Second graph for the other branch
         G = nx.Graph()
         G.add_node(0, label="1", color="green")
diff --git a/networkx/readwrite/tests/test_gml.py b/networkx/readwrite/tests/test_gml.py
index 19fb7ed..0d0bebf 100644
--- a/networkx/readwrite/tests/test_gml.py
+++ b/networkx/readwrite/tests/test_gml.py
@@ -146,13 +146,13 @@ graph   [
     def test_parse_gml(self):
         G = nx.parse_gml(self.simple_data, label="label")
         assert sorted(G.nodes()) == ["Node 1", "Node 2", "Node 3"]
-        assert [e for e in sorted(G.edges())] == [
+        assert sorted(G.edges()) == [
             ("Node 1", "Node 2"),
             ("Node 2", "Node 3"),
             ("Node 3", "Node 1"),
         ]
 
-        assert [e for e in sorted(G.edges(data=True))] == [
+        assert sorted(G.edges(data=True)) == [
             (
                 "Node 1",
                 "Node 2",
@@ -215,7 +215,7 @@ graph
     def test_tuplelabels(self):
         # https://github.com/networkx/networkx/pull/1048
         # Writing tuple labels to GML failed.
-        G = nx.OrderedGraph()
+        G = nx.Graph()
         G.add_edge((0, 1), (1, 0))
         data = "\n".join(nx.generate_gml(G, stringizer=literal_stringizer))
         answer = """graph [
@@ -446,14 +446,14 @@ graph
         G = nx.Graph()
         G.name = data
         G.graph["data"] = data
-        G.add_node(0, int=-1, data=dict(data=data))
+        G.add_node(0, int=-1, data={"data": data})
         G.add_edge(0, 0, float=-2.5, data=data)
         gml = "\n".join(nx.generate_gml(G, stringizer=literal_stringizer))
         G = nx.parse_gml(gml, destringizer=literal_destringizer)
         assert data == G.name
         assert {"name": data, "data": data} == G.graph
-        assert list(G.nodes(data=True)) == [(0, dict(int=-1, data=dict(data=data)))]
-        assert list(G.edges(data=True)) == [(0, 0, dict(float=-2.5, data=data))]
+        assert list(G.nodes(data=True)) == [(0, {"int": -1, "data": {"data": data}})]
+        assert list(G.edges(data=True)) == [(0, 0, {"float": -2.5, "data": data})]
         G = nx.Graph()
         G.graph["data"] = "frozenset([1, 2, 3])"
         G = nx.parse_gml(nx.generate_gml(G), destringizer=literal_eval)
@@ -544,7 +544,7 @@ graph
             "directed 1 multigraph 1 ]"
         )
 
-        # Tests for string convertable alphanumeric id and label values
+        # Tests for string convertible alphanumeric id and label values
         nx.parse_gml("graph [edge [ source a target a ] node [ id a label b ] ]")
         nx.parse_gml(
             "graph [ node [ id n42 label 0 ] node [ id x43 label 1 ]"
@@ -571,10 +571,6 @@ graph
         G = nx.Graph()
         G.graph["data"] = frozenset([1, 2, 3])
         assert_generate_error(G, stringizer=literal_stringizer)
-        G = nx.Graph()
-        G.graph["data"] = []
-        assert_generate_error(G)
-        assert_generate_error(G, stringizer=len)
 
     def test_label_kwarg(self):
         G = nx.parse_gml(self.simple_data, label="id")
@@ -712,3 +708,23 @@ class TestPropertyLists:
             f.seek(0)
             graph = nx.read_gml(f)
         assert graph.nodes(data=True)["n1"] == {"properties": ["element"]}
+
+
+@pytest.mark.parametrize("coll", ([], ()))
+def test_stringize_empty_list_tuple(coll):
+    G = nx.path_graph(2)
+    G.nodes[0]["test"] = coll  # test serializing an empty collection
+    f = io.BytesIO()
+    nx.write_gml(G, f)  # Smoke test - should not raise
+    f.seek(0)
+    H = nx.read_gml(f)
+    assert H.nodes["0"]["test"] == coll  # Check empty list round-trips properly
+    # Check full round-tripping. Note that nodes are loaded as strings by
+    # default, so there needs to be some remapping prior to comparison
+    H = nx.relabel_nodes(H, {"0": 0, "1": 1})
+    assert nx.utils.graphs_equal(G, H)
+    # Same as above, but use destringizer for node remapping. Should have no
+    # effect on node attr
+    f.seek(0)
+    H = nx.read_gml(f, destringizer=int)
+    assert nx.utils.graphs_equal(G, H)
diff --git a/networkx/readwrite/tests/test_gpickle.py b/networkx/readwrite/tests/test_gpickle.py
deleted file mode 100644
index 3ef83b5..0000000
--- a/networkx/readwrite/tests/test_gpickle.py
+++ /dev/null
@@ -1,75 +0,0 @@
-import os
-import tempfile
-
-import networkx as nx
-from networkx.utils import edges_equal, graphs_equal, nodes_equal
-
-
-class TestGpickle:
-    @classmethod
-    def setup_class(cls):
-        G = nx.Graph(name="test")
-        e = [("a", "b"), ("b", "c"), ("c", "d"), ("d", "e"), ("e", "f"), ("a", "f")]
-        G.add_edges_from(e, width=10)
-        G.add_node("g", color="green")
-        G.graph["number"] = 1
-        DG = nx.DiGraph(G)
-        MG = nx.MultiGraph(G)
-        MG.add_edge("a", "a")
-        MDG = nx.MultiDiGraph(G)
-        MDG.add_edge("a", "a")
-        fG = G.copy()
-        fDG = DG.copy()
-        fMG = MG.copy()
-        fMDG = MDG.copy()
-        nx.freeze(fG)
-        nx.freeze(fDG)
-        nx.freeze(fMG)
-        nx.freeze(fMDG)
-        cls.G = G
-        cls.DG = DG
-        cls.MG = MG
-        cls.MDG = MDG
-        cls.fG = fG
-        cls.fDG = fDG
-        cls.fMG = fMG
-        cls.fMDG = fMDG
-
-    def test_gpickle(self):
-        for G in [
-            self.G,
-            self.DG,
-            self.MG,
-            self.MDG,
-            self.fG,
-            self.fDG,
-            self.fMG,
-            self.fMDG,
-        ]:
-            (fd, fname) = tempfile.mkstemp()
-            nx.write_gpickle(G, fname)
-            Gin = nx.read_gpickle(fname)
-            assert nodes_equal(list(G.nodes(data=True)), list(Gin.nodes(data=True)))
-            assert edges_equal(list(G.edges(data=True)), list(Gin.edges(data=True)))
-            assert graphs_equal(G, Gin)
-            os.close(fd)
-            os.unlink(fname)
-
-    def test_protocol(self):
-        for G in [
-            self.G,
-            self.DG,
-            self.MG,
-            self.MDG,
-            self.fG,
-            self.fDG,
-            self.fMG,
-            self.fMDG,
-        ]:
-            with tempfile.TemporaryFile() as f:
-                nx.write_gpickle(G, f, 0)
-                f.seek(0)
-                Gin = nx.read_gpickle(f)
-                assert nodes_equal(list(G.nodes(data=True)), list(Gin.nodes(data=True)))
-                assert edges_equal(list(G.edges(data=True)), list(Gin.edges(data=True)))
-                assert graphs_equal(G, Gin)
diff --git a/networkx/readwrite/tests/test_graphml.py b/networkx/readwrite/tests/test_graphml.py
index cc37132..a215a37 100644
--- a/networkx/readwrite/tests/test_graphml.py
+++ b/networkx/readwrite/tests/test_graphml.py
@@ -522,13 +522,13 @@ class TestReadGraphML(BaseGraphML):
                 # edges with no data, no keys:
                 (1, 2),
                 # edges with only data:
-                (1, 2, dict(key="data_key1")),
-                (1, 2, dict(id="data_id2")),
-                (1, 2, dict(key="data_key3", id="data_id3")),
+                (1, 2, {"key": "data_key1"}),
+                (1, 2, {"id": "data_id2"}),
+                (1, 2, {"key": "data_key3", "id": "data_id3"}),
                 # edges with both data and keys:
-                (1, 2, 103, dict(key="data_key4")),
-                (1, 2, 104, dict(id="data_id5")),
-                (1, 2, 105, dict(key="data_key6", id="data_id7")),
+                (1, 2, 103, {"key": "data_key4"}),
+                (1, 2, 104, {"id": "data_id5"}),
+                (1, 2, 105, {"key": "data_key6", "id": "data_id7"}),
             ]
         )
         fh = io.BytesIO()
@@ -1352,7 +1352,7 @@ class TestWriteGraphML(BaseGraphML):
                 ".//{http://graphml.graphdrawing.org/xmlns}edge"
             )
         ]
-        # verify edge id value is equal to sepcified attribute value
+        # verify edge id value is equal to specified attribute value
         assert sorted(edge_ids) == sorted(edge_attributes.values())
 
         # check graphml generated from generate_graphml()
@@ -1404,7 +1404,7 @@ class TestWriteGraphML(BaseGraphML):
                 ".//{http://graphml.graphdrawing.org/xmlns}edge"
             )
         ]
-        # verify edge id value is equal to sepcified attribute value
+        # verify edge id value is equal to specified attribute value
         assert sorted(edge_ids) == sorted(edge_attributes.values())
 
         # check graphml generated from generate_graphml()
@@ -1482,10 +1482,10 @@ class TestWriteGraphML(BaseGraphML):
         os.unlink(fname)
 
     def test_unicode_escape(self):
-        # test for handling json escaped stings in python 2 Issue #1880
+        # test for handling json escaped strings in python 2 Issue #1880
         import json
 
-        a = dict(a='{"a": "123"}')  # an object with many chars to escape
+        a = {"a": '{"a": "123"}'}  # an object with many chars to escape
         sa = json.dumps(a)
         G = nx.Graph()
         G.graph["test"] = sa
diff --git a/networkx/readwrite/tests/test_p2g.py b/networkx/readwrite/tests/test_p2g.py
index 8280870..e4c50de 100644
--- a/networkx/readwrite/tests/test_p2g.py
+++ b/networkx/readwrite/tests/test_p2g.py
@@ -43,7 +43,7 @@ c
 
 """
         fh = io.BytesIO()
-        G = nx.OrderedDiGraph()
+        G = nx.DiGraph()
         G.name = "foo"
         G.add_edges_from([(1, 2), (2, 3)])
         write_p2g(G, fh)
diff --git a/networkx/readwrite/tests/test_shp.py b/networkx/readwrite/tests/test_shp.py
deleted file mode 100644
index 7ada2e8..0000000
--- a/networkx/readwrite/tests/test_shp.py
+++ /dev/null
@@ -1,288 +0,0 @@
-"""Unit tests for shp.
-"""
-
-import os
-import tempfile
-
-import pytest
-
-ogr = pytest.importorskip("osgeo.ogr")
-
-import networkx as nx
-
-
-class TestShp:
-    def setup_method(self):
-        def createlayer(driver, layerType=ogr.wkbLineString):
-            lyr = driver.CreateLayer("edges", None, layerType)
-            namedef = ogr.FieldDefn("Name", ogr.OFTString)
-            namedef.SetWidth(32)
-            lyr.CreateField(namedef)
-            return lyr
-
-        drv = ogr.GetDriverByName("ESRI Shapefile")
-
-        testdir = os.path.join(tempfile.gettempdir(), "shpdir")
-        shppath = os.path.join(tempfile.gettempdir(), "tmpshp.shp")
-        multi_shppath = os.path.join(tempfile.gettempdir(), "tmp_mshp.shp")
-
-        self.deletetmp(drv, testdir, shppath, multi_shppath)
-        os.mkdir(testdir)
-
-        self.names = ["a", "b", "c", "c"]  # edgenames
-        self.paths = (
-            [(1.0, 1.0), (2.0, 2.0)],
-            [(2.0, 2.0), (3.0, 3.0)],
-            [(0.9, 0.9), (4.0, 0.9), (4.0, 2.0)],
-        )
-
-        self.simplified_names = ["a", "b", "c"]  # edgenames
-        self.simplified_paths = (
-            [(1.0, 1.0), (2.0, 2.0)],
-            [(2.0, 2.0), (3.0, 3.0)],
-            [(0.9, 0.9), (4.0, 2.0)],
-        )
-
-        self.multi_names = ["a", "a", "a", "a"]  # edgenames
-
-        shp = drv.CreateDataSource(shppath)
-        lyr = createlayer(shp)
-
-        for path, name in zip(self.paths, self.names):
-            feat = ogr.Feature(lyr.GetLayerDefn())
-            g = ogr.Geometry(ogr.wkbLineString)
-            for p in path:
-                g.AddPoint_2D(*p)
-            feat.SetGeometry(g)
-            feat.SetField("Name", name)
-            lyr.CreateFeature(feat)
-
-        # create single record multiline shapefile for testing
-        multi_shp = drv.CreateDataSource(multi_shppath)
-        multi_lyr = createlayer(multi_shp, ogr.wkbMultiLineString)
-
-        multi_g = ogr.Geometry(ogr.wkbMultiLineString)
-        for path in self.paths:
-
-            g = ogr.Geometry(ogr.wkbLineString)
-            for p in path:
-                g.AddPoint_2D(*p)
-
-            multi_g.AddGeometry(g)
-
-        multi_feat = ogr.Feature(multi_lyr.GetLayerDefn())
-        multi_feat.SetGeometry(multi_g)
-        multi_feat.SetField("Name", "a")
-        multi_lyr.CreateFeature(multi_feat)
-
-        self.shppath = shppath
-        self.multi_shppath = multi_shppath
-        self.testdir = testdir
-        self.drv = drv
-
-    def deletetmp(self, drv, *paths):
-        for p in paths:
-            if os.path.exists(p):
-                drv.DeleteDataSource(p)
-
-    def testload(self):
-        def compare_graph_paths_names(g, paths, names):
-            expected = nx.DiGraph()
-            for p in paths:
-                nx.add_path(expected, p)
-            assert sorted(expected.nodes) == sorted(g.nodes)
-            assert sorted(expected.edges()) == sorted(g.edges())
-            g_names = [g.get_edge_data(s, e)["Name"] for s, e in g.edges()]
-            assert names == sorted(g_names)
-
-        # simplified
-        G = nx.read_shp(self.shppath)
-        compare_graph_paths_names(G, self.simplified_paths, self.simplified_names)
-
-        # unsimplified
-        G = nx.read_shp(self.shppath, simplify=False)
-        compare_graph_paths_names(G, self.paths, self.names)
-
-        # multiline unsimplified
-        G = nx.read_shp(self.multi_shppath, simplify=False)
-        compare_graph_paths_names(G, self.paths, self.multi_names)
-
-    def checkgeom(self, lyr, expected):
-        feature = lyr.GetNextFeature()
-        actualwkt = []
-        while feature:
-            actualwkt.append(feature.GetGeometryRef().ExportToWkt())
-            feature = lyr.GetNextFeature()
-        assert sorted(expected) == sorted(actualwkt)
-
-    def test_geometryexport(self):
-        expectedpoints_simple = (
-            "POINT (1 1)",
-            "POINT (2 2)",
-            "POINT (3 3)",
-            "POINT (0.9 0.9)",
-            "POINT (4 2)",
-        )
-        expectedlines_simple = (
-            "LINESTRING (1 1,2 2)",
-            "LINESTRING (2 2,3 3)",
-            "LINESTRING (0.9 0.9,4.0 0.9,4 2)",
-        )
-        expectedpoints = (
-            "POINT (1 1)",
-            "POINT (2 2)",
-            "POINT (3 3)",
-            "POINT (0.9 0.9)",
-            "POINT (4.0 0.9)",
-            "POINT (4 2)",
-        )
-        expectedlines = (
-            "LINESTRING (1 1,2 2)",
-            "LINESTRING (2 2,3 3)",
-            "LINESTRING (0.9 0.9,4.0 0.9)",
-            "LINESTRING (4.0 0.9,4 2)",
-        )
-
-        tpath = os.path.join(tempfile.gettempdir(), "shpdir")
-        G = nx.read_shp(self.shppath)
-        nx.write_shp(G, tpath)
-        shpdir = ogr.Open(tpath)
-        self.checkgeom(shpdir.GetLayerByName("nodes"), expectedpoints_simple)
-        self.checkgeom(shpdir.GetLayerByName("edges"), expectedlines_simple)
-
-        # Test unsimplified
-        # Nodes should have additional point,
-        # edges should be 'flattened'
-        G = nx.read_shp(self.shppath, simplify=False)
-        nx.write_shp(G, tpath)
-        shpdir = ogr.Open(tpath)
-        self.checkgeom(shpdir.GetLayerByName("nodes"), expectedpoints)
-        self.checkgeom(shpdir.GetLayerByName("edges"), expectedlines)
-
-    def test_attributeexport(self):
-        def testattributes(lyr, graph):
-            feature = lyr.GetNextFeature()
-            while feature:
-                coords = []
-                ref = feature.GetGeometryRef()
-                last = ref.GetPointCount() - 1
-                edge_nodes = (ref.GetPoint_2D(0), ref.GetPoint_2D(last))
-                name = feature.GetFieldAsString("Name")
-                assert graph.get_edge_data(*edge_nodes)["Name"] == name
-                feature = lyr.GetNextFeature()
-
-        tpath = os.path.join(tempfile.gettempdir(), "shpdir")
-
-        G = nx.read_shp(self.shppath)
-        nx.write_shp(G, tpath)
-        shpdir = ogr.Open(tpath)
-        edges = shpdir.GetLayerByName("edges")
-        testattributes(edges, G)
-
-    # Test export of node attributes in nx.write_shp (#2778)
-    def test_nodeattributeexport(self):
-        tpath = os.path.join(tempfile.gettempdir(), "shpdir")
-
-        G = nx.DiGraph()
-        A = (0, 0)
-        B = (1, 1)
-        C = (2, 2)
-        G.add_edge(A, B)
-        G.add_edge(A, C)
-        label = "node_label"
-        for n, d in G.nodes(data=True):
-            d["label"] = label
-        nx.write_shp(G, tpath)
-
-        H = nx.read_shp(tpath)
-        for n, d in H.nodes(data=True):
-            assert d["label"] == label
-
-    def test_wkt_export(self):
-        G = nx.DiGraph()
-        tpath = os.path.join(tempfile.gettempdir(), "shpdir")
-        points = ("POINT (0.9 0.9)", "POINT (4 2)")
-        line = ("LINESTRING (0.9 0.9,4 2)",)
-        G.add_node(1, Wkt=points[0])
-        G.add_node(2, Wkt=points[1])
-        G.add_edge(1, 2, Wkt=line[0])
-        try:
-            nx.write_shp(G, tpath)
-        except Exception as err:
-            assert False, err
-        shpdir = ogr.Open(tpath)
-        self.checkgeom(shpdir.GetLayerByName("nodes"), points)
-        self.checkgeom(shpdir.GetLayerByName("edges"), line)
-
-    def teardown_method(self):
-        self.deletetmp(self.drv, self.testdir, self.shppath)
-
-
-def test_read_shp_nofile():
-    with pytest.raises(RuntimeError):
-        G = nx.read_shp("hopefully_this_file_will_not_be_available")
-
-
-class TestMissingGeometry:
-    def setup_method(self):
-        self.setup_path()
-        self.delete_shapedir()
-        self.create_shapedir()
-
-    def teardown_method(self):
-        self.delete_shapedir()
-
-    def setup_path(self):
-        self.path = os.path.join(tempfile.gettempdir(), "missing_geometry")
-
-    def create_shapedir(self):
-        drv = ogr.GetDriverByName("ESRI Shapefile")
-        shp = drv.CreateDataSource(self.path)
-        lyr = shp.CreateLayer("nodes", None, ogr.wkbPoint)
-        feature = ogr.Feature(lyr.GetLayerDefn())
-        feature.SetGeometry(None)
-        lyr.CreateFeature(feature)
-        feature.Destroy()
-
-    def delete_shapedir(self):
-        drv = ogr.GetDriverByName("ESRI Shapefile")
-        if os.path.exists(self.path):
-            drv.DeleteDataSource(self.path)
-
-    def test_missing_geometry(self):
-        with pytest.raises(nx.NetworkXError):
-            G = nx.read_shp(self.path)
-
-
-class TestMissingAttrWrite:
-    def setup_method(self):
-        self.setup_path()
-        self.delete_shapedir()
-
-    def teardown_method(self):
-        self.delete_shapedir()
-
-    def setup_path(self):
-        self.path = os.path.join(tempfile.gettempdir(), "missing_attributes")
-
-    def delete_shapedir(self):
-        drv = ogr.GetDriverByName("ESRI Shapefile")
-        if os.path.exists(self.path):
-            drv.DeleteDataSource(self.path)
-
-    def test_missing_attributes(self):
-        G = nx.DiGraph()
-        A = (0, 0)
-        B = (1, 1)
-        C = (2, 2)
-        G.add_edge(A, B, foo=100)
-        G.add_edge(A, C)
-
-        nx.write_shp(G, self.path)
-        H = nx.read_shp(self.path)
-
-        for u, v, d in H.edges(data=True):
-            if u == A and v == B:
-                assert d["foo"] == 100
-            if u == A and v == C:
-                assert d["foo"] is None
diff --git a/networkx/readwrite/tests/test_text.py b/networkx/readwrite/tests/test_text.py
index 4cd618c..539927e 100644
--- a/networkx/readwrite/tests/test_text.py
+++ b/networkx/readwrite/tests/test_text.py
@@ -1,3 +1,5 @@
+import random
+from itertools import product
 from textwrap import dedent
 
 import pytest
@@ -5,7 +7,7 @@ import pytest
 import networkx as nx
 
 
-def test_directed_tree_str():
+def test_forest_str_directed():
     # Create a directed forest with labels
     graph = nx.balanced_tree(r=2, h=2, create_using=nx.DiGraph)
     for node in graph.nodes:
@@ -15,8 +17,8 @@ def test_directed_tree_str():
         """
         ╙── 0
             ├─╼ 1
-            │   ├─╼ 3
-            │   └─╼ 4
+            │   ├─╼ 3
+            │   └─╼ 4
             └─╼ 2
                 ├─╼ 5
                 └─╼ 6
@@ -27,8 +29,8 @@ def test_directed_tree_str():
         """
         ╙── node_a
             ├─╼ node_b
-            │   ├─╼ node_d
-            │   └─╼ node_e
+            │   ├─╼ node_d
+            │   └─╼ node_e
             └─╼ node_c
                 ├─╼ node_f
                 └─╼ node_g
@@ -58,12 +60,44 @@ def test_directed_tree_str():
     assert ret is None
 
 
-def test_empty_graph():
-    assert nx.forest_str(nx.DiGraph()) == "╙"
-    assert nx.forest_str(nx.Graph()) == "╙"
+def test_write_network_text_empty_graph():
+    def _graph_str(g, **kw):
+        printbuf = []
+        nx.write_network_text(g, printbuf.append, end="", **kw)
+        return "\n".join(printbuf)
 
+    assert _graph_str(nx.DiGraph()) == "╙"
+    assert _graph_str(nx.Graph()) == "╙"
+    assert _graph_str(nx.DiGraph(), ascii_only=True) == "+"
+    assert _graph_str(nx.Graph(), ascii_only=True) == "+"
 
-def test_directed_multi_tree_forest():
+
+def test_write_network_text_within_forest_glyph():
+    g = nx.DiGraph()
+    g.add_nodes_from([1, 2, 3, 4])
+    g.add_edge(2, 4)
+    lines = []
+    write = lines.append
+    nx.write_network_text(g, path=write, end="")
+    nx.write_network_text(g, path=write, ascii_only=True, end="")
+    text = "\n".join(lines)
+    print(text)
+    target = dedent(
+        """
+        ╟── 1
+        ╟── 2
+        ╎   └─╼ 4
+        ╙── 3
+        +-- 1
+        +-- 2
+        :   L-> 4
+        +-- 3
+        """
+    ).strip()
+    assert text == target
+
+
+def test_forest_str_directed_multi_tree():
     tree1 = nx.balanced_tree(r=2, h=2, create_using=nx.DiGraph)
     tree2 = nx.balanced_tree(r=2, h=2, create_using=nx.DiGraph)
     forest = nx.disjoint_union_all([tree1, tree2])
@@ -73,16 +107,16 @@ def test_directed_multi_tree_forest():
     target = dedent(
         """
         ╟── 0
-        ╎   ├─╼ 1
-        ╎   │   ├─╼ 3
-        ╎   │   └─╼ 4
-        ╎   └─╼ 2
-        ╎       ├─╼ 5
-        ╎       └─╼ 6
+        ╎   ├─╼ 1
+        ╎   │   ├─╼ 3
+        ╎   │   └─╼ 4
+        ╎   └─╼ 2
+        ╎       ├─╼ 5
+        ╎       └─╼ 6
         ╙── 7
             ├─╼ 8
-            │   ├─╼ 10
-            │   └─╼ 11
+            │   ├─╼ 10
+            │   └─╼ 11
             └─╼ 9
                 ├─╼ 12
                 └─╼ 13
@@ -98,23 +132,23 @@ def test_directed_multi_tree_forest():
     target = dedent(
         """
         ╟── 0
-        ╎   ├─╼ 1
-        ╎   │   ├─╼ 3
-        ╎   │   └─╼ 4
-        ╎   └─╼ 2
-        ╎       ├─╼ 5
-        ╎       └─╼ 6
+        ╎   ├─╼ 1
+        ╎   │   ├─╼ 3
+        ╎   │   └─╼ 4
+        ╎   └─╼ 2
+        ╎       ├─╼ 5
+        ╎       └─╼ 6
         ╟── 14
-        ╎   ├─╼ 15
-        ╎   │   ├─╼ 17
-        ╎   │   └─╼ 18
-        ╎   └─╼ 16
-        ╎       ├─╼ 19
-        ╎       └─╼ 20
+        ╎   ├─╼ 15
+        ╎   │   ├─╼ 17
+        ╎   │   └─╼ 18
+        ╎   └─╼ 16
+        ╎       ├─╼ 19
+        ╎       └─╼ 20
         ╙── 7
             ├─╼ 8
-            │   ├─╼ 10
-            │   └─╼ 11
+            │   ├─╼ 10
+            │   └─╼ 11
             └─╼ 9
                 ├─╼ 12
                 └─╼ 13
@@ -128,23 +162,23 @@ def test_directed_multi_tree_forest():
     target = dedent(
         """
         +-- 0
-        :   |-> 1
-        :   |   |-> 3
-        :   |   L-> 4
-        :   L-> 2
-        :       |-> 5
-        :       L-> 6
+        :   |-> 1
+        :   |   |-> 3
+        :   |   L-> 4
+        :   L-> 2
+        :       |-> 5
+        :       L-> 6
         +-- 14
-        :   |-> 15
-        :   |   |-> 17
-        :   |   L-> 18
-        :   L-> 16
-        :       |-> 19
-        :       L-> 20
+        :   |-> 15
+        :   |   |-> 17
+        :   |   L-> 18
+        :   L-> 16
+        :       |-> 19
+        :       L-> 20
         +-- 7
             |-> 8
-            |   |-> 10
-            |   L-> 11
+            |   |-> 10
+            |   L-> 11
             L-> 9
                 |-> 12
                 L-> 13
@@ -153,7 +187,7 @@ def test_directed_multi_tree_forest():
     assert ret == target
 
 
-def test_undirected_multi_tree_forest():
+def test_forest_str_undirected_multi_tree():
     tree1 = nx.balanced_tree(r=2, h=2, create_using=nx.Graph)
     tree2 = nx.balanced_tree(r=2, h=2, create_using=nx.Graph)
     tree2 = nx.relabel_nodes(tree2, {n: n + len(tree1) for n in tree2.nodes})
@@ -164,16 +198,16 @@ def test_undirected_multi_tree_forest():
     target = dedent(
         """
         ╟── 0
-        ╎   ├── 1
-        ╎   │   ├── 3
-        ╎   │   └── 4
-        ╎   └── 2
-        ╎       ├── 5
-        ╎       └── 6
+        ╎   ├── 1
+        ╎   │   ├── 3
+        ╎   │   └── 4
+        ╎   └── 2
+        ╎       ├── 5
+        ╎       └── 6
         ╙── 7
             ├── 8
-            │   ├── 10
-            │   └── 11
+            │   ├── 10
+            │   └── 11
             └── 9
                 ├── 12
                 └── 13
@@ -187,16 +221,16 @@ def test_undirected_multi_tree_forest():
     target = dedent(
         """
         +-- 0
-        :   |-- 1
-        :   |   |-- 3
-        :   |   L-- 4
-        :   L-- 2
-        :       |-- 5
-        :       L-- 6
+        :   |-- 1
+        :   |   |-- 3
+        :   |   L-- 4
+        :   L-- 2
+        :       |-- 5
+        :       L-- 6
         +-- 7
             |-- 8
-            |   |-- 10
-            |   L-- 11
+            |   |-- 10
+            |   L-- 11
             L-- 9
                 |-- 12
                 L-- 13
@@ -205,8 +239,8 @@ def test_undirected_multi_tree_forest():
     assert ret == target
 
 
-def test_undirected_tree_str():
-    # Create a directed forest with labels
+def test_forest_str_undirected():
+    # Create a directed forest
     graph = nx.balanced_tree(r=2, h=2, create_using=nx.Graph)
 
     # arbitrary starting point
@@ -216,8 +250,8 @@ def test_undirected_tree_str():
         """
         ╙── 0
             ├── 1
-            │   ├── 3
-            │   └── 4
+            │   ├── 3
+            │   └── 4
             └── 2
                 ├── 5
                 └── 6
@@ -234,9 +268,9 @@ def test_undirected_tree_str():
         """
         ╙── 2
             ├── 0
-            │   └── 1
-            │       ├── 3
-            │       └── 4
+            │   └── 1
+            │       ├── 3
+            │       └── 4
             ├── 5
             └── 6
         """
@@ -258,9 +292,9 @@ def test_forest_str_errors():
         nx.forest_str(dgraph)
 
 
-def test_overspecified_sources():
+def test_forest_str_overspecified_sources():
     """
-    When sources are directly specified, we wont be able to determine when we
+    When sources are directly specified, we won't be able to determine when we
     are in the last component, so there will always be a trailing, leftmost
     pipe.
     """
@@ -276,25 +310,25 @@ def test_overspecified_sources():
     target1 = dedent(
         """
         ╟── 0
-        ╎   ├─╼ 1
-        ╎   └─╼ 2
+        ╎   ├─╼ 1
+        ╎   └─╼ 2
         ╟── 3
-        ╎   └─╼ 4
-        ╎       └─╼ 5
+        ╎   └─╼ 4
+        ╎       └─╼ 5
         ╟── 6
-        ╎   ├─╼ 7
-        ╎   └─╼ 8
+        ╎   ├─╼ 7
+        ╎   └─╼ 8
         """
     ).strip()
 
     target2 = dedent(
         """
         ╟── 0
-        ╎   ├─╼ 1
-        ╎   └─╼ 2
+        ╎   ├─╼ 1
+        ╎   └─╼ 2
         ╟── 3
-        ╎   └─╼ 4
-        ╎       └─╼ 5
+        ╎   └─╼ 4
+        ╎       └─╼ 5
         ╙── 6
             ├─╼ 7
             └─╼ 8
@@ -303,15 +337,1162 @@ def test_overspecified_sources():
 
     lines = []
     nx.forest_str(graph, write=lines.append, sources=graph.nodes)
-    got1 = chr(10).join(lines)
+    got1 = "\n".join(lines)
     print("got1: ")
     print(got1)
 
     lines = []
     nx.forest_str(graph, write=lines.append)
-    got2 = chr(10).join(lines)
+    got2 = "\n".join(lines)
     print("got2: ")
     print(got2)
 
     assert got1 == target1
     assert got2 == target2
+
+
+def test_write_network_text_iterative_add_directed_edges():
+    """
+    Walk through the cases going from a disconnected to fully connected graph
+    """
+    graph = nx.DiGraph()
+    graph.add_nodes_from([1, 2, 3, 4])
+    lines = []
+    write = lines.append
+    write("--- initial state ---")
+    nx.write_network_text(graph, path=write, end="")
+    for i, j in product(graph.nodes, graph.nodes):
+        write(f"--- add_edge({i}, {j}) ---")
+        graph.add_edge(i, j)
+        nx.write_network_text(graph, path=write, end="")
+    text = "\n".join(lines)
+    print(text)
+    # defined starting point
+    target = dedent(
+        """
+        --- initial state ---
+        ╟── 1
+        ╟── 2
+        ╟── 3
+        ╙── 4
+        --- add_edge(1, 1) ---
+        ╟── 1 ╾ 1
+        ╎   └─╼  ...
+        ╟── 2
+        ╟── 3
+        ╙── 4
+        --- add_edge(1, 2) ---
+        ╟── 1 ╾ 1
+        ╎   ├─╼ 2
+        ╎   └─╼  ...
+        ╟── 3
+        ╙── 4
+        --- add_edge(1, 3) ---
+        ╟── 1 ╾ 1
+        ╎   ├─╼ 2
+        ╎   ├─╼ 3
+        ╎   └─╼  ...
+        ╙── 4
+        --- add_edge(1, 4) ---
+        ╙── 1 ╾ 1
+            ├─╼ 2
+            ├─╼ 3
+            ├─╼ 4
+            └─╼  ...
+        --- add_edge(2, 1) ---
+        ╙── 2 ╾ 1
+            └─╼ 1 ╾ 1
+                ├─╼ 3
+                ├─╼ 4
+                └─╼  ...
+        --- add_edge(2, 2) ---
+        ╙── 1 ╾ 1, 2
+            ├─╼ 2 ╾ 2
+            │   └─╼  ...
+            ├─╼ 3
+            ├─╼ 4
+            └─╼  ...
+        --- add_edge(2, 3) ---
+        ╙── 1 ╾ 1, 2
+            ├─╼ 2 ╾ 2
+            │   ├─╼ 3 ╾ 1
+            │   └─╼  ...
+            ├─╼ 4
+            └─╼  ...
+        --- add_edge(2, 4) ---
+        ╙── 1 ╾ 1, 2
+            ├─╼ 2 ╾ 2
+            │   ├─╼ 3 ╾ 1
+            │   ├─╼ 4 ╾ 1
+            │   └─╼  ...
+            └─╼  ...
+        --- add_edge(3, 1) ---
+        ╙── 2 ╾ 1, 2
+            ├─╼ 1 ╾ 1, 3
+            │   ├─╼ 3 ╾ 2
+            │   │   └─╼  ...
+            │   ├─╼ 4 ╾ 2
+            │   └─╼  ...
+            └─╼  ...
+        --- add_edge(3, 2) ---
+        ╙── 3 ╾ 1, 2
+            ├─╼ 1 ╾ 1, 2
+            │   ├─╼ 2 ╾ 2, 3
+            │   │   ├─╼ 4 ╾ 1
+            │   │   └─╼  ...
+            │   └─╼  ...
+            └─╼  ...
+        --- add_edge(3, 3) ---
+        ╙── 1 ╾ 1, 2, 3
+            ├─╼ 2 ╾ 2, 3
+            │   ├─╼ 3 ╾ 1, 3
+            │   │   └─╼  ...
+            │   ├─╼ 4 ╾ 1
+            │   └─╼  ...
+            └─╼  ...
+        --- add_edge(3, 4) ---
+        ╙── 1 ╾ 1, 2, 3
+            ├─╼ 2 ╾ 2, 3
+            │   ├─╼ 3 ╾ 1, 3
+            │   │   ├─╼ 4 ╾ 1, 2
+            │   │   └─╼  ...
+            │   └─╼  ...
+            └─╼  ...
+        --- add_edge(4, 1) ---
+        ╙── 2 ╾ 1, 2, 3
+            ├─╼ 1 ╾ 1, 3, 4
+            │   ├─╼ 3 ╾ 2, 3
+            │   │   ├─╼ 4 ╾ 1, 2
+            │   │   │   └─╼  ...
+            │   │   └─╼  ...
+            │   └─╼  ...
+            └─╼  ...
+        --- add_edge(4, 2) ---
+        ╙── 3 ╾ 1, 2, 3
+            ├─╼ 1 ╾ 1, 2, 4
+            │   ├─╼ 2 ╾ 2, 3, 4
+            │   │   ├─╼ 4 ╾ 1, 3
+            │   │   │   └─╼  ...
+            │   │   └─╼  ...
+            │   └─╼  ...
+            └─╼  ...
+        --- add_edge(4, 3) ---
+        ╙── 4 ╾ 1, 2, 3
+            ├─╼ 1 ╾ 1, 2, 3
+            │   ├─╼ 2 ╾ 2, 3, 4
+            │   │   ├─╼ 3 ╾ 1, 3, 4
+            │   │   │   └─╼  ...
+            │   │   └─╼  ...
+            │   └─╼  ...
+            └─╼  ...
+        --- add_edge(4, 4) ---
+        ╙── 1 ╾ 1, 2, 3, 4
+            ├─╼ 2 ╾ 2, 3, 4
+            │   ├─╼ 3 ╾ 1, 3, 4
+            │   │   ├─╼ 4 ╾ 1, 2, 4
+            │   │   │   └─╼  ...
+            │   │   └─╼  ...
+            │   └─╼  ...
+            └─╼  ...
+        """
+    ).strip()
+    assert target == text
+
+
+def test_write_network_text_iterative_add_undirected_edges():
+    """
+    Walk through the cases going from a disconnected to fully connected graph
+    """
+    graph = nx.Graph()
+    graph.add_nodes_from([1, 2, 3, 4])
+    lines = []
+    write = lines.append
+    write("--- initial state ---")
+    nx.write_network_text(graph, path=write, end="")
+    for i, j in product(graph.nodes, graph.nodes):
+        if i == j:
+            continue
+        write(f"--- add_edge({i}, {j}) ---")
+        graph.add_edge(i, j)
+        nx.write_network_text(graph, path=write, end="")
+    text = "\n".join(lines)
+    print(text)
+    target = dedent(
+        """
+        --- initial state ---
+        ╟── 1
+        ╟── 2
+        ╟── 3
+        ╙── 4
+        --- add_edge(1, 2) ---
+        ╟── 3
+        ╟── 4
+        ╙── 1
+            └── 2
+        --- add_edge(1, 3) ---
+        ╟── 4
+        ╙── 2
+            └── 1
+                └── 3
+        --- add_edge(1, 4) ---
+        ╙── 2
+            └── 1
+                ├── 3
+                └── 4
+        --- add_edge(2, 1) ---
+        ╙── 2
+            └── 1
+                ├── 3
+                └── 4
+        --- add_edge(2, 3) ---
+        ╙── 4
+            └── 1
+                ├── 2
+                │   └── 3 ─ 1
+                └──  ...
+        --- add_edge(2, 4) ---
+        ╙── 3
+            ├── 1
+            │   ├── 2 ─ 3
+            │   │   └── 4 ─ 1
+            │   └──  ...
+            └──  ...
+        --- add_edge(3, 1) ---
+        ╙── 3
+            ├── 1
+            │   ├── 2 ─ 3
+            │   │   └── 4 ─ 1
+            │   └──  ...
+            └──  ...
+        --- add_edge(3, 2) ---
+        ╙── 3
+            ├── 1
+            │   ├── 2 ─ 3
+            │   │   └── 4 ─ 1
+            │   └──  ...
+            └──  ...
+        --- add_edge(3, 4) ---
+        ╙── 1
+            ├── 2
+            │   ├── 3 ─ 1
+            │   │   └── 4 ─ 1, 2
+            │   └──  ...
+            └──  ...
+        --- add_edge(4, 1) ---
+        ╙── 1
+            ├── 2
+            │   ├── 3 ─ 1
+            │   │   └── 4 ─ 1, 2
+            │   └──  ...
+            └──  ...
+        --- add_edge(4, 2) ---
+        ╙── 1
+            ├── 2
+            │   ├── 3 ─ 1
+            │   │   └── 4 ─ 1, 2
+            │   └──  ...
+            └──  ...
+        --- add_edge(4, 3) ---
+        ╙── 1
+            ├── 2
+            │   ├── 3 ─ 1
+            │   │   └── 4 ─ 1, 2
+            │   └──  ...
+            └──  ...
+        """
+    ).strip()
+    assert target == text
+
+
+def test_write_network_text_iterative_add_random_directed_edges():
+    """
+    Walk through the cases going from a disconnected to fully connected graph
+    """
+
+    rng = random.Random(724466096)
+    graph = nx.DiGraph()
+    graph.add_nodes_from([1, 2, 3, 4, 5])
+    possible_edges = list(product(graph.nodes, graph.nodes))
+    rng.shuffle(possible_edges)
+    graph.add_edges_from(possible_edges[0:8])
+    lines = []
+    write = lines.append
+    write("--- initial state ---")
+    nx.write_network_text(graph, path=write, end="")
+    for i, j in possible_edges[8:12]:
+        write(f"--- add_edge({i}, {j}) ---")
+        graph.add_edge(i, j)
+        nx.write_network_text(graph, path=write, end="")
+    text = "\n".join(lines)
+    print(text)
+    target = dedent(
+        """
+        --- initial state ---
+        ╙── 3 ╾ 5
+            └─╼ 2 ╾ 2
+                ├─╼ 4 ╾ 4
+                │   ├─╼ 5
+                │   │   ├─╼ 1 ╾ 1
+                │   │   │   └─╼  ...
+                │   │   └─╼  ...
+                │   └─╼  ...
+                └─╼  ...
+        --- add_edge(4, 1) ---
+        ╙── 3 ╾ 5
+            └─╼ 2 ╾ 2
+                ├─╼ 4 ╾ 4
+                │   ├─╼ 5
+                │   │   ├─╼ 1 ╾ 1, 4
+                │   │   │   └─╼  ...
+                │   │   └─╼  ...
+                │   └─╼  ...
+                └─╼  ...
+        --- add_edge(2, 1) ---
+        ╙── 3 ╾ 5
+            └─╼ 2 ╾ 2
+                ├─╼ 4 ╾ 4
+                │   ├─╼ 5
+                │   │   ├─╼ 1 ╾ 1, 4, 2
+                │   │   │   └─╼  ...
+                │   │   └─╼  ...
+                │   └─╼  ...
+                └─╼  ...
+        --- add_edge(5, 2) ---
+        ╙── 3 ╾ 5
+            └─╼ 2 ╾ 2, 5
+                ├─╼ 4 ╾ 4
+                │   ├─╼ 5
+                │   │   ├─╼ 1 ╾ 1, 4, 2
+                │   │   │   └─╼  ...
+                │   │   └─╼  ...
+                │   └─╼  ...
+                └─╼  ...
+        --- add_edge(1, 5) ---
+        ╙── 3 ╾ 5
+            └─╼ 2 ╾ 2, 5
+                ├─╼ 4 ╾ 4
+                │   ├─╼ 5 ╾ 1
+                │   │   ├─╼ 1 ╾ 1, 4, 2
+                │   │   │   └─╼  ...
+                │   │   └─╼  ...
+                │   └─╼  ...
+                └─╼  ...
+
+        """
+    ).strip()
+    assert target == text
+
+
+def test_write_network_text_nearly_forest():
+    g = nx.DiGraph()
+    g.add_edge(1, 2)
+    g.add_edge(1, 5)
+    g.add_edge(2, 3)
+    g.add_edge(3, 4)
+    g.add_edge(5, 6)
+    g.add_edge(6, 7)
+    g.add_edge(6, 8)
+    orig = g.copy()
+    g.add_edge(1, 8)  # forward edge
+    g.add_edge(4, 2)  # back edge
+    g.add_edge(6, 3)  # cross edge
+    lines = []
+    write = lines.append
+    write("--- directed case ---")
+    nx.write_network_text(orig, path=write, end="")
+    write("--- add (1, 8), (4, 2), (6, 3) ---")
+    nx.write_network_text(g, path=write, end="")
+    write("--- undirected case ---")
+    nx.write_network_text(orig.to_undirected(), path=write, sources=[1], end="")
+    write("--- add (1, 8), (4, 2), (6, 3) ---")
+    nx.write_network_text(g.to_undirected(), path=write, sources=[1], end="")
+    text = "\n".join(lines)
+    print(text)
+    target = dedent(
+        """
+        --- directed case ---
+        ╙── 1
+            ├─╼ 2
+            │   └─╼ 3
+            │       └─╼ 4
+            └─╼ 5
+                └─╼ 6
+                    ├─╼ 7
+                    └─╼ 8
+        --- add (1, 8), (4, 2), (6, 3) ---
+        ╙── 1
+            ├─╼ 2 ╾ 4
+            │   └─╼ 3 ╾ 6
+            │       └─╼ 4
+            │           └─╼  ...
+            ├─╼ 5
+            │   └─╼ 6
+            │       ├─╼ 7
+            │       ├─╼ 8 ╾ 1
+            │       └─╼  ...
+            └─╼  ...
+        --- undirected case ---
+        ╙── 1
+            ├── 2
+            │   └── 3
+            │       └── 4
+            └── 5
+                └── 6
+                    ├── 7
+                    └── 8
+        --- add (1, 8), (4, 2), (6, 3) ---
+        ╙── 1
+            ├── 2
+            │   ├── 3
+            │   │   ├── 4 ─ 2
+            │   │   └── 6
+            │   │       ├── 5 ─ 1
+            │   │       ├── 7
+            │   │       └── 8 ─ 1
+            │   └──  ...
+            └──  ...
+        """
+    ).strip()
+    assert target == text
+
+
+def test_write_network_text_complete_graph_ascii_only():
+    graph = nx.generators.complete_graph(5, create_using=nx.DiGraph)
+    lines = []
+    write = lines.append
+    write("--- directed case ---")
+    nx.write_network_text(graph, path=write, ascii_only=True, end="")
+    write("--- undirected case ---")
+    nx.write_network_text(graph.to_undirected(), path=write, ascii_only=True, end="")
+    text = "\n".join(lines)
+    print(text)
+    target = dedent(
+        """
+        --- directed case ---
+        +-- 0 <- 1, 2, 3, 4
+            |-> 1 <- 2, 3, 4
+            |   |-> 2 <- 0, 3, 4
+            |   |   |-> 3 <- 0, 1, 4
+            |   |   |   |-> 4 <- 0, 1, 2
+            |   |   |   |   L->  ...
+            |   |   |   L->  ...
+            |   |   L->  ...
+            |   L->  ...
+            L->  ...
+        --- undirected case ---
+        +-- 0
+            |-- 1
+            |   |-- 2 - 0
+            |   |   |-- 3 - 0, 1
+            |   |   |   L-- 4 - 0, 1, 2
+            |   |   L--  ...
+            |   L--  ...
+            L--  ...
+        """
+    ).strip()
+    assert target == text
+
+
+def test_write_network_text_with_labels():
+    graph = nx.generators.complete_graph(5, create_using=nx.DiGraph)
+    for n in graph.nodes:
+        graph.nodes[n]["label"] = f"Node(n={n})"
+    lines = []
+    write = lines.append
+    nx.write_network_text(graph, path=write, with_labels=True, ascii_only=False, end="")
+    text = "\n".join(lines)
+    print(text)
+    # Non trees with labels can get somewhat out of hand with network text
+    # because we need to immediately show every non-tree edge to the right
+    target = dedent(
+        """
+        ╙── Node(n=0) ╾ Node(n=1), Node(n=2), Node(n=3), Node(n=4)
+            ├─╼ Node(n=1) ╾ Node(n=2), Node(n=3), Node(n=4)
+            │   ├─╼ Node(n=2) ╾ Node(n=0), Node(n=3), Node(n=4)
+            │   │   ├─╼ Node(n=3) ╾ Node(n=0), Node(n=1), Node(n=4)
+            │   │   │   ├─╼ Node(n=4) ╾ Node(n=0), Node(n=1), Node(n=2)
+            │   │   │   │   └─╼  ...
+            │   │   │   └─╼  ...
+            │   │   └─╼  ...
+            │   └─╼  ...
+            └─╼  ...
+        """
+    ).strip()
+    assert target == text
+
+
+def test_write_network_text_complete_graphs():
+    lines = []
+    write = lines.append
+    for k in [0, 1, 2, 3, 4, 5]:
+        g = nx.generators.complete_graph(k)
+        write(f"--- undirected k={k} ---")
+        nx.write_network_text(g, path=write, end="")
+
+    for k in [0, 1, 2, 3, 4, 5]:
+        g = nx.generators.complete_graph(k, nx.DiGraph)
+        write(f"--- directed k={k} ---")
+        nx.write_network_text(g, path=write, end="")
+    text = "\n".join(lines)
+    print(text)
+    target = dedent(
+        """
+        --- undirected k=0 ---
+        ╙
+        --- undirected k=1 ---
+        ╙── 0
+        --- undirected k=2 ---
+        ╙── 0
+            └── 1
+        --- undirected k=3 ---
+        ╙── 0
+            ├── 1
+            │   └── 2 ─ 0
+            └──  ...
+        --- undirected k=4 ---
+        ╙── 0
+            ├── 1
+            │   ├── 2 ─ 0
+            │   │   └── 3 ─ 0, 1
+            │   └──  ...
+            └──  ...
+        --- undirected k=5 ---
+        ╙── 0
+            ├── 1
+            │   ├── 2 ─ 0
+            │   │   ├── 3 ─ 0, 1
+            │   │   │   └── 4 ─ 0, 1, 2
+            │   │   └──  ...
+            │   └──  ...
+            └──  ...
+        --- directed k=0 ---
+        ╙
+        --- directed k=1 ---
+        ╙── 0
+        --- directed k=2 ---
+        ╙── 0 ╾ 1
+            └─╼ 1
+                └─╼  ...
+        --- directed k=3 ---
+        ╙── 0 ╾ 1, 2
+            ├─╼ 1 ╾ 2
+            │   ├─╼ 2 ╾ 0
+            │   │   └─╼  ...
+            │   └─╼  ...
+            └─╼  ...
+        --- directed k=4 ---
+        ╙── 0 ╾ 1, 2, 3
+            ├─╼ 1 ╾ 2, 3
+            │   ├─╼ 2 ╾ 0, 3
+            │   │   ├─╼ 3 ╾ 0, 1
+            │   │   │   └─╼  ...
+            │   │   └─╼  ...
+            │   └─╼  ...
+            └─╼  ...
+        --- directed k=5 ---
+        ╙── 0 ╾ 1, 2, 3, 4
+            ├─╼ 1 ╾ 2, 3, 4
+            │   ├─╼ 2 ╾ 0, 3, 4
+            │   │   ├─╼ 3 ╾ 0, 1, 4
+            │   │   │   ├─╼ 4 ╾ 0, 1, 2
+            │   │   │   │   └─╼  ...
+            │   │   │   └─╼  ...
+            │   │   └─╼  ...
+            │   └─╼  ...
+            └─╼  ...
+        """
+    ).strip()
+    assert target == text
+
+
+def test_write_network_text_multiple_sources():
+    g = nx.DiGraph()
+    g.add_edge(1, 2)
+    g.add_edge(1, 3)
+    g.add_edge(2, 4)
+    g.add_edge(3, 5)
+    g.add_edge(3, 6)
+    g.add_edge(5, 4)
+    g.add_edge(4, 1)
+    g.add_edge(1, 5)
+    lines = []
+    write = lines.append
+    # Use each node as the starting point to demonstrate how the representation
+    # changes.
+    nodes = sorted(g.nodes())
+    for n in nodes:
+        write(f"--- source node: {n} ---")
+        nx.write_network_text(g, path=write, sources=[n], end="")
+    text = "\n".join(lines)
+    print(text)
+    target = dedent(
+        """
+        --- source node: 1 ---
+        ╙── 1 ╾ 4
+            ├─╼ 2
+            │   └─╼ 4 ╾ 5
+            │       └─╼  ...
+            ├─╼ 3
+            │   ├─╼ 5 ╾ 1
+            │   │   └─╼  ...
+            │   └─╼ 6
+            └─╼  ...
+        --- source node: 2 ---
+        ╙── 2 ╾ 1
+            └─╼ 4 ╾ 5
+                └─╼ 1
+                    ├─╼ 3
+                    │   ├─╼ 5 ╾ 1
+                    │   │   └─╼  ...
+                    │   └─╼ 6
+                    └─╼  ...
+        --- source node: 3 ---
+        ╙── 3 ╾ 1
+            ├─╼ 5 ╾ 1
+            │   └─╼ 4 ╾ 2
+            │       └─╼ 1
+            │           ├─╼ 2
+            │           │   └─╼  ...
+            │           └─╼  ...
+            └─╼ 6
+        --- source node: 4 ---
+        ╙── 4 ╾ 2, 5
+            └─╼ 1
+                ├─╼ 2
+                │   └─╼  ...
+                ├─╼ 3
+                │   ├─╼ 5 ╾ 1
+                │   │   └─╼  ...
+                │   └─╼ 6
+                └─╼  ...
+        --- source node: 5 ---
+        ╙── 5 ╾ 3, 1
+            └─╼ 4 ╾ 2
+                └─╼ 1
+                    ├─╼ 2
+                    │   └─╼  ...
+                    ├─╼ 3
+                    │   ├─╼ 6
+                    │   └─╼  ...
+                    └─╼  ...
+        --- source node: 6 ---
+        ╙── 6 ╾ 3
+        """
+    ).strip()
+    assert target == text
+
+
+def test_write_network_text_star_graph():
+    graph = nx.star_graph(5, create_using=nx.Graph)
+    lines = []
+    write = lines.append
+    nx.write_network_text(graph, path=write, end="")
+    text = "\n".join(lines)
+    print(text)
+    target = dedent(
+        """
+        ╙── 1
+            └── 0
+                ├── 2
+                ├── 3
+                ├── 4
+                └── 5
+        """
+    ).strip()
+    assert target == text
+
+
+def test_write_network_text_path_graph():
+    graph = nx.path_graph(3, create_using=nx.Graph)
+    lines = []
+    write = lines.append
+    nx.write_network_text(graph, path=write, end="")
+    text = "\n".join(lines)
+    print(text)
+    target = dedent(
+        """
+        ╙── 0
+            └── 1
+                └── 2
+        """
+    ).strip()
+    assert target == text
+
+
+def test_write_network_text_lollipop_graph():
+    graph = nx.lollipop_graph(4, 2, create_using=nx.Graph)
+    lines = []
+    write = lines.append
+    nx.write_network_text(graph, path=write, end="")
+    text = "\n".join(lines)
+    print(text)
+    target = dedent(
+        """
+        ╙── 5
+            └── 4
+                └── 3
+                    ├── 0
+                    │   ├── 1 ─ 3
+                    │   │   └── 2 ─ 0, 3
+                    │   └──  ...
+                    └──  ...
+        """
+    ).strip()
+    assert target == text
+
+
+def test_write_network_text_wheel_graph():
+    graph = nx.wheel_graph(7, create_using=nx.Graph)
+    lines = []
+    write = lines.append
+    nx.write_network_text(graph, path=write, end="")
+    text = "\n".join(lines)
+    print(text)
+    target = dedent(
+        """
+        ╙── 1
+            ├── 0
+            │   ├── 2 ─ 1
+            │   │   └── 3 ─ 0
+            │   │       └── 4 ─ 0
+            │   │           └── 5 ─ 0
+            │   │               └── 6 ─ 0, 1
+            │   └──  ...
+            └──  ...
+        """
+    ).strip()
+    assert target == text
+
+
+def test_write_network_text_circular_ladder_graph():
+    graph = nx.circular_ladder_graph(4, create_using=nx.Graph)
+    lines = []
+    write = lines.append
+    nx.write_network_text(graph, path=write, end="")
+    text = "\n".join(lines)
+    print(text)
+    target = dedent(
+        """
+        ╙── 0
+            ├── 1
+            │   ├── 2
+            │   │   ├── 3 ─ 0
+            │   │   │   └── 7
+            │   │   │       ├── 6 ─ 2
+            │   │   │       │   └── 5 ─ 1
+            │   │   │       │       └── 4 ─ 0, 7
+            │   │   │       └──  ...
+            │   │   └──  ...
+            │   └──  ...
+            └──  ...
+        """
+    ).strip()
+    assert target == text
+
+
+def test_write_network_text_dorogovtsev_goltsev_mendes_graph():
+    graph = nx.dorogovtsev_goltsev_mendes_graph(4, create_using=nx.Graph)
+    lines = []
+    write = lines.append
+    nx.write_network_text(graph, path=write, end="")
+    text = "\n".join(lines)
+    print(text)
+    target = dedent(
+        """
+        ╙── 15
+            ├── 0
+            │   ├── 1 ─ 15
+            │   │   ├── 2 ─ 0
+            │   │   │   ├── 4 ─ 0
+            │   │   │   │   ├── 9 ─ 0
+            │   │   │   │   │   ├── 22 ─ 0
+            │   │   │   │   │   └── 38 ─ 4
+            │   │   │   │   ├── 13 ─ 2
+            │   │   │   │   │   ├── 34 ─ 2
+            │   │   │   │   │   └── 39 ─ 4
+            │   │   │   │   ├── 18 ─ 0
+            │   │   │   │   ├── 30 ─ 2
+            │   │   │   │   └──  ...
+            │   │   │   ├── 5 ─ 1
+            │   │   │   │   ├── 12 ─ 1
+            │   │   │   │   │   ├── 29 ─ 1
+            │   │   │   │   │   └── 40 ─ 5
+            │   │   │   │   ├── 14 ─ 2
+            │   │   │   │   │   ├── 35 ─ 2
+            │   │   │   │   │   └── 41 ─ 5
+            │   │   │   │   ├── 25 ─ 1
+            │   │   │   │   ├── 31 ─ 2
+            │   │   │   │   └──  ...
+            │   │   │   ├── 7 ─ 0
+            │   │   │   │   ├── 20 ─ 0
+            │   │   │   │   └── 32 ─ 2
+            │   │   │   ├── 10 ─ 1
+            │   │   │   │   ├── 27 ─ 1
+            │   │   │   │   └── 33 ─ 2
+            │   │   │   ├── 16 ─ 0
+            │   │   │   ├── 23 ─ 1
+            │   │   │   └──  ...
+            │   │   ├── 3 ─ 0
+            │   │   │   ├── 8 ─ 0
+            │   │   │   │   ├── 21 ─ 0
+            │   │   │   │   └── 36 ─ 3
+            │   │   │   ├── 11 ─ 1
+            │   │   │   │   ├── 28 ─ 1
+            │   │   │   │   └── 37 ─ 3
+            │   │   │   ├── 17 ─ 0
+            │   │   │   ├── 24 ─ 1
+            │   │   │   └──  ...
+            │   │   ├── 6 ─ 0
+            │   │   │   ├── 19 ─ 0
+            │   │   │   └── 26 ─ 1
+            │   │   └──  ...
+            │   └──  ...
+            └──  ...
+        """
+    ).strip()
+    assert target == text
+
+
+def test_write_network_text_tree_max_depth():
+    orig = nx.balanced_tree(r=1, h=3, create_using=nx.DiGraph)
+    lines = []
+    write = lines.append
+    write("--- directed case, max_depth=0 ---")
+    nx.write_network_text(orig, path=write, end="", max_depth=0)
+    write("--- directed case, max_depth=1 ---")
+    nx.write_network_text(orig, path=write, end="", max_depth=1)
+    write("--- directed case, max_depth=2 ---")
+    nx.write_network_text(orig, path=write, end="", max_depth=2)
+    write("--- directed case, max_depth=3 ---")
+    nx.write_network_text(orig, path=write, end="", max_depth=3)
+    write("--- directed case, max_depth=4 ---")
+    nx.write_network_text(orig, path=write, end="", max_depth=4)
+    write("--- undirected case, max_depth=0 ---")
+    nx.write_network_text(orig.to_undirected(), path=write, end="", max_depth=0)
+    write("--- undirected case, max_depth=1 ---")
+    nx.write_network_text(orig.to_undirected(), path=write, end="", max_depth=1)
+    write("--- undirected case, max_depth=2 ---")
+    nx.write_network_text(orig.to_undirected(), path=write, end="", max_depth=2)
+    write("--- undirected case, max_depth=3 ---")
+    nx.write_network_text(orig.to_undirected(), path=write, end="", max_depth=3)
+    write("--- undirected case, max_depth=4 ---")
+    nx.write_network_text(orig.to_undirected(), path=write, end="", max_depth=4)
+    text = "\n".join(lines)
+    print(text)
+    target = dedent(
+        """
+        --- directed case, max_depth=0 ---
+        ╙ ...
+        --- directed case, max_depth=1 ---
+        ╙── 0
+            └─╼  ...
+        --- directed case, max_depth=2 ---
+        ╙── 0
+            └─╼ 1
+                └─╼  ...
+        --- directed case, max_depth=3 ---
+        ╙── 0
+            └─╼ 1
+                └─╼ 2
+                    └─╼  ...
+        --- directed case, max_depth=4 ---
+        ╙── 0
+            └─╼ 1
+                └─╼ 2
+                    └─╼ 3
+        --- undirected case, max_depth=0 ---
+        ╙ ...
+        --- undirected case, max_depth=1 ---
+        ╙── 0 ─ 1
+            └──  ...
+        --- undirected case, max_depth=2 ---
+        ╙── 0
+            └── 1 ─ 2
+                └──  ...
+        --- undirected case, max_depth=3 ---
+        ╙── 0
+            └── 1
+                └── 2 ─ 3
+                    └──  ...
+        --- undirected case, max_depth=4 ---
+        ╙── 0
+            └── 1
+                └── 2
+                    └── 3
+        """
+    ).strip()
+    assert target == text
+
+
+def test_write_network_text_graph_max_depth():
+    orig = nx.erdos_renyi_graph(10, 0.15, directed=True, seed=40392)
+    lines = []
+    write = lines.append
+    write("--- directed case, max_depth=None ---")
+    nx.write_network_text(orig, path=write, end="", max_depth=None)
+    write("--- directed case, max_depth=0 ---")
+    nx.write_network_text(orig, path=write, end="", max_depth=0)
+    write("--- directed case, max_depth=1 ---")
+    nx.write_network_text(orig, path=write, end="", max_depth=1)
+    write("--- directed case, max_depth=2 ---")
+    nx.write_network_text(orig, path=write, end="", max_depth=2)
+    write("--- directed case, max_depth=3 ---")
+    nx.write_network_text(orig, path=write, end="", max_depth=3)
+    write("--- undirected case, max_depth=None ---")
+    nx.write_network_text(orig.to_undirected(), path=write, end="", max_depth=None)
+    write("--- undirected case, max_depth=0 ---")
+    nx.write_network_text(orig.to_undirected(), path=write, end="", max_depth=0)
+    write("--- undirected case, max_depth=1 ---")
+    nx.write_network_text(orig.to_undirected(), path=write, end="", max_depth=1)
+    write("--- undirected case, max_depth=2 ---")
+    nx.write_network_text(orig.to_undirected(), path=write, end="", max_depth=2)
+    write("--- undirected case, max_depth=3 ---")
+    nx.write_network_text(orig.to_undirected(), path=write, end="", max_depth=3)
+    text = "\n".join(lines)
+    print(text)
+    target = dedent(
+        """
+        --- directed case, max_depth=None ---
+        ╟── 4
+        ╎   ├─╼ 0 ╾ 3
+        ╎   ├─╼ 5 ╾ 7
+        ╎   │   └─╼ 3
+        ╎   │       ├─╼ 1 ╾ 9
+        ╎   │       │   └─╼ 9 ╾ 6
+        ╎   │       │       ├─╼ 6
+        ╎   │       │       │   └─╼  ...
+        ╎   │       │       ├─╼ 7 ╾ 4
+        ╎   │       │       │   ├─╼ 2
+        ╎   │       │       │   └─╼  ...
+        ╎   │       │       └─╼  ...
+        ╎   │       └─╼  ...
+        ╎   └─╼  ...
+        ╙── 8
+        --- directed case, max_depth=0 ---
+        ╙ ...
+        --- directed case, max_depth=1 ---
+        ╟── 4
+        ╎   └─╼  ...
+        ╙── 8
+        --- directed case, max_depth=2 ---
+        ╟── 4
+        ╎   ├─╼ 0 ╾ 3
+        ╎   ├─╼ 5 ╾ 7
+        ╎   │   └─╼  ...
+        ╎   └─╼ 7 ╾ 9
+        ╎       └─╼  ...
+        ╙── 8
+        --- directed case, max_depth=3 ---
+        ╟── 4
+        ╎   ├─╼ 0 ╾ 3
+        ╎   ├─╼ 5 ╾ 7
+        ╎   │   └─╼ 3
+        ╎   │       └─╼  ...
+        ╎   └─╼ 7 ╾ 9
+        ╎       ├─╼ 2
+        ╎       └─╼  ...
+        ╙── 8
+        --- undirected case, max_depth=None ---
+        ╟── 8
+        ╙── 2
+            └── 7
+                ├── 4
+                │   ├── 0
+                │   │   └── 3
+                │   │       ├── 1
+                │   │       │   └── 9 ─ 7
+                │   │       │       └── 6
+                │   │       └── 5 ─ 4, 7
+                │   └──  ...
+                └──  ...
+        --- undirected case, max_depth=0 ---
+        ╙ ...
+        --- undirected case, max_depth=1 ---
+        ╟── 8
+        ╙── 2 ─ 7
+            └──  ...
+        --- undirected case, max_depth=2 ---
+        ╟── 8
+        ╙── 2
+            └── 7 ─ 4, 5, 9
+                └──  ...
+        --- undirected case, max_depth=3 ---
+        ╟── 8
+        ╙── 2
+            └── 7
+                ├── 4 ─ 0, 5
+                │   └──  ...
+                ├── 5 ─ 4, 3
+                │   └──  ...
+                └── 9 ─ 1, 6
+                    └──  ...
+        """
+    ).strip()
+    assert target == text
+
+
+def test_write_network_text_clique_max_depth():
+    orig = nx.complete_graph(5, nx.DiGraph)
+    lines = []
+    write = lines.append
+    write("--- directed case, max_depth=None ---")
+    nx.write_network_text(orig, path=write, end="", max_depth=None)
+    write("--- directed case, max_depth=0 ---")
+    nx.write_network_text(orig, path=write, end="", max_depth=0)
+    write("--- directed case, max_depth=1 ---")
+    nx.write_network_text(orig, path=write, end="", max_depth=1)
+    write("--- directed case, max_depth=2 ---")
+    nx.write_network_text(orig, path=write, end="", max_depth=2)
+    write("--- directed case, max_depth=3 ---")
+    nx.write_network_text(orig, path=write, end="", max_depth=3)
+    write("--- undirected case, max_depth=None ---")
+    nx.write_network_text(orig.to_undirected(), path=write, end="", max_depth=None)
+    write("--- undirected case, max_depth=0 ---")
+    nx.write_network_text(orig.to_undirected(), path=write, end="", max_depth=0)
+    write("--- undirected case, max_depth=1 ---")
+    nx.write_network_text(orig.to_undirected(), path=write, end="", max_depth=1)
+    write("--- undirected case, max_depth=2 ---")
+    nx.write_network_text(orig.to_undirected(), path=write, end="", max_depth=2)
+    write("--- undirected case, max_depth=3 ---")
+    nx.write_network_text(orig.to_undirected(), path=write, end="", max_depth=3)
+    text = "\n".join(lines)
+    print(text)
+    target = dedent(
+        """
+        --- directed case, max_depth=None ---
+        ╙── 0 ╾ 1, 2, 3, 4
+            ├─╼ 1 ╾ 2, 3, 4
+            │   ├─╼ 2 ╾ 0, 3, 4
+            │   │   ├─╼ 3 ╾ 0, 1, 4
+            │   │   │   ├─╼ 4 ╾ 0, 1, 2
+            │   │   │   │   └─╼  ...
+            │   │   │   └─╼  ...
+            │   │   └─╼  ...
+            │   └─╼  ...
+            └─╼  ...
+        --- directed case, max_depth=0 ---
+        ╙ ...
+        --- directed case, max_depth=1 ---
+        ╙── 0 ╾ 1, 2, 3, 4
+            └─╼  ...
+        --- directed case, max_depth=2 ---
+        ╙── 0 ╾ 1, 2, 3, 4
+            ├─╼ 1 ╾ 2, 3, 4
+            │   └─╼  ...
+            ├─╼ 2 ╾ 1, 3, 4
+            │   └─╼  ...
+            ├─╼ 3 ╾ 1, 2, 4
+            │   └─╼  ...
+            └─╼ 4 ╾ 1, 2, 3
+                └─╼  ...
+        --- directed case, max_depth=3 ---
+        ╙── 0 ╾ 1, 2, 3, 4
+            ├─╼ 1 ╾ 2, 3, 4
+            │   ├─╼ 2 ╾ 0, 3, 4
+            │   │   └─╼  ...
+            │   ├─╼ 3 ╾ 0, 2, 4
+            │   │   └─╼  ...
+            │   ├─╼ 4 ╾ 0, 2, 3
+            │   │   └─╼  ...
+            │   └─╼  ...
+            └─╼  ...
+        --- undirected case, max_depth=None ---
+        ╙── 0
+            ├── 1
+            │   ├── 2 ─ 0
+            │   │   ├── 3 ─ 0, 1
+            │   │   │   └── 4 ─ 0, 1, 2
+            │   │   └──  ...
+            │   └──  ...
+            └──  ...
+        --- undirected case, max_depth=0 ---
+        ╙ ...
+        --- undirected case, max_depth=1 ---
+        ╙── 0 ─ 1, 2, 3, 4
+            └──  ...
+        --- undirected case, max_depth=2 ---
+        ╙── 0
+            ├── 1 ─ 2, 3, 4
+            │   └──  ...
+            ├── 2 ─ 1, 3, 4
+            │   └──  ...
+            ├── 3 ─ 1, 2, 4
+            │   └──  ...
+            └── 4 ─ 1, 2, 3
+        --- undirected case, max_depth=3 ---
+        ╙── 0
+            ├── 1
+            │   ├── 2 ─ 0, 3, 4
+            │   │   └──  ...
+            │   ├── 3 ─ 0, 2, 4
+            │   │   └──  ...
+            │   └── 4 ─ 0, 2, 3
+            └──  ...
+        """
+    ).strip()
+    assert target == text
+
+
+def test_write_network_text_custom_label():
+    # Create a directed forest with labels
+    graph = nx.erdos_renyi_graph(5, 0.4, directed=True, seed=359222358)
+    for node in graph.nodes:
+        graph.nodes[node]["label"] = f"Node({node})"
+        graph.nodes[node]["chr"] = chr(node + ord("a") - 1)
+        if node % 2 == 0:
+            graph.nodes[node]["part"] = chr(node + ord("a"))
+
+    lines = []
+    write = lines.append
+    write("--- when with_labels=True, uses the 'label' attr ---")
+    nx.write_network_text(graph, path=write, with_labels=True, end="", max_depth=None)
+    write("--- when with_labels=False, uses str(node) value ---")
+    nx.write_network_text(graph, path=write, with_labels=False, end="", max_depth=None)
+    write("--- when with_labels is a string, use that attr ---")
+    nx.write_network_text(graph, path=write, with_labels="chr", end="", max_depth=None)
+    write("--- fallback to str(node) when the attr does not exist ---")
+    nx.write_network_text(graph, path=write, with_labels="part", end="", max_depth=None)
+
+    text = "\n".join(lines)
+    print(text)
+    target = dedent(
+        """
+        --- when with_labels=True, uses the 'label' attr ---
+        ╙── Node(1)
+            └─╼ Node(3) ╾ Node(2)
+                ├─╼ Node(0)
+                │   ├─╼ Node(2) ╾ Node(3), Node(4)
+                │   │   └─╼  ...
+                │   └─╼ Node(4)
+                │       └─╼  ...
+                └─╼  ...
+        --- when with_labels=False, uses str(node) value ---
+        ╙── 1
+            └─╼ 3 ╾ 2
+                ├─╼ 0
+                │   ├─╼ 2 ╾ 3, 4
+                │   │   └─╼  ...
+                │   └─╼ 4
+                │       └─╼  ...
+                └─╼  ...
+        --- when with_labels is a string, use that attr ---
+        ╙── a
+            └─╼ c ╾ b
+                ├─╼ `
+                │   ├─╼ b ╾ c, d
+                │   │   └─╼  ...
+                │   └─╼ d
+                │       └─╼  ...
+                └─╼  ...
+        --- fallback to str(node) when the attr does not exist ---
+        ╙── 1
+            └─╼ 3 ╾ c
+                ├─╼ a
+                │   ├─╼ c ╾ 3, e
+                │   │   └─╼  ...
+                │   └─╼ e
+                │       └─╼  ...
+                └─╼  ...
+        """
+    ).strip()
+    assert target == text
diff --git a/networkx/readwrite/text.py b/networkx/readwrite/text.py
index b38b08d..09f23e1 100644
--- a/networkx/readwrite/text.py
+++ b/networkx/readwrite/text.py
@@ -1,13 +1,474 @@
 """
 Text-based visual representations of graphs
 """
+import sys
+import warnings
+from collections import defaultdict
 
-__all__ = ["forest_str"]
+import networkx as nx
+from networkx.utils import open_file
 
+__all__ = ["forest_str", "generate_network_text", "write_network_text"]
 
-def forest_str(graph, with_labels=True, sources=None, write=None, ascii_only=False):
+
+class _AsciiBaseGlyphs:
+    empty = "+"
+    newtree_last = "+-- "
+    newtree_mid = "+-- "
+    endof_forest = "    "
+    within_forest = ":   "
+    within_tree = "|   "
+
+
+class AsciiDirectedGlyphs(_AsciiBaseGlyphs):
+    last = "L-> "
+    mid = "|-> "
+    backedge = "<-"
+
+
+class AsciiUndirectedGlyphs(_AsciiBaseGlyphs):
+    last = "L-- "
+    mid = "|-- "
+    backedge = "-"
+
+
+class _UtfBaseGlyphs:
+    # Notes on available box and arrow characters
+    # https://en.wikipedia.org/wiki/Box-drawing_character
+    # https://stackoverflow.com/questions/2701192/triangle-arrow
+    empty = "╙"
+    newtree_last = "╙── "
+    newtree_mid = "╟── "
+    endof_forest = "    "
+    within_forest = "╎   "
+    within_tree = "│   "
+
+
+class UtfDirectedGlyphs(_UtfBaseGlyphs):
+    last = "└─╼ "
+    mid = "├─╼ "
+    backedge = "╾"
+
+
+class UtfUndirectedGlyphs(_UtfBaseGlyphs):
+    last = "└── "
+    mid = "├── "
+    backedge = "─"
+
+
+def generate_network_text(
+    graph, with_labels=True, sources=None, max_depth=None, ascii_only=False
+):
+    """Generate lines in the "network text" format
+
+    This works via a depth-first traversal of the graph and writing a line for
+    each unique node encountered. Non-tree edges are written to the right of
+    each node, and connection to a non-tree edge is indicated with an ellipsis.
+    This representation works best when the input graph is a forest, but any
+    graph can be represented.
+
+    This notation is original to networkx, although it is simple enough that it
+    may be known in existing literature. See #5602 for details. The procedure
+    is summarized as follows:
+
+    1. Given a set of source nodes (which can be specified, or automatically
+    discovered via finding the (strongly) connected components and choosing one
+    node with minimum degree from each), we traverse the graph in depth first
+    order.
+
+    2. Each reachable node will be printed exactly once on it's own line.
+
+    3. Edges are indicated in one of three ways:
+
+        a. a parent "L-style" connection on the upper left. This corresponds to
+        a traversal in the directed DFS tree.
+
+        b. a backref "<-style" connection shown directly on the right. For
+        directed graphs, these are drawn for any incoming edges to a node that
+        is not a parent edge. For undirected graphs, these are drawn for only
+        the non-parent edges that have already been represented (The edges that
+        have not been represented will be handled in the recursive case).
+
+        c. a child "L-style" connection on the lower right. Drawing of the
+        children are handled recursively.
+
+    4. The children of each node (wrt the directed DFS tree) are drawn
+    underneath and to the right of it. In the case that a child node has already
+    been drawn the connection is replaced with an ellipsis ("...") to indicate
+    that there is one or more connections represented elsewhere.
+
+    5. If a maximum depth is specified, an edge to nodes past this maximum
+    depth will be represented by an ellipsis.
+
+    Parameters
+    ----------
+    graph : nx.DiGraph | nx.Graph
+        Graph to represent
+
+    with_labels : bool | str
+        If True will use the "label" attribute of a node to display if it
+        exists otherwise it will use the node value itself. If given as a
+        string, then that attribute name will be used instead of "label".
+        Defaults to True.
+
+    sources : List
+        Specifies which nodes to start traversal from. Note: nodes that are not
+        reachable from one of these sources may not be shown. If unspecified,
+        the minimal set of nodes needed to reach all others will be used.
+
+    max_depth : int | None
+        The maximum depth to traverse before stopping. Defaults to None.
+
+    ascii_only : Boolean
+        If True only ASCII characters are used to construct the visualization
+
+    Yields
+    ------
+    str : a line of generated text
+    """
+    is_directed = graph.is_directed()
+
+    if is_directed:
+        glyphs = AsciiDirectedGlyphs if ascii_only else UtfDirectedGlyphs
+        succ = graph.succ
+        pred = graph.pred
+    else:
+        glyphs = AsciiUndirectedGlyphs if ascii_only else UtfUndirectedGlyphs
+        succ = graph.adj
+        pred = graph.adj
+
+    if isinstance(with_labels, str):
+        label_attr = with_labels
+    elif with_labels:
+        label_attr = "label"
+    else:
+        label_attr = None
+
+    if max_depth == 0:
+        yield glyphs.empty + " ..."
+    elif len(graph.nodes) == 0:
+        yield glyphs.empty
+    else:
+        # If the nodes to traverse are unspecified, find the minimal set of
+        # nodes that will reach the entire graph
+        if sources is None:
+            sources = _find_sources(graph)
+
+        # Populate the stack with each:
+        # 1. parent node in the DFS tree (or None for root nodes),
+        # 2. the current node in the DFS tree
+        # 2. a list of indentations indicating depth
+        # 3. a flag indicating if the node is the final one to be written.
+        # Reverse the stack so sources are popped in the correct order.
+        last_idx = len(sources) - 1
+        stack = [
+            (None, node, [], (idx == last_idx)) for idx, node in enumerate(sources)
+        ][::-1]
+
+        num_skipped_children = defaultdict(lambda: 0)
+        seen_nodes = set()
+        while stack:
+            parent, node, indents, this_islast = stack.pop()
+
+            if node is not Ellipsis:
+                skip = node in seen_nodes
+                if skip:
+                    # Mark that we skipped a parent's child
+                    num_skipped_children[parent] += 1
+
+                if this_islast:
+                    # If we reached the last child of a parent, and we skipped
+                    # any of that parents children, then we should emit an
+                    # ellipsis at the end after this.
+                    if num_skipped_children[parent] and parent is not None:
+                        # Append the ellipsis to be emitted last
+                        next_islast = True
+                        try_frame = (node, Ellipsis, indents, next_islast)
+                        stack.append(try_frame)
+
+                        # Redo this frame, but not as a last object
+                        next_islast = False
+                        try_frame = (parent, node, indents, next_islast)
+                        stack.append(try_frame)
+                        continue
+
+                if skip:
+                    continue
+                seen_nodes.add(node)
+
+            if not indents:
+                # Top level items (i.e. trees in the forest) get different
+                # glyphs to indicate they are not actually connected
+                if this_islast:
+                    this_prefix = indents + [glyphs.newtree_last]
+                    next_prefix = indents + [glyphs.endof_forest]
+                else:
+                    this_prefix = indents + [glyphs.newtree_mid]
+                    next_prefix = indents + [glyphs.within_forest]
+
+            else:
+                # For individual tree edges distinguish between directed and
+                # undirected cases
+                if this_islast:
+                    this_prefix = indents + [glyphs.last]
+                    next_prefix = indents + [glyphs.endof_forest]
+                else:
+                    this_prefix = indents + [glyphs.mid]
+                    next_prefix = indents + [glyphs.within_tree]
+
+            if node is Ellipsis:
+                label = " ..."
+                suffix = ""
+                children = []
+            else:
+                if label_attr is not None:
+                    label = str(graph.nodes[node].get(label_attr, node))
+                else:
+                    label = str(node)
+
+                # Determine:
+                # (1) children to traverse into after showing this node.
+                # (2) parents to immediately show to the right of this node.
+                if is_directed:
+                    # In the directed case we must show every successor node
+                    # note: it may be skipped later, but we don't have that
+                    # information here.
+                    children = list(succ[node])
+                    # In the directed case we must show every predecessor
+                    # except for parent we directly traversed from.
+                    handled_parents = {parent}
+                else:
+                    # Showing only the unseen children results in a more
+                    # concise representation for the undirected case.
+                    children = [
+                        child for child in succ[node] if child not in seen_nodes
+                    ]
+
+                    # In the undirected case, parents are also children, so we
+                    # only need to immediately show the ones we can no longer
+                    # traverse
+                    handled_parents = {*children, parent}
+
+                if max_depth is not None and len(indents) == max_depth - 1:
+                    # Use ellipsis to indicate we have reached maximum depth
+                    if children:
+                        children = [Ellipsis]
+                    handled_parents = {parent}
+
+                # The other parents are other predecessors of this node that
+                # are not handled elsewhere.
+                other_parents = [p for p in pred[node] if p not in handled_parents]
+                if other_parents:
+                    if label_attr is not None:
+                        other_parents_labels = ", ".join(
+                            [
+                                str(graph.nodes[p].get(label_attr, p))
+                                for p in other_parents
+                            ]
+                        )
+                    else:
+                        other_parents_labels = ", ".join(
+                            [str(p) for p in other_parents]
+                        )
+                    suffix = " ".join(["", glyphs.backedge, other_parents_labels])
+                else:
+                    suffix = ""
+
+            # Emit the line for this node, this will be called for each node
+            # exactly once.
+            yield "".join(this_prefix + [label, suffix])
+
+            # Push children on the stack in reverse order so they are popped in
+            # the original order.
+            for idx, child in enumerate(children[::-1]):
+                next_islast = idx == 0
+                try_frame = (node, child, next_prefix, next_islast)
+                stack.append(try_frame)
+
+
+@open_file(1, "w")
+def write_network_text(
+    graph,
+    path=None,
+    with_labels=True,
+    sources=None,
+    max_depth=None,
+    ascii_only=False,
+    end="\n",
+):
+    """Creates a nice text representation of a graph
+
+    This works via a depth-first traversal of the graph and writing a line for
+    each unique node encountered. Non-tree edges are written to the right of
+    each node, and connection to a non-tree edge is indicated with an ellipsis.
+    This representation works best when the input graph is a forest, but any
+    graph can be represented.
+
+    Parameters
+    ----------
+    graph : nx.DiGraph | nx.Graph
+        Graph to represent
+
+    path : string or file or callable or None
+       Filename or file handle for data output.
+       if a function, then it will be called for each generated line.
+       if None, this will default to "sys.stdout.write"
+
+    with_labels : bool | str
+        If True will use the "label" attribute of a node to display if it
+        exists otherwise it will use the node value itself. If given as a
+        string, then that attribute name will be used instead of "label".
+        Defaults to True.
+
+    sources : List
+        Specifies which nodes to start traversal from. Note: nodes that are not
+        reachable from one of these sources may not be shown. If unspecified,
+        the minimal set of nodes needed to reach all others will be used.
+
+    max_depth : int | None
+        The maximum depth to traverse before stopping. Defaults to None.
+
+    ascii_only : Boolean
+        If True only ASCII characters are used to construct the visualization
+
+    end : string
+        The line ending character
+
+    Examples
+    --------
+    >>> graph = nx.balanced_tree(r=2, h=2, create_using=nx.DiGraph)
+    >>> nx.write_network_text(graph)
+    ╙── 0
+        ├─╼ 1
+        │   ├─╼ 3
+        │   └─╼ 4
+        └─╼ 2
+            ├─╼ 5
+            └─╼ 6
+
+    >>> # A near tree with one non-tree edge
+    >>> graph.add_edge(5, 1)
+    >>> nx.write_network_text(graph)
+    ╙── 0
+        ├─╼ 1 ╾ 5
+        │   ├─╼ 3
+        │   └─╼ 4
+        └─╼ 2
+            ├─╼ 5
+            │   └─╼  ...
+            └─╼ 6
+
+    >>> graph = nx.cycle_graph(5)
+    >>> nx.write_network_text(graph)
+    ╙── 0
+        ├── 1
+        │   └── 2
+        │       └── 3
+        │           └── 4 ─ 0
+        └──  ...
+
+    >>> graph = nx.generators.barbell_graph(4, 2)
+    >>> nx.write_network_text(graph)
+    ╙── 4
+        ├── 5
+        │   └── 6
+        │       ├── 7
+        │       │   ├── 8 ─ 6
+        │       │   │   └── 9 ─ 6, 7
+        │       │   └──  ...
+        │       └──  ...
+        └── 3
+            ├── 0
+            │   ├── 1 ─ 3
+            │   │   └── 2 ─ 0, 3
+            │   └──  ...
+            └──  ...
+
+    >>> graph = nx.complete_graph(5, create_using=nx.Graph)
+    >>> nx.write_network_text(graph)
+    ╙── 0
+        ├── 1
+        │   ├── 2 ─ 0
+        │   │   ├── 3 ─ 0, 1
+        │   │   │   └── 4 ─ 0, 1, 2
+        │   │   └──  ...
+        │   └──  ...
+        └──  ...
+
+    >>> graph = nx.complete_graph(3, create_using=nx.DiGraph)
+    >>> nx.write_network_text(graph)
+    ╙── 0 ╾ 1, 2
+        ├─╼ 1 ╾ 2
+        │   ├─╼ 2 ╾ 0
+        │   │   └─╼  ...
+        │   └─╼  ...
+        └─╼  ...
     """
-    Creates a nice utf8 representation of a directed forest
+    if path is None:
+        # The path is unspecified, write to stdout
+        _write = sys.stdout.write
+    elif hasattr(path, "write"):
+        # The path is already an open file
+        _write = path.write
+    elif callable(path):
+        # The path is a custom callable
+        _write = path
+    else:
+        raise TypeError(type(path))
+
+    for line in generate_network_text(
+        graph,
+        with_labels=with_labels,
+        sources=sources,
+        max_depth=max_depth,
+        ascii_only=ascii_only,
+    ):
+        _write(line + end)
+
+
+def _find_sources(graph):
+    """
+    Determine a minimal set of nodes such that the entire graph is reachable
+    """
+    # For each connected part of the graph, choose at least
+    # one node as a starting point, preferably without a parent
+    if graph.is_directed():
+        # Choose one node from each SCC with minimum in_degree
+        sccs = list(nx.strongly_connected_components(graph))
+        # condensing the SCCs forms a dag, the nodes in this graph with
+        # 0 in-degree correspond to the SCCs from which the minimum set
+        # of nodes from which all other nodes can be reached.
+        scc_graph = nx.condensation(graph, sccs)
+        supernode_to_nodes = {sn: [] for sn in scc_graph.nodes()}
+        # Note: the order of mapping differs between pypy and cpython
+        # so we have to loop over graph nodes for consistency
+        mapping = scc_graph.graph["mapping"]
+        for n in graph.nodes:
+            sn = mapping[n]
+            supernode_to_nodes[sn].append(n)
+        sources = []
+        for sn in scc_graph.nodes():
+            if scc_graph.in_degree[sn] == 0:
+                scc = supernode_to_nodes[sn]
+                node = min(scc, key=lambda n: graph.in_degree[n])
+                sources.append(node)
+    else:
+        # For undirected graph, the entire graph will be reachable as
+        # long as we consider one node from every connected component
+        sources = [
+            min(cc, key=lambda n: graph.degree[n])
+            for cc in nx.connected_components(graph)
+        ]
+        sources = sorted(sources, key=lambda n: graph.degree[n])
+    return sources
+
+
+def forest_str(graph, with_labels=True, sources=None, write=None, ascii_only=False):
+    """Creates a nice utf8 representation of a forest
+
+    This function has been superseded by
+    :func:`nx.readwrite.text.generate_network_text`, which should be used
+    instead.
 
     Parameters
     ----------
@@ -38,22 +499,22 @@ def forest_str(graph, with_labels=True, sources=None, write=None, ascii_only=Fal
     str | None :
         utf8 representation of the tree / forest
 
-    Example
-    -------
+    Examples
+    --------
     >>> graph = nx.balanced_tree(r=2, h=3, create_using=nx.DiGraph)
     >>> print(nx.forest_str(graph))
     ╙── 0
         ├─╼ 1
-        │   ├─╼ 3
-        │   │   ├─╼ 7
-        │   │   └─╼ 8
-        │   └─╼ 4
-        │       ├─╼ 9
-        │       └─╼ 10
+        │   ├─╼ 3
+        │   │   ├─╼ 7
+        │   │   └─╼ 8
+        │   └─╼ 4
+        │       ├─╼ 9
+        │       └─╼ 10
         └─╼ 2
             ├─╼ 5
-            │   ├─╼ 11
-            │   └─╼ 12
+            │   ├─╼ 11
+            │   └─╼ 12
             └─╼ 6
                 ├─╼ 13
                 └─╼ 14
@@ -70,7 +531,16 @@ def forest_str(graph, with_labels=True, sources=None, write=None, ascii_only=Fal
         L-- 1
             L-- 2
     """
-    import networkx as nx
+    msg = (
+        "\nforest_str is deprecated as of version 3.1 and will be removed "
+        "in version 3.3. Use generate_network_text or write_network_text "
+        "instead.\n"
+    )
+    warnings.warn(msg, DeprecationWarning)
+
+    if len(graph.nodes) > 0:
+        if not nx.is_forest(graph):
+            raise nx.NetworkXNotImplemented("input must be a forest or the empty graph")
 
     printbuf = []
     if write is None:
@@ -78,114 +548,14 @@ def forest_str(graph, with_labels=True, sources=None, write=None, ascii_only=Fal
     else:
         _write = write
 
-    # Define glphys
-    # Notes on available box and arrow characters
-    # https://en.wikipedia.org/wiki/Box-drawing_character
-    # https://stackoverflow.com/questions/2701192/triangle-arrow
-    if ascii_only:
-        glyph_empty = "+"
-        glyph_newtree_last = "+-- "
-        glyph_newtree_mid = "+-- "
-        glyph_endof_forest = "    "
-        glyph_within_forest = ":   "
-        glyph_within_tree = "|   "
-
-        glyph_directed_last = "L-> "
-        glyph_directed_mid = "|-> "
-
-        glyph_undirected_last = "L-- "
-        glyph_undirected_mid = "|-- "
-    else:
-        glyph_empty = "╙"
-        glyph_newtree_last = "╙── "
-        glyph_newtree_mid = "╟── "
-        glyph_endof_forest = "    "
-        glyph_within_forest = "╎   "
-        glyph_within_tree = "│   "
-
-        glyph_directed_last = "└─╼ "
-        glyph_directed_mid = "├─╼ "
-
-        glyph_undirected_last = "└── "
-        glyph_undirected_mid = "├── "
-
-    if len(graph.nodes) == 0:
-        _write(glyph_empty)
-    else:
-        if not nx.is_forest(graph):
-            raise nx.NetworkXNotImplemented("input must be a forest or the empty graph")
-
-        is_directed = graph.is_directed()
-        succ = graph.succ if is_directed else graph.adj
-
-        if sources is None:
-            if is_directed:
-                # use real source nodes for directed trees
-                sources = [n for n in graph.nodes if graph.in_degree[n] == 0]
-            else:
-                # use arbitrary sources for undirected trees
-                sources = [
-                    min(cc, key=lambda n: graph.degree[n])
-                    for cc in nx.connected_components(graph)
-                ]
-
-        # Populate the stack with each source node, empty indentation, and mark
-        # the final node. Reverse the stack so sources are popped in the
-        # correct order.
-        last_idx = len(sources) - 1
-        stack = [(node, "", (idx == last_idx)) for idx, node in enumerate(sources)][
-            ::-1
-        ]
-
-        seen = set()
-        while stack:
-            node, indent, islast = stack.pop()
-            if node in seen:
-                continue
-            seen.add(node)
-
-            if not indent:
-                # Top level items (i.e. trees in the forest) get different
-                # glyphs to indicate they are not actually connected
-                if islast:
-                    this_prefix = indent + glyph_newtree_last
-                    next_prefix = indent + glyph_endof_forest
-                else:
-                    this_prefix = indent + glyph_newtree_mid
-                    next_prefix = indent + glyph_within_forest
-
-            else:
-                # For individual tree edges distinguish between directed and
-                # undirected cases
-                if is_directed:
-                    if islast:
-                        this_prefix = indent + glyph_directed_last
-                        next_prefix = indent + glyph_endof_forest
-                    else:
-                        this_prefix = indent + glyph_directed_mid
-                        next_prefix = indent + glyph_within_tree
-                else:
-                    if islast:
-                        this_prefix = indent + glyph_undirected_last
-                        next_prefix = indent + glyph_endof_forest
-                    else:
-                        this_prefix = indent + glyph_undirected_mid
-                        next_prefix = indent + glyph_within_tree
-
-            if with_labels:
-                label = graph.nodes[node].get("label", node)
-            else:
-                label = node
-
-            _write(this_prefix + str(label))
-
-            # Push children on the stack in reverse order so they are popped in
-            # the original order.
-            children = [child for child in succ[node] if child not in seen]
-            for idx, child in enumerate(children[::-1], start=1):
-                islast_next = idx <= 1
-                try_frame = (child, next_prefix, islast_next)
-                stack.append(try_frame)
+    write_network_text(
+        graph,
+        _write,
+        with_labels=with_labels,
+        sources=sources,
+        ascii_only=ascii_only,
+        end="",
+    )
 
     if write is None:
         # Only return a string if the custom write function was not specified
diff --git a/networkx/relabel.py b/networkx/relabel.py
index ec34142..df3da44 100644
--- a/networkx/relabel.py
+++ b/networkx/relabel.py
@@ -6,6 +6,9 @@ __all__ = ["convert_node_labels_to_integers", "relabel_nodes"]
 def relabel_nodes(G, mapping, copy=True):
     """Relabel the nodes of the graph G according to a given mapping.
 
+    The original node ordering may not be preserved if `copy` is `False` and the
+    mapping includes overlap between old and new labels.
+
     Parameters
     ----------
     G : graph
@@ -111,16 +114,10 @@ def relabel_nodes(G, mapping, copy=True):
     --------
     convert_node_labels_to_integers
     """
-    # you can pass a function f(old_label) -> new_label
-    # or a class e.g. str(old_label) -> new_label
-    # but we'll just make a dictionary here regardless
-    # To allow classes, we check if __getitem__ is a bound method using __self__
-    if not (
-        hasattr(mapping, "__getitem__") and hasattr(mapping.__getitem__, "__self__")
-    ):
-        m = {n: mapping(n) for n in G}
-    else:
-        m = mapping
+    # you can pass any callable e.g. f(old_label) -> new_label or
+    # e.g. str(old_label) -> new_label, but we'll just make a dictionary here regardless
+    m = {n: mapping(n) for n in G} if callable(mapping) else mapping
+
     if copy:
         return _relabel_copy(G, m)
     else:
@@ -128,9 +125,7 @@ def relabel_nodes(G, mapping, copy=True):
 
 
 def _relabel_inplace(G, mapping):
-    old_labels = set(mapping.keys())
-    new_labels = set(mapping.values())
-    if len(old_labels & new_labels) > 0:
+    if len(mapping.keys() & mapping.values()) > 0:
         # labels sets overlap
         # can we topological sort and still do the relabeling?
         D = nx.DiGraph(list(mapping.items()))
@@ -143,8 +138,8 @@ def _relabel_inplace(G, mapping):
                 "resolve the mapping. Use copy=True."
             ) from err
     else:
-        # non-overlapping label sets
-        nodes = old_labels
+        # non-overlapping label sets, sort them in the order of G nodes
+        nodes = [n for n in G if n in mapping]
 
     multigraph = G.is_multigraph()
     directed = G.is_directed()
diff --git a/networkx/testing/__init__.py b/networkx/testing/__init__.py
deleted file mode 100644
index 884ac83..0000000
--- a/networkx/testing/__init__.py
+++ /dev/null
@@ -1,2 +0,0 @@
-from networkx.testing.utils import *
-from networkx.testing.test import run
diff --git a/networkx/testing/test.py b/networkx/testing/test.py
deleted file mode 100644
index 41739be..0000000
--- a/networkx/testing/test.py
+++ /dev/null
@@ -1,44 +0,0 @@
-import warnings
-
-
-def run(verbosity=1, doctest=False):
-    """Run NetworkX tests.
-
-    Parameters
-    ----------
-    verbosity: integer, optional
-      Level of detail in test reports.  Higher numbers provide more detail.
-
-    doctest: bool, optional
-      True to run doctests in code modules
-    """
-    warnings.warn(
-        (
-            "`run` is deprecated and will be removed in version 3.0.\n"
-            "Call `pytest` directly from the commandline instead.\n"
-        ),
-        DeprecationWarning,
-    )
-
-    import pytest
-
-    pytest_args = ["-l"]
-
-    if verbosity and int(verbosity) > 1:
-        pytest_args += ["-" + "v" * (int(verbosity) - 1)]
-
-    if doctest:
-        pytest_args += ["--doctest-modules"]
-
-    pytest_args += ["--pyargs", "networkx"]
-
-    try:
-        code = pytest.main(pytest_args)
-    except SystemExit as err:
-        code = err.code
-
-    return code == 0
-
-
-if __name__ == "__main__":
-    run()
diff --git a/networkx/testing/tests/__init__.py b/networkx/testing/tests/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/networkx/testing/tests/test_utils.py b/networkx/testing/tests/test_utils.py
deleted file mode 100644
index 32804f3..0000000
--- a/networkx/testing/tests/test_utils.py
+++ /dev/null
@@ -1,160 +0,0 @@
-import networkx as nx
-from networkx.testing import assert_edges_equal, assert_graphs_equal, assert_nodes_equal
-
-# thanks to numpy for this GenericTest class (numpy/testing/test_utils.py)
-
-
-class _GenericTest:
-    @classmethod
-    def _test_equal(cls, a, b):
-        cls._assert_func(a, b)
-
-    @classmethod
-    def _test_not_equal(cls, a, b):
-        try:
-            cls._assert_func(a, b)
-            passed = True
-        except AssertionError:
-            pass
-        else:
-            raise AssertionError("a and b are found equal but are not")
-
-
-class TestNodesEqual(_GenericTest):
-    _assert_func = assert_nodes_equal
-
-    def test_nodes_equal(self):
-        a = [1, 2, 5, 4]
-        b = [4, 5, 1, 2]
-        self._test_equal(a, b)
-
-    def test_nodes_not_equal(self):
-        a = [1, 2, 5, 4]
-        b = [4, 5, 1, 3]
-        self._test_not_equal(a, b)
-
-    def test_nodes_with_data_equal(self):
-        G = nx.Graph()
-        G.add_nodes_from([1, 2, 3], color="red")
-        H = nx.Graph()
-        H.add_nodes_from([1, 2, 3], color="red")
-        self._test_equal(G.nodes(data=True), H.nodes(data=True))
-
-    def test_edges_with_data_not_equal(self):
-        G = nx.Graph()
-        G.add_nodes_from([1, 2, 3], color="red")
-        H = nx.Graph()
-        H.add_nodes_from([1, 2, 3], color="blue")
-        self._test_not_equal(G.nodes(data=True), H.nodes(data=True))
-
-
-class TestEdgesEqual(_GenericTest):
-    _assert_func = assert_edges_equal
-
-    def test_edges_equal(self):
-        a = [(1, 2), (5, 4)]
-        b = [(4, 5), (1, 2)]
-        self._test_equal(a, b)
-
-    def test_edges_not_equal(self):
-        a = [(1, 2), (5, 4)]
-        b = [(4, 5), (1, 3)]
-        self._test_not_equal(a, b)
-
-    def test_edges_with_data_equal(self):
-        G = nx.MultiGraph()
-        nx.add_path(G, [0, 1, 2], weight=1)
-        H = nx.MultiGraph()
-        nx.add_path(H, [0, 1, 2], weight=1)
-        self._test_equal(G.edges(data=True, keys=True), H.edges(data=True, keys=True))
-
-    def test_edges_with_data_not_equal(self):
-        G = nx.MultiGraph()
-        nx.add_path(G, [0, 1, 2], weight=1)
-        H = nx.MultiGraph()
-        nx.add_path(H, [0, 1, 2], weight=2)
-        self._test_not_equal(
-            G.edges(data=True, keys=True), H.edges(data=True, keys=True)
-        )
-
-    def test_no_edges(self):
-        G = nx.MultiGraph()
-        H = nx.MultiGraph()
-        self._test_equal(G.edges(data=True, keys=True), H.edges(data=True, keys=True))
-
-    def test_duplicate_edges(self):
-        a = [(1, 2), (5, 4), (1, 2)]
-        b = [(4, 5), (1, 2)]
-        self._test_not_equal(a, b)
-
-    def test_duplicate_edges_with_data(self):
-        a = [(1, 2, {"weight": 10}), (5, 4), (1, 2, {"weight": 1})]
-        b = [(4, 5), (1, 2), (1, 2, {"weight": 1})]
-        self._test_not_equal(a, b)
-
-    def test_order_of_edges_with_data(self):
-        a = [(1, 2, {"weight": 10}), (1, 2, {"weight": 1})]
-        b = [(1, 2, {"weight": 1}), (1, 2, {"weight": 10})]
-        self._test_equal(a, b)
-
-    def test_order_of_multiedges(self):
-        wt1 = {"weight": 1}
-        wt2 = {"weight": 2}
-        a = [(1, 2, wt1), (1, 2, wt1), (1, 2, wt2)]
-        b = [(1, 2, wt1), (1, 2, wt2), (1, 2, wt2)]
-        self._test_not_equal(a, b)
-
-    def test_order_of_edges_with_keys(self):
-        a = [(1, 2, 0, {"weight": 10}), (1, 2, 1, {"weight": 1}), (1, 2, 2)]
-        b = [(1, 2, 1, {"weight": 1}), (1, 2, 2), (1, 2, 0, {"weight": 10})]
-        self._test_equal(a, b)
-        a = [(1, 2, 1, {"weight": 10}), (1, 2, 0, {"weight": 1}), (1, 2, 2)]
-        b = [(1, 2, 1, {"weight": 1}), (1, 2, 2), (1, 2, 0, {"weight": 10})]
-        self._test_not_equal(a, b)
-
-
-class TestGraphsEqual(_GenericTest):
-    _assert_func = assert_graphs_equal
-
-    def test_graphs_equal(self):
-        G = nx.path_graph(4)
-        H = nx.Graph()
-        nx.add_path(H, range(4))
-        self._test_equal(G, H)
-
-    def test_digraphs_equal(self):
-        G = nx.path_graph(4, create_using=nx.DiGraph())
-        H = nx.DiGraph()
-        nx.add_path(H, range(4))
-        self._test_equal(G, H)
-
-    def test_multigraphs_equal(self):
-        G = nx.path_graph(4, create_using=nx.MultiGraph())
-        H = nx.MultiGraph()
-        nx.add_path(H, range(4))
-        self._test_equal(G, H)
-
-    def test_multidigraphs_equal(self):
-        G = nx.path_graph(4, create_using=nx.MultiDiGraph())
-        H = nx.MultiDiGraph()
-        nx.add_path(H, range(4))
-        self._test_equal(G, H)
-
-    def test_graphs_not_equal(self):
-        G = nx.path_graph(4)
-        H = nx.Graph()
-        nx.add_cycle(H, range(4))
-        self._test_not_equal(G, H)
-
-    def test_graphs_not_equal2(self):
-        G = nx.path_graph(4)
-        H = nx.Graph()
-        nx.add_path(H, range(3))
-        self._test_not_equal(G, H)
-
-    def test_graphs_not_equal3(self):
-        G = nx.path_graph(4)
-        H = nx.Graph()
-        nx.add_path(H, range(4))
-        H.name = "path_graph(4)"
-        self._test_not_equal(G, H)
diff --git a/networkx/testing/utils.py b/networkx/testing/utils.py
deleted file mode 100644
index cf6935d..0000000
--- a/networkx/testing/utils.py
+++ /dev/null
@@ -1,54 +0,0 @@
-import warnings
-
-from networkx.utils import edges_equal, graphs_equal, nodes_equal
-
-__all__ = [
-    "assert_nodes_equal",
-    "assert_edges_equal",
-    "assert_graphs_equal",
-    "almost_equal",
-]
-
-
-def almost_equal(x, y, places=7):
-    warnings.warn(
-        (
-            "`almost_equal` is deprecated and will be removed in version 3.0.\n"
-            "Use `pytest.approx` instead.\n"
-        ),
-        DeprecationWarning,
-    )
-    return round(abs(x - y), places) == 0
-
-
-def assert_nodes_equal(nodes1, nodes2):
-    warnings.warn(
-        (
-            "`assert_nodes_equal` is deprecated and will be removed in version 3.0.\n"
-            "Use `from networkx.utils import nodes_equal` and `assert nodes_equal` instead.\n"
-        ),
-        DeprecationWarning,
-    )
-    assert nodes_equal(nodes1, nodes2)
-
-
-def assert_edges_equal(edges1, edges2):
-    warnings.warn(
-        (
-            "`assert_edges_equal` is deprecated and will be removed in version 3.0.\n"
-            "Use `from networkx.utils import edges_equal` and `assert edges_equal` instead.\n"
-        ),
-        DeprecationWarning,
-    )
-    assert edges_equal(edges1, edges2)
-
-
-def assert_graphs_equal(graph1, graph2):
-    warnings.warn(
-        (
-            "`assert_graphs_equal` is deprecated and will be removed in version 3.0.\n"
-            "Use `from networkx.utils import graphs_equal` and `assert graphs_equal` instead.\n"
-        ),
-        DeprecationWarning,
-    )
-    assert graphs_equal(graph1, graph2)
diff --git a/networkx/tests/test_all_random_functions.py b/networkx/tests/test_all_random_functions.py
index e8aaba1..828c313 100644
--- a/networkx/tests/test_all_random_functions.py
+++ b/networkx/tests/test_all_random_functions.py
@@ -85,7 +85,6 @@ def run_all_random_functions(seed):
     )
     t(nx.betweenness_centrality, G, seed=seed)
     t(nx.edge_betweenness_centrality, G, seed=seed)
-    t(nx.edge_betweenness, G, seed=seed)
     t(nx.approximate_current_flow_betweenness_centrality, G, seed=seed)
     # print("kernighan")
     t(nx.algorithms.community.kernighan_lin_bisection, G, seed=seed)
@@ -135,6 +134,7 @@ def run_all_random_functions(seed):
     t(nx.random_clustered_graph, joint_degree_sequence, seed=seed)
     constructor = [(3, 3, 0.5), (10, 10, 0.7)]
     t(nx.random_shell_graph, constructor, seed=seed)
+    t(nx.random_triad, G.to_directed(), seed=seed)
     mapping = {1: 0.4, 2: 0.3, 3: 0.3}
     t(nx.utils.random_weighted_sample, mapping, k, seed=seed)
     t(nx.utils.weighted_choice, mapping, seed=seed)
diff --git a/networkx/tests/test_convert.py b/networkx/tests/test_convert.py
index 5c0a904..44bed94 100644
--- a/networkx/tests/test_convert.py
+++ b/networkx/tests/test_convert.py
@@ -249,11 +249,11 @@ class TestConvert:
 
     def test_attribute_dict_integrity(self):
         # we must not replace dict-like graph data structures with dicts
-        G = nx.OrderedGraph()
+        G = nx.Graph()
         G.add_nodes_from("abc")
-        H = to_networkx_graph(G, create_using=nx.OrderedGraph)
+        H = to_networkx_graph(G, create_using=nx.Graph)
         assert list(H.nodes) == list(G.nodes)
-        H = nx.OrderedDiGraph(G)
+        H = nx.DiGraph(G)
         assert list(H.nodes) == list(G.nodes)
 
     def test_to_edgelist(self):
diff --git a/networkx/tests/test_convert_numpy.py b/networkx/tests/test_convert_numpy.py
index e341ab2..5a3b88e 100644
--- a/networkx/tests/test_convert_numpy.py
+++ b/networkx/tests/test_convert_numpy.py
@@ -8,258 +8,6 @@ from networkx.generators.classic import barbell_graph, cycle_graph, path_graph
 from networkx.utils import graphs_equal
 
 
-def test_to_numpy_matrix_deprecation():
-    pytest.deprecated_call(nx.to_numpy_matrix, nx.Graph())
-
-
-def test_from_numpy_matrix_deprecation():
-    pytest.deprecated_call(nx.from_numpy_matrix, np.eye(2))
-
-
-def test_to_numpy_recarray_deprecation():
-    pytest.deprecated_call(nx.to_numpy_recarray, nx.Graph())
-
-
-class TestConvertNumpyMatrix:
-    # TODO: This entire class can be removed when to/from_numpy_matrix
-    # deprecation expires
-    def setup_method(self):
-        self.G1 = barbell_graph(10, 3)
-        self.G2 = cycle_graph(10, create_using=nx.DiGraph)
-
-        self.G3 = self.create_weighted(nx.Graph())
-        self.G4 = self.create_weighted(nx.DiGraph())
-
-    def test_exceptions(self):
-        G = np.array("a")
-        pytest.raises(nx.NetworkXError, nx.to_networkx_graph, G)
-
-    def create_weighted(self, G):
-        g = cycle_graph(4)
-        G.add_nodes_from(g)
-        G.add_weighted_edges_from((u, v, 10 + u) for u, v in g.edges())
-        return G
-
-    def assert_equal(self, G1, G2):
-        assert sorted(G1.nodes()) == sorted(G2.nodes())
-        assert sorted(G1.edges()) == sorted(G2.edges())
-
-    def identity_conversion(self, G, A, create_using):
-        assert A.sum() > 0
-        GG = nx.from_numpy_matrix(A, create_using=create_using)
-        self.assert_equal(G, GG)
-        GW = nx.to_networkx_graph(A, create_using=create_using)
-        self.assert_equal(G, GW)
-        GI = nx.empty_graph(0, create_using).__class__(A)
-        self.assert_equal(G, GI)
-
-    def test_shape(self):
-        "Conversion from non-square array."
-        A = np.array([[1, 2, 3], [4, 5, 6]])
-        pytest.raises(nx.NetworkXError, nx.from_numpy_matrix, A)
-
-    def test_identity_graph_matrix(self):
-        "Conversion from graph to matrix to graph."
-        A = nx.to_numpy_matrix(self.G1)
-        self.identity_conversion(self.G1, A, nx.Graph())
-
-    def test_identity_graph_array(self):
-        "Conversion from graph to array to graph."
-        A = nx.to_numpy_matrix(self.G1)
-        A = np.asarray(A)
-        self.identity_conversion(self.G1, A, nx.Graph())
-
-    def test_identity_digraph_matrix(self):
-        """Conversion from digraph to matrix to digraph."""
-        A = nx.to_numpy_matrix(self.G2)
-        self.identity_conversion(self.G2, A, nx.DiGraph())
-
-    def test_identity_digraph_array(self):
-        """Conversion from digraph to array to digraph."""
-        A = nx.to_numpy_matrix(self.G2)
-        A = np.asarray(A)
-        self.identity_conversion(self.G2, A, nx.DiGraph())
-
-    def test_identity_weighted_graph_matrix(self):
-        """Conversion from weighted graph to matrix to weighted graph."""
-        A = nx.to_numpy_matrix(self.G3)
-        self.identity_conversion(self.G3, A, nx.Graph())
-
-    def test_identity_weighted_graph_array(self):
-        """Conversion from weighted graph to array to weighted graph."""
-        A = nx.to_numpy_matrix(self.G3)
-        A = np.asarray(A)
-        self.identity_conversion(self.G3, A, nx.Graph())
-
-    def test_identity_weighted_digraph_matrix(self):
-        """Conversion from weighted digraph to matrix to weighted digraph."""
-        A = nx.to_numpy_matrix(self.G4)
-        self.identity_conversion(self.G4, A, nx.DiGraph())
-
-    def test_identity_weighted_digraph_array(self):
-        """Conversion from weighted digraph to array to weighted digraph."""
-        A = nx.to_numpy_matrix(self.G4)
-        A = np.asarray(A)
-        self.identity_conversion(self.G4, A, nx.DiGraph())
-
-    def test_nodelist(self):
-        """Conversion from graph to matrix to graph with nodelist."""
-        P4 = path_graph(4)
-        P3 = path_graph(3)
-        nodelist = list(P3)
-        A = nx.to_numpy_matrix(P4, nodelist=nodelist)
-        GA = nx.Graph(A)
-        self.assert_equal(GA, P3)
-
-        assert nx.to_numpy_matrix(P3, nodelist=[]).shape == (0, 0)
-        # Test nodelist duplicates.
-        long_nodelist = nodelist + [0]
-        pytest.raises(nx.NetworkXError, nx.to_numpy_matrix, P3, nodelist=long_nodelist)
-
-        # Test nodelist contains non-nodes
-        nonnodelist = [-1, 0, 1, 2]
-        pytest.raises(nx.NetworkXError, nx.to_numpy_matrix, P3, nodelist=nonnodelist)
-
-    def test_weight_keyword(self):
-        WP4 = nx.Graph()
-        WP4.add_edges_from((n, n + 1, dict(weight=0.5, other=0.3)) for n in range(3))
-        P4 = path_graph(4)
-        A = nx.to_numpy_matrix(P4)
-        np.testing.assert_equal(A, nx.to_numpy_matrix(WP4, weight=None))
-        np.testing.assert_equal(0.5 * A, nx.to_numpy_matrix(WP4))
-        np.testing.assert_equal(0.3 * A, nx.to_numpy_matrix(WP4, weight="other"))
-
-    def test_from_numpy_matrix_type(self):
-        pytest.importorskip("scipy")
-
-        A = np.matrix([[1]])
-        G = nx.from_numpy_matrix(A)
-        assert type(G[0][0]["weight"]) == int
-
-        A = np.matrix([[1]]).astype(float)
-        G = nx.from_numpy_matrix(A)
-        assert type(G[0][0]["weight"]) == float
-
-        A = np.matrix([[1]]).astype(str)
-        G = nx.from_numpy_matrix(A)
-        assert type(G[0][0]["weight"]) == str
-
-        A = np.matrix([[1]]).astype(bool)
-        G = nx.from_numpy_matrix(A)
-        assert type(G[0][0]["weight"]) == bool
-
-        A = np.matrix([[1]]).astype(complex)
-        G = nx.from_numpy_matrix(A)
-        assert type(G[0][0]["weight"]) == complex
-
-        A = np.matrix([[1]]).astype(object)
-        pytest.raises(TypeError, nx.from_numpy_matrix, A)
-
-        G = nx.cycle_graph(3)
-        A = nx.adjacency_matrix(G).todense()
-        H = nx.from_numpy_matrix(A)
-        assert all(type(m) == int and type(n) == int for m, n in H.edges())
-        H = nx.from_numpy_array(A)
-        assert all(type(m) == int and type(n) == int for m, n in H.edges())
-
-    def test_from_numpy_matrix_dtype(self):
-        dt = [("weight", float), ("cost", int)]
-        A = np.matrix([[(1.0, 2)]], dtype=dt)
-        G = nx.from_numpy_matrix(A)
-        assert type(G[0][0]["weight"]) == float
-        assert type(G[0][0]["cost"]) == int
-        assert G[0][0]["cost"] == 2
-        assert G[0][0]["weight"] == 1.0
-
-    def test_to_numpy_recarray(self):
-        G = nx.Graph()
-        G.add_edge(1, 2, weight=7.0, cost=5)
-        A = nx.to_numpy_recarray(G, dtype=[("weight", float), ("cost", int)])
-        assert sorted(A.dtype.names) == ["cost", "weight"]
-        assert A.weight[0, 1] == 7.0
-        assert A.weight[0, 0] == 0.0
-        assert A.cost[0, 1] == 5
-        assert A.cost[0, 0] == 0
-
-    def test_numpy_multigraph(self):
-        G = nx.MultiGraph()
-        G.add_edge(1, 2, weight=7)
-        G.add_edge(1, 2, weight=70)
-        A = nx.to_numpy_matrix(G)
-        assert A[1, 0] == 77
-        A = nx.to_numpy_matrix(G, multigraph_weight=min)
-        assert A[1, 0] == 7
-        A = nx.to_numpy_matrix(G, multigraph_weight=max)
-        assert A[1, 0] == 70
-
-    def test_from_numpy_matrix_parallel_edges(self):
-        """Tests that the :func:`networkx.from_numpy_matrix` function
-        interprets integer weights as the number of parallel edges when
-        creating a multigraph.
-
-        """
-        A = np.matrix([[1, 1], [1, 2]])
-        # First, with a simple graph, each integer entry in the adjacency
-        # matrix is interpreted as the weight of a single edge in the graph.
-        expected = nx.DiGraph()
-        edges = [(0, 0), (0, 1), (1, 0)]
-        expected.add_weighted_edges_from([(u, v, 1) for (u, v) in edges])
-        expected.add_edge(1, 1, weight=2)
-        actual = nx.from_numpy_matrix(A, parallel_edges=True, create_using=nx.DiGraph)
-        assert graphs_equal(actual, expected)
-        actual = nx.from_numpy_matrix(A, parallel_edges=False, create_using=nx.DiGraph)
-        assert graphs_equal(actual, expected)
-        # Now each integer entry in the adjacency matrix is interpreted as the
-        # number of parallel edges in the graph if the appropriate keyword
-        # argument is specified.
-        edges = [(0, 0), (0, 1), (1, 0), (1, 1), (1, 1)]
-        expected = nx.MultiDiGraph()
-        expected.add_weighted_edges_from([(u, v, 1) for (u, v) in edges])
-        actual = nx.from_numpy_matrix(
-            A, parallel_edges=True, create_using=nx.MultiDiGraph
-        )
-        assert graphs_equal(actual, expected)
-        expected = nx.MultiDiGraph()
-        expected.add_edges_from(set(edges), weight=1)
-        # The sole self-loop (edge 0) on vertex 1 should have weight 2.
-        expected[1][1][0]["weight"] = 2
-        actual = nx.from_numpy_matrix(
-            A, parallel_edges=False, create_using=nx.MultiDiGraph
-        )
-        assert graphs_equal(actual, expected)
-
-    def test_symmetric(self):
-        """Tests that a symmetric matrix has edges added only once to an
-        undirected multigraph when using :func:`networkx.from_numpy_matrix`.
-
-        """
-        A = np.matrix([[0, 1], [1, 0]])
-        G = nx.from_numpy_matrix(A, create_using=nx.MultiGraph)
-        expected = nx.MultiGraph()
-        expected.add_edge(0, 1, weight=1)
-        assert graphs_equal(G, expected)
-
-    def test_dtype_int_graph(self):
-        """Test that setting dtype int actually gives an integer matrix.
-
-        For more information, see GitHub pull request #1363.
-
-        """
-        G = nx.complete_graph(3)
-        A = nx.to_numpy_matrix(G, dtype=int)
-        assert A.dtype == int
-
-    def test_dtype_int_multigraph(self):
-        """Test that setting dtype int actually gives an integer matrix.
-
-        For more information, see GitHub pull request #1363.
-
-        """
-        G = nx.MultiGraph(nx.complete_graph(3))
-        A = nx.to_numpy_matrix(G, dtype=int)
-        assert A.dtype == int
-
-
 class TestConvertNumpyArray:
     def setup_method(self):
         self.G1 = barbell_graph(10, 3)
@@ -324,9 +72,17 @@ class TestConvertNumpyArray:
         nodelist += [nodelist[0]]
         pytest.raises(nx.NetworkXError, nx.to_numpy_array, P3, nodelist=nodelist)
 
+        # Make nodelist invalid by including non-existent nodes
+        nodelist = [-1, 0, 1]
+        with pytest.raises(
+            nx.NetworkXError,
+            match=f"Nodes {nodelist - P3.nodes} in nodelist is not in G",
+        ):
+            nx.to_numpy_array(P3, nodelist=nodelist)
+
     def test_weight_keyword(self):
         WP4 = nx.Graph()
-        WP4.add_edges_from((n, n + 1, dict(weight=0.5, other=0.3)) for n in range(3))
+        WP4.add_edges_from((n, n + 1, {"weight": 0.5, "other": 0.3}) for n in range(3))
         P4 = path_graph(4)
         A = nx.to_numpy_array(P4)
         np.testing.assert_equal(A, nx.to_numpy_array(WP4, weight=None))
@@ -357,6 +113,12 @@ class TestConvertNumpyArray:
         A = np.array([[1]]).astype(object)
         pytest.raises(TypeError, nx.from_numpy_array, A)
 
+        A = np.array([[[1, 1, 1], [1, 1, 1]], [[1, 1, 1], [1, 1, 1]]])
+        with pytest.raises(
+            nx.NetworkXError, match=f"Input array must be 2D, not {A.ndim}"
+        ):
+            g = nx.from_numpy_array(A)
+
     def test_from_numpy_array_dtype(self):
         dt = [("weight", float), ("cost", int)]
         A = np.array([[(1.0, 2)]], dtype=dt)
@@ -434,75 +196,6 @@ class TestConvertNumpyArray:
         assert A.dtype == int
 
 
-@pytest.fixture
-def recarray_test_graph():
-    G = nx.Graph()
-    G.add_edge(1, 2, weight=7.0, cost=5)
-    return G
-
-
-def test_to_numpy_recarray(recarray_test_graph):
-    A = nx.to_numpy_recarray(
-        recarray_test_graph, dtype=[("weight", float), ("cost", int)]
-    )
-    assert sorted(A.dtype.names) == ["cost", "weight"]
-    assert A.weight[0, 1] == 7.0
-    assert A.weight[0, 0] == 0.0
-    assert A.cost[0, 1] == 5
-    assert A.cost[0, 0] == 0
-    with pytest.raises(AttributeError, match="has no attribute"):
-        A.color[0, 1]
-
-
-def test_to_numpy_recarray_default_dtype(recarray_test_graph):
-    A = nx.to_numpy_recarray(recarray_test_graph)
-    assert A.dtype.names == ("weight",)
-    assert A.weight[0, 0] == 0
-    assert A.weight[0, 1] == 7
-    with pytest.raises(AttributeError, match="has no attribute"):
-        A.cost[0, 1]
-
-
-def test_to_numpy_recarray_directed(recarray_test_graph):
-    G = recarray_test_graph.to_directed()
-    G.remove_edge(2, 1)
-    A = nx.to_numpy_recarray(G, dtype=[("weight", float), ("cost", int)])
-    np.testing.assert_array_equal(A.weight, np.array([[0, 7.0], [0, 0]]))
-    np.testing.assert_array_equal(A.cost, np.array([[0, 5], [0, 0]]))
-
-
-def test_to_numpy_recarray_default_dtype_no_weight():
-    G = nx.Graph()
-    G.add_edge(0, 1, color="red")
-    with pytest.raises(KeyError):
-        A = nx.to_numpy_recarray(G)
-    A = nx.to_numpy_recarray(G, dtype=[("color", "U8")])
-    assert A.color[0, 1] == "red"
-
-
-@pytest.fixture
-def recarray_nodelist_test_graph():
-    G = nx.Graph()
-    G.add_edges_from(
-        [(0, 1, {"weight": 1.0}), (0, 2, {"weight": 2.0}), (1, 2, {"weight": 0.5})]
-    )
-    return G
-
-
-def test_to_numpy_recarray_nodelist(recarray_nodelist_test_graph):
-    A = nx.to_numpy_recarray(recarray_nodelist_test_graph, nodelist=[0, 1])
-    np.testing.assert_array_equal(A.weight, np.array([[0, 1], [1, 0]]))
-
-
-@pytest.mark.parametrize(
-    ("nodelist", "errmsg"),
-    (([2, 3], "in nodelist is not in G"), ([1, 1], "nodelist contains duplicates")),
-)
-def test_to_numpy_recarray_bad_nodelist(recarray_nodelist_test_graph, nodelist, errmsg):
-    with pytest.raises(nx.NetworkXError, match=errmsg):
-        A = nx.to_numpy_recarray(recarray_nodelist_test_graph, nodelist=nodelist)
-
-
 @pytest.fixture
 def multigraph_test_graph():
     G = nx.MultiGraph()
diff --git a/networkx/tests/test_convert_pandas.py b/networkx/tests/test_convert_pandas.py
index d3830e6..ca8d08c 100644
--- a/networkx/tests/test_convert_pandas.py
+++ b/networkx/tests/test_convert_pandas.py
@@ -168,7 +168,6 @@ class TestConvertPandas:
         assert edges_equal(G.edges(), GW.edges())
 
     def test_to_edgelist_default_source_or_target_col_exists(self):
-
         G = nx.path_graph(10)
         G.add_weighted_edges_from((u, v, u) for u, v in list(G.edges))
         nx.set_edge_attributes(G, 0, name="source")
@@ -182,7 +181,6 @@ class TestConvertPandas:
         pytest.raises(nx.NetworkXError, nx.to_pandas_edgelist, G)
 
     def test_to_edgelist_custom_source_or_target_col_exists(self):
-
         G = nx.path_graph(10)
         G.add_weighted_edges_from((u, v, u) for u, v in list(G.edges))
         nx.set_edge_attributes(G, 0, name="source_col_name")
diff --git a/networkx/tests/test_convert_scipy.py b/networkx/tests/test_convert_scipy.py
index 4b2537d..125e905 100644
--- a/networkx/tests/test_convert_scipy.py
+++ b/networkx/tests/test_convert_scipy.py
@@ -108,7 +108,7 @@ class TestConvertScipy:
 
     def test_weight_keyword(self):
         WP4 = nx.Graph()
-        WP4.add_edges_from((n, n + 1, dict(weight=0.5, other=0.3)) for n in range(3))
+        WP4.add_edges_from((n, n + 1, {"weight": 0.5, "other": 0.3}) for n in range(3))
         P4 = path_graph(4)
         A = nx.to_scipy_sparse_array(P4)
         np.testing.assert_equal(
@@ -123,7 +123,7 @@ class TestConvertScipy:
 
     def test_format_keyword(self):
         WP4 = nx.Graph()
-        WP4.add_edges_from((n, n + 1, dict(weight=0.5, other=0.3)) for n in range(3))
+        WP4.add_edges_from((n, n + 1, {"weight": 0.5, "other": 0.3}) for n in range(3))
         P4 = path_graph(4)
         A = nx.to_scipy_sparse_array(P4, format="csr")
         np.testing.assert_equal(
@@ -164,7 +164,7 @@ class TestConvertScipy:
         with pytest.raises(nx.NetworkXError):
             WP4 = nx.Graph()
             WP4.add_edges_from(
-                (n, n + 1, dict(weight=0.5, other=0.3)) for n in range(3)
+                (n, n + 1, {"weight": 0.5, "other": 0.3}) for n in range(3)
             )
             P4 = path_graph(4)
             nx.to_scipy_sparse_array(P4, format="any_other")
@@ -281,13 +281,3 @@ def test_from_scipy_sparse_array_formats(sparse_format):
     )
     A = sp.sparse.coo_array([[0, 3, 2], [3, 0, 1], [2, 1, 0]]).asformat(sparse_format)
     assert graphs_equal(expected, nx.from_scipy_sparse_array(A))
-
-
-# NOTE: remove when to/from_sparse_matrix deprecations expire
-def test_scipy_sparse_matrix_deprecations():
-    G = nx.path_graph(3)
-    msg = "\n\nThe scipy.sparse array containers will be used instead of matrices"
-    with pytest.warns(DeprecationWarning, match=msg):
-        M = nx.to_scipy_sparse_matrix(G)
-    with pytest.warns(DeprecationWarning, match=msg):
-        H = nx.from_scipy_sparse_matrix(M)
diff --git a/networkx/tests/test_relabel.py b/networkx/tests/test_relabel.py
index 9a86b38..0ebf4d3 100644
--- a/networkx/tests/test_relabel.py
+++ b/networkx/tests/test_relabel.py
@@ -106,12 +106,19 @@ class TestRelabel:
         H = nx.relabel_nodes(G, mapping)
         assert nodes_equal(H.nodes(), [65, 66, 67, 68])
 
-    def test_relabel_nodes_classes(self):
-        G = nx.empty_graph()
-        G.add_edges_from([(0, 1), (0, 2), (1, 2), (2, 3)])
+    def test_relabel_nodes_callable_type(self):
+        G = nx.path_graph(4)
         H = nx.relabel_nodes(G, str)
         assert nodes_equal(H.nodes, ["0", "1", "2", "3"])
 
+    @pytest.mark.parametrize("non_mc", ("0123", ["0", "1", "2", "3"]))
+    def test_relabel_nodes_non_mapping_or_callable(self, non_mc):
+        """If `mapping` is neither a Callable or a Mapping, an exception
+        should be raised."""
+        G = nx.path_graph(4)
+        with pytest.raises(AttributeError):
+            nx.relabel_nodes(G, non_mc)
+
     def test_relabel_nodes_graph(self):
         G = nx.Graph([("A", "B"), ("A", "C"), ("B", "C"), ("C", "D")])
         mapping = {"A": "aardvark", "B": "bear", "C": "cat", "D": "dog"}
@@ -119,7 +126,7 @@ class TestRelabel:
         assert nodes_equal(H.nodes(), ["aardvark", "bear", "cat", "dog"])
 
     def test_relabel_nodes_orderedgraph(self):
-        G = nx.OrderedGraph()
+        G = nx.Graph()
         G.add_nodes_from([1, 2, 3])
         G.add_edges_from([(1, 3), (2, 3)])
         mapping = {1: "a", 2: "b", 3: "c"}
@@ -306,3 +313,35 @@ class TestRelabel:
         H = nx.relabel_nodes(G, mapping, copy=True)
         with pytest.raises(nx.NetworkXUnfeasible):
             H = nx.relabel_nodes(G, mapping, copy=False)
+
+    def test_relabel_preserve_node_order_full_mapping_with_copy_true(self):
+        G = nx.path_graph(3)
+        original_order = list(G.nodes())
+        mapping = {2: "a", 1: "b", 0: "c"}  # dictionary keys out of order on purpose
+        H = nx.relabel_nodes(G, mapping, copy=True)
+        new_order = list(H.nodes())
+        assert [mapping.get(i, i) for i in original_order] == new_order
+
+    def test_relabel_preserve_node_order_full_mapping_with_copy_false(self):
+        G = nx.path_graph(3)
+        original_order = list(G)
+        mapping = {2: "a", 1: "b", 0: "c"}  # dictionary keys out of order on purpose
+        H = nx.relabel_nodes(G, mapping, copy=False)
+        new_order = list(H)
+        assert [mapping.get(i, i) for i in original_order] == new_order
+
+    def test_relabel_preserve_node_order_partial_mapping_with_copy_true(self):
+        G = nx.path_graph(3)
+        original_order = list(G)
+        mapping = {1: "a", 0: "b"}  # partial mapping and keys out of order on purpose
+        H = nx.relabel_nodes(G, mapping, copy=True)
+        new_order = list(H)
+        assert [mapping.get(i, i) for i in original_order] == new_order
+
+    def test_relabel_preserve_node_order_partial_mapping_with_copy_false(self):
+        G = nx.path_graph(3)
+        original_order = list(G)
+        mapping = {1: "a", 0: "b"}  # partial mapping and keys out of order on purpose
+        H = nx.relabel_nodes(G, mapping, copy=False)
+        new_order = list(H)
+        assert [mapping.get(i, i) for i in original_order] != new_order
diff --git a/networkx/utils/__init__.py b/networkx/utils/__init__.py
index 9f168de..48f02c1 100644
--- a/networkx/utils/__init__.py
+++ b/networkx/utils/__init__.py
@@ -4,4 +4,3 @@ from networkx.utils.random_sequence import *
 from networkx.utils.union_find import *
 from networkx.utils.rcm import *
 from networkx.utils.heaps import *
-from networkx.utils.contextmanagers import *
diff --git a/networkx/utils/contextmanagers.py b/networkx/utils/contextmanagers.py
deleted file mode 100644
index dcd6b9c..0000000
--- a/networkx/utils/contextmanagers.py
+++ /dev/null
@@ -1,47 +0,0 @@
-import warnings
-from contextlib import contextmanager
-
-__all__ = ["reversed"]
-
-
-@contextmanager
-def reversed(G):
-    """A context manager for temporarily reversing a directed graph in place.
-
-    .. deprecated:: 2.6
-
-       This context manager is deprecated and will be removed in 3.0.
-       Use ``G.reverse(copy=False) if G.is_directed() else G`` instead.
-
-    This is a no-op for undirected graphs.
-
-    Parameters
-    ----------
-    G : graph
-        A NetworkX graph.
-
-    Warning
-    -------
-    The reversed context manager is deprecated in favor
-    of G.reverse(copy=False). The view allows multiple threads to use the
-    same graph without confusion while the context manager does not.
-    This context manager is scheduled to be removed in version 3.0.
-    """
-    msg = (
-        "context manager reversed is deprecated and to be removed in 3.0."
-        "Use G.reverse(copy=False) if G.is_directed() else G instead."
-    )
-    warnings.warn(msg, DeprecationWarning)
-
-    directed = G.is_directed()
-    if directed:
-        G._pred, G._succ = G._succ, G._pred
-        G._adj = G._succ
-
-    try:
-        yield
-    finally:
-        if directed:
-            # Reverse the reverse.
-            G._pred, G._succ = G._succ, G._pred
-            G._adj = G._succ
diff --git a/networkx/utils/decorators.py b/networkx/utils/decorators.py
index d72af39..fbccff6 100644
--- a/networkx/utils/decorators.py
+++ b/networkx/utils/decorators.py
@@ -16,8 +16,6 @@ __all__ = [
     "not_implemented_for",
     "open_file",
     "nodes_or_number",
-    "preserve_random_state",
-    "random_state",
     "np_random_state",
     "py_random_state",
     "argmap",
@@ -77,8 +75,8 @@ def not_implemented_for(*graph_types):
         )
 
     # 3-way logic: True if "directed" input, False if "undirected" input, else None
-    dval = ("directed" in graph_types) or not ("undirected" in graph_types) and None
-    mval = ("multigraph" in graph_types) or not ("graph" in graph_types) and None
+    dval = ("directed" in graph_types) or "undirected" not in graph_types and None
+    mval = ("multigraph" in graph_types) or "graph" not in graph_types and None
     errmsg = f"not implemented for {' '.join(graph_types)} type"
 
     def _not_implemented_for(g):
@@ -99,7 +97,7 @@ fopeners = {
     ".gzip": gzip.open,
     ".bz2": bz2.BZ2File,
 }
-_dispatch_dict = defaultdict(lambda: open, **fopeners)  # type: ignore
+_dispatch_dict = defaultdict(lambda: open, **fopeners)
 
 
 def open_file(path_arg, mode="r"):
@@ -258,62 +256,6 @@ def nodes_or_number(which_args):
     return argmap(_nodes_or_number, *iter_wa)
 
 
-def preserve_random_state(func):
-    """Decorator to preserve the numpy.random state during a function.
-
-    .. deprecated:: 2.6
-        This is deprecated and will be removed in NetworkX v3.0.
-
-    Parameters
-    ----------
-    func : function
-        function around which to preserve the random state.
-
-    Returns
-    -------
-    wrapper : function
-        Function which wraps the input function by saving the state before
-        calling the function and restoring the function afterward.
-
-    Examples
-    --------
-    Decorate functions like this::
-
-        @preserve_random_state
-        def do_random_stuff(x, y):
-            return x + y * numpy.random.random()
-
-    Notes
-    -----
-    If numpy.random is not importable, the state is not saved or restored.
-    """
-    import warnings
-
-    msg = "preserve_random_state is deprecated and will be removed in 3.0."
-    warnings.warn(msg, DeprecationWarning)
-
-    try:
-        import numpy as np
-
-        @contextmanager
-        def save_random_state():
-            state = np.random.get_state()
-            try:
-                yield
-            finally:
-                np.random.set_state(state)
-
-        def wrapper(*args, **kwargs):
-            with save_random_state():
-                np.random.seed(1234567890)
-                return func(*args, **kwargs)
-
-        wrapper.__name__ = func.__name__
-        return wrapper
-    except ImportError:
-        return func
-
-
 def np_random_state(random_state_argument):
     """Decorator to generate a `numpy.random.RandomState` instance.
 
@@ -358,27 +300,6 @@ def np_random_state(random_state_argument):
     return argmap(create_random_state, random_state_argument)
 
 
-def random_state(random_state_argument):
-    """Decorator to generate a `numpy.random.RandomState` instance.
-
-    .. deprecated:: 2.7
-
-       This function is a deprecated alias for `np_random_state` and will be
-       removed in version 3.0. Use np_random_state instead.
-    """
-    import warnings
-
-    warnings.warn(
-        (
-            "`random_state` is a deprecated alias for `np_random_state`\n"
-            "and will be removed in version 3.0. Use `np_random_state` instead."
-        ),
-        DeprecationWarning,
-        stacklevel=2,
-    )
-    return np_random_state(random_state_argument)
-
-
 def py_random_state(random_state_argument):
     """Decorator to generate a random.Random instance (or equiv).
 
diff --git a/networkx/utils/mapped_queue.py b/networkx/utils/mapped_queue.py
index 0ff53a0..5dd8590 100644
--- a/networkx/utils/mapped_queue.py
+++ b/networkx/utils/mapped_queue.py
@@ -43,7 +43,12 @@ class _HeapElement:
             return self.priority < other
         # assume comparing to another _HeapElement
         if self.priority == other_priority:
-            return self.element < other.element
+            try:
+                return self.element < other.element
+            except TypeError as err:
+                raise TypeError(
+                    "Consider using a tuple, with a priority value that can be compared."
+                )
         return self.priority < other_priority
 
     def __gt__(self, other):
@@ -53,7 +58,12 @@ class _HeapElement:
             return self.priority > other
         # assume comparing to another _HeapElement
         if self.priority == other_priority:
-            return self.element < other.element
+            try:
+                return self.element > other.element
+            except TypeError as err:
+                raise TypeError(
+                    "Consider using a tuple, with a priority value that can be compared."
+                )
         return self.priority > other_priority
 
     def __eq__(self, other):
@@ -93,20 +103,28 @@ class MappedQueue:
     library. While MappedQueue is designed for maximum compatibility with
     heapq, it adds element removal, lookup, and priority update.
 
+    Parameters
+    ----------
+    data : dict or iterable
+
     Examples
     --------
 
-    A `MappedQueue` can be created empty or optionally given an array of
-    initial elements. Calling `push()` will add an element and calling `pop()`
-    will remove and return the smallest element.
+    A `MappedQueue` can be created empty, or optionally, given a dictionary
+    of initial elements and priorities.  The methods `push`, `pop`,
+    `remove`, and `update` operate on the queue.
 
-    >>> q = MappedQueue([916, 50, 4609, 493, 237])
-    >>> q.push(1310)
+    >>> colors_nm = {'red':665, 'blue': 470, 'green': 550}
+    >>> q = MappedQueue(colors_nm)
+    >>> q.remove('red')
+    >>> q.update('green', 'violet', 400)
+    >>> q.push('indigo', 425)
     True
-    >>> [q.pop() for i in range(len(q.heap))]
-    [50, 237, 493, 916, 1310, 4609]
+    >>> [q.pop().element for i in range(len(q.heap))]
+    ['violet', 'indigo', 'blue']
 
-    Elements can also be updated or removed from anywhere in the queue.
+    A `MappedQueue` can also be initialized with a list or other iterable. The priority is assumed
+    to be the sort order of the items in the list.
 
     >>> q = MappedQueue([916, 50, 4609, 493, 237])
     >>> q.remove(493)
@@ -114,6 +132,17 @@ class MappedQueue:
     >>> [q.pop() for i in range(len(q.heap))]
     [50, 916, 1117, 4609]
 
+    An exception is raised if the elements are not comparable.
+
+    >>> q = MappedQueue([100, 'a'])
+    Traceback (most recent call last):
+    ...
+    TypeError: '<' not supported between instances of 'int' and 'str'
+
+    To avoid the exception, use a dictionary to assign priorities to the elements.
+
+    >>> q = MappedQueue({100: 0, 'a': 1 })
+
     References
     ----------
     .. [1] Cormen, T. H., Leiserson, C. E., Rivest, R. L., & Stein, C. (2001).
@@ -122,13 +151,15 @@ class MappedQueue:
        Pearson Education.
     """
 
-    def __init__(self, data=[]):
+    def __init__(self, data=None):
         """Priority queue class with updatable priorities."""
-        if isinstance(data, dict):
+        if data is None:
+            self.heap = []
+        elif isinstance(data, dict):
             self.heap = [_HeapElement(v, k) for k, v in data.items()]
         else:
             self.heap = list(data)
-        self.position = dict()
+        self.position = {}
         self._heapify()
 
     def _heapify(self):
diff --git a/networkx/utils/misc.py b/networkx/utils/misc.py
index 51a527d..31189bf 100644
--- a/networkx/utils/misc.py
+++ b/networkx/utils/misc.py
@@ -21,24 +21,12 @@ from itertools import chain, tee
 import networkx as nx
 
 __all__ = [
-    "is_string_like",
-    "iterable",
-    "empty_generator",
     "flatten",
     "make_list_of_ints",
-    "is_list_of_ints",
-    "make_str",
-    "generate_unique_node",
-    "default_opener",
     "dict_to_numpy_array",
-    "dict_to_numpy_array1",
-    "dict_to_numpy_array2",
-    "is_iterator",
     "arbitrary_element",
-    "consume",
     "pairwise",
     "groups",
-    "to_tuple",
     "create_random_state",
     "create_py_random_state",
     "PythonRandomInterface",
@@ -53,51 +41,6 @@ __all__ = [
 # see G.add_nodes and others in Graph Class in networkx/base.py
 
 
-def is_string_like(obj):  # from John Hunter, types-free version
-    """Check if obj is string.
-
-    .. deprecated:: 2.6
-        This is deprecated and will be removed in NetworkX v3.0.
-    """
-    msg = (
-        "is_string_like is deprecated and will be removed in 3.0."
-        "Use isinstance(obj, str) instead."
-    )
-    warnings.warn(msg, DeprecationWarning)
-    return isinstance(obj, str)
-
-
-def iterable(obj):
-    """Return True if obj is iterable with a well-defined len().
-
-    .. deprecated:: 2.6
-        This is deprecated and will be removed in NetworkX v3.0.
-    """
-    msg = (
-        "iterable is deprecated and will be removed in 3.0."
-        "Use isinstance(obj, (collections.abc.Iterable, collections.abc.Sized)) instead."
-    )
-    warnings.warn(msg, DeprecationWarning)
-    if hasattr(obj, "__iter__"):
-        return True
-    try:
-        len(obj)
-    except:
-        return False
-    return True
-
-
-def empty_generator():
-    """Return a generator with no members.
-
-    .. deprecated:: 2.6
-    """
-    warnings.warn(
-        "empty_generator is deprecated and will be removed in v3.0.", DeprecationWarning
-    )
-    return (i for i in ())
-
-
 def flatten(obj, result=None):
     """Return flattened version of (possibly nested) iterable object."""
     if not isinstance(obj, (Iterable, Sized)) or isinstance(obj, str):
@@ -148,79 +91,6 @@ def make_list_of_ints(sequence):
     return sequence
 
 
-def is_list_of_ints(intlist):
-    """Return True if list is a list of ints.
-
-    .. deprecated:: 2.6
-        This is deprecated and will be removed in NetworkX v3.0.
-    """
-    msg = (
-        "is_list_of_ints is deprecated and will be removed in 3.0."
-        "See also: ``networkx.utils.make_list_of_ints.``"
-    )
-    warnings.warn(msg, DeprecationWarning, stacklevel=2)
-    if not isinstance(intlist, list):
-        return False
-    for i in intlist:
-        if not isinstance(i, int):
-            return False
-    return True
-
-
-def make_str(x):
-    """Returns the string representation of t.
-
-    .. deprecated:: 2.6
-        This is deprecated and will be removed in NetworkX v3.0.
-    """
-    msg = "make_str is deprecated and will be removed in 3.0. Use str instead."
-    warnings.warn(msg, DeprecationWarning)
-    return str(x)
-
-
-def generate_unique_node():
-    """Generate a unique node label.
-
-    .. deprecated:: 2.6
-        This is deprecated and will be removed in NetworkX v3.0.
-    """
-    msg = "generate_unique_node is deprecated and will be removed in 3.0. Use uuid.uuid4 instead."
-    warnings.warn(msg, DeprecationWarning)
-    return str(uuid.uuid4())
-
-
-def default_opener(filename):
-    """Opens `filename` using system's default program.
-
-    .. deprecated:: 2.6
-       default_opener is deprecated and will be removed in version 3.0.
-       Consider an image processing library to open images, such as Pillow::
-
-           from PIL import Image
-           Image.open(filename).show()
-
-    Parameters
-    ----------
-    filename : str
-        The path of the file to be opened.
-
-    """
-    warnings.warn(
-        "default_opener is deprecated and will be removed in version 3.0. ",
-        DeprecationWarning,
-    )
-    from subprocess import call
-
-    cmds = {
-        "darwin": ["open"],
-        "linux": ["xdg-open"],
-        "linux2": ["xdg-open"],
-        "win32": ["cmd.exe", "/C", "start", ""],
-    }
-    cmd = cmds[sys.platform] + [filename]
-    call(cmd)
-
-
 def dict_to_numpy_array(d, mapping=None):
     """Convert a dictionary of dictionaries to a numpy array
     with optional mapping."""
@@ -232,23 +102,6 @@ def dict_to_numpy_array(d, mapping=None):
         return _dict_to_numpy_array1(d, mapping)
 
 
-def dict_to_numpy_array2(d, mapping=None):
-    """Convert a dict of dicts to a 2d numpy array with optional mapping.
-
-    .. deprecated:: 2.8
-
-       dict_to_numpy_array2 is deprecated and will be removed in networkx 3.0.
-       Use `dict_to_numpy_array` instead.
-    """
-    msg = (
-        "dict_to_numpy_array2 is deprecated and will be removed in networkx 3.0.\n"
-        "Use dict_to_numpy_array instead."
-    )
-    warnings.warn(msg, DeprecationWarning, stacklevel=2)
-
-    return _dict_to_numpy_array2(d, mapping)
-
-
 def _dict_to_numpy_array2(d, mapping=None):
     """Convert a dictionary of dictionaries to a 2d numpy array
     with optional mapping.
@@ -272,23 +125,6 @@ def _dict_to_numpy_array2(d, mapping=None):
     return a
 
 
-def dict_to_numpy_array1(d, mapping=None):
-    """Convert a dict of numbers to a 1d numpy array with optional mapping.
-
-    .. deprecated:: 2.8
-
-       dict_to_numpy_array1 is deprecated and will be removed in networkx 3.0.
-       Use dict_to_numpy_array instead.
-    """
-    msg = (
-        "dict_to_numpy_array1 is deprecated and will be removed in networkx 3.0.\n"
-        "Use dict_to_numpy_array instead."
-    )
-    warnings.warn(msg, DeprecationWarning, stacklevel=2)
-
-    return _dict_to_numpy_array1(d, mapping)
-
-
 def _dict_to_numpy_array1(d, mapping=None):
     """Convert a dictionary of numbers to a 1d numpy array with optional mapping."""
     import numpy as np
@@ -304,21 +140,6 @@ def _dict_to_numpy_array1(d, mapping=None):
     return a
 
 
-def is_iterator(obj):
-    """Returns True if and only if the given object is an iterator object.
-
-    .. deprecated:: 2.6.0
-        Deprecated in favor of ``isinstance(obj, collections.abc.Iterator)``
-    """
-    msg = (
-        "is_iterator is deprecated and will be removed in version 3.0. "
-        "Use ``isinstance(obj, collections.abc.Iterator)`` instead."
-    )
-    warnings.warn(msg, DeprecationWarning, stacklevel=2)
-    has_next_attr = hasattr(obj, "__next__") or hasattr(obj, "next")
-    return iter(obj) is obj and has_next_attr
-
-
 def arbitrary_element(iterable):
     """Returns an arbitrary element of `iterable` without removing it.
 
@@ -388,22 +209,6 @@ def arbitrary_element(iterable):
     return next(iter(iterable))
 
 
-# Recipe from the itertools documentation.
-def consume(iterator):
-    """Consume the iterator entirely.
-
-    .. deprecated:: 2.6
-        This is deprecated and will be removed in NetworkX v3.0.
-    """
-    # Feed the entire iterator into a zero-length deque.
-    msg = (
-        "consume is deprecated and will be removed in version 3.0. "
-        "Use ``collections.deque(iterator, maxlen=0)`` instead."
-    )
-    warnings.warn(msg, DeprecationWarning, stacklevel=2)
-    deque(iterator, maxlen=0)
-
-
 # Recipe from the itertools documentation.
 def pairwise(iterable, cyclic=False):
     "s -> (s0, s1), (s1, s2), (s2, s3), ..."
@@ -436,31 +241,6 @@ def groups(many_to_one):
     return dict(one_to_many)
 
 
-def to_tuple(x):
-    """Converts lists to tuples.
-
-    .. deprecated:: 2.8
-
-       to_tuple is deprecated and will be removed in NetworkX 3.0.
-
-    Examples
-    --------
-    >>> from networkx.utils import to_tuple
-    >>> a_list = [1, 2, [1, 4]]
-    >>> to_tuple(a_list)
-    (1, 2, (1, 4))
-    """
-    warnings.warn(
-        "to_tuple is deprecated and will be removed in NetworkX 3.0.",
-        DeprecationWarning,
-        stacklevel=2,
-    )
-
-    if not isinstance(x, (tuple, list)):
-        return x
-    return tuple(map(to_tuple, x))
-
-
 def create_random_state(random_state=None):
     """Returns a numpy.random.RandomState or numpy.random.Generator instance
     depending on input.
diff --git a/networkx/utils/rcm.py b/networkx/utils/rcm.py
index 8499fd8..f9e1bfe 100644
--- a/networkx/utils/rcm.py
+++ b/networkx/utils/rcm.py
@@ -135,7 +135,7 @@ def connected_cuthill_mckee_ordering(G, heuristic=None):
     while queue:
         parent = queue.popleft()
         yield parent
-        nd = sorted(list(G.degree(set(G[parent]) - visited)), key=itemgetter(1))
+        nd = sorted(G.degree(set(G[parent]) - visited), key=itemgetter(1))
         children = [n for n, d in nd]
         visited.update(children)
         queue.extend(children)
diff --git a/networkx/utils/tests/test_contextmanager.py b/networkx/utils/tests/test_contextmanager.py
deleted file mode 100644
index 6924683..0000000
--- a/networkx/utils/tests/test_contextmanager.py
+++ /dev/null
@@ -1,18 +0,0 @@
-import networkx as nx
-
-
-def test_reversed():
-    G = nx.DiGraph()
-    G.add_edge("A", "B")
-
-    # no exception
-    with nx.utils.reversed(G):
-        pass
-    assert "B" in G["A"]
-
-    # exception
-    try:
-        with nx.utils.reversed(G):
-            raise Exception
-    except:
-        assert "B" in G["A"]
diff --git a/networkx/utils/tests/test_decorators.py b/networkx/utils/tests/test_decorators.py
index 93f22be..f74dd9a 100644
--- a/networkx/utils/tests/test_decorators.py
+++ b/networkx/utils/tests/test_decorators.py
@@ -11,9 +11,7 @@ from networkx.utils.decorators import (
     not_implemented_for,
     np_random_state,
     open_file,
-    preserve_random_state,
     py_random_state,
-    random_state,
 )
 from networkx.utils.misc import PythonRandomInterface
 
@@ -206,17 +204,6 @@ class TestOpenFileDecorator:
         self.writer_kwarg(path=None)
 
 
-@preserve_random_state
-def test_preserve_random_state():
-    try:
-        import numpy.random
-
-        r = numpy.random.random()
-    except ImportError:
-        return
-    assert abs(r - 0.61879477158568) < 1e-16
-
-
 class TestRandomState:
     @classmethod
     def setup_class(cls):
@@ -230,9 +217,7 @@ class TestRandomState:
 
     @py_random_state(1)
     def instantiate_py_random_state(self, random_state):
-        assert isinstance(random_state, random.Random) or isinstance(
-            random_state, PythonRandomInterface
-        )
+        assert isinstance(random_state, (random.Random, PythonRandomInterface))
         return random_state.random()
 
     def test_random_state_None(self):
diff --git a/networkx/utils/tests/test_mapped_queue.py b/networkx/utils/tests/test_mapped_queue.py
index 3570ad2..ca9b7e4 100644
--- a/networkx/utils/tests/test_mapped_queue.py
+++ b/networkx/utils/tests/test_mapped_queue.py
@@ -12,6 +12,13 @@ def test_HeapElement_gtlt():
     assert 1 < bar
 
 
+def test_HeapElement_gtlt_tied_priority():
+    bar = _HeapElement(1, "a")
+    foo = _HeapElement(1, "b")
+    assert foo > bar
+    assert bar < foo
+
+
 def test_HeapElement_eq():
     bar = _HeapElement(1.1, "a")
     foo = _HeapElement(1, "a")
@@ -63,6 +70,10 @@ class TestMappedQueue:
         q = MappedQueue(h)
         self._check_map(q)
 
+    def test_incomparable(self):
+        h = [5, 4, "a", 2, 1, 0]
+        pytest.raises(TypeError, MappedQueue, h)
+
     def test_len(self):
         h = [5, 4, 3, 2, 1, 0]
         q = MappedQueue(h)
@@ -197,6 +208,30 @@ class TestMappedDict(TestMappedQueue):
         priority_dict = {elt: elt for elt in h}
         return MappedQueue(priority_dict)
 
+    def test_init(self):
+        d = {5: 0, 4: 1, "a": 2, 2: 3, 1: 4}
+        q = MappedQueue(d)
+        assert q.position == d
+
+    def test_ties(self):
+        d = {5: 0, 4: 1, 3: 2, 2: 3, 1: 4}
+        q = MappedQueue(d)
+        assert q.position == {elt: pos for pos, elt in enumerate(q.heap)}
+
+    def test_pop(self):
+        d = {5: 0, 4: 1, 3: 2, 2: 3, 1: 4}
+        q = MappedQueue(d)
+        assert q.pop() == _HeapElement(0, 5)
+        assert q.position == {elt: pos for pos, elt in enumerate(q.heap)}
+
+    def test_empty_pop(self):
+        q = MappedQueue()
+        pytest.raises(IndexError, q.pop)
+
+    def test_incomparable_ties(self):
+        d = {5: 0, 4: 0, "a": 0, 2: 0, 1: 0}
+        pytest.raises(TypeError, MappedQueue, d)
+
     def test_push(self):
         to_push = [6, 1, 4, 3, 2, 5, 0]
         h_sifted = [0, 2, 1, 6, 3, 5, 4]
diff --git a/networkx/utils/tests/test_misc.py b/networkx/utils/tests/test_misc.py
index b886852..18d2878 100644
--- a/networkx/utils/tests/test_misc.py
+++ b/networkx/utils/tests/test_misc.py
@@ -13,13 +13,9 @@ from networkx.utils import (
     discrete_sequence,
     flatten,
     groups,
-    is_string_like,
-    iterable,
     make_list_of_ints,
-    make_str,
     pairwise,
     powerlaw_sequence,
-    to_tuple,
 )
 from networkx.utils.misc import _dict_to_numpy_array1, _dict_to_numpy_array2
 
@@ -62,28 +58,6 @@ def test_flatten(nested, result):
     assert issubclass(type(val), tuple)
 
 
-def test_is_string_like():
-    assert is_string_like("aaaa")
-    assert not is_string_like(None)
-    assert not is_string_like(123)
-
-
-def test_iterable():
-    assert not iterable(None)
-    assert not iterable(10)
-    assert iterable([1, 2, 3])
-    assert iterable((1, 2, 3))
-    assert iterable({1: "A", 2: "X"})
-    assert iterable("ABC")
-
-
-def test_graph_iterable():
-    K = nx.complete_graph(10)
-    assert iterable(K)
-    assert iterable(K.nodes())
-    assert iterable(K.edges())
-
-
 def test_make_list_of_ints():
     mylist = [1, 2, 3.0, 42, -2]
     assert make_list_of_ints(mylist) is mylist
@@ -99,20 +73,6 @@ def test_random_number_distribution():
     z = discrete_sequence(20, distribution=[0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 3])
 
 
-def test_make_str_with_bytes():
-    x = "qualité"
-    y = make_str(x)
-    assert isinstance(y, str)
-    assert len(y) == 7
-
-
-def test_make_str_with_unicode():
-    x = "qualité"
-    y = make_str(x)
-    assert isinstance(y, str)
-    assert len(y) == 7
-
-
 class TestNumpyArray:
     @classmethod
     def setup_class(cls):
@@ -195,23 +155,6 @@ def test_groups():
     assert {} == groups({})
 
 
-def test_to_tuple():
-    a_list = [1, 2, [1, 3]]
-    actual = to_tuple(a_list)
-    expected = (1, 2, (1, 3))
-    assert actual == expected
-
-    a_tuple = (1, 2)
-    actual = to_tuple(a_tuple)
-    expected = a_tuple
-    assert actual == expected
-
-    a_mix = (1, 2, [1, 3])
-    actual = to_tuple(a_mix)
-    expected = (1, 2, (1, 3))
-    assert actual == expected
-
-
 def test_create_random_state():
     np = pytest.importorskip("numpy")
     rs = np.random.RandomState
@@ -310,13 +253,3 @@ def test_arbitrary_element_raises(iterator):
     """Value error is raised when input is an iterator."""
     with pytest.raises(ValueError, match="from an iterator"):
         arbitrary_element(iterator)
-
-
-def test_dict_to_numpy_array_deprecations():
-    np = pytest.importorskip("numpy")
-    d = {"a": 1}
-    with pytest.deprecated_call():
-        nx.utils.dict_to_numpy_array1(d)
-    d2 = {"a": {"b": 2}}
-    with pytest.deprecated_call():
-        nx.utils.dict_to_numpy_array2(d2)
diff --git a/networkx/utils/union_find.py b/networkx/utils/union_find.py
index 27596c9..2a07129 100644
--- a/networkx/utils/union_find.py
+++ b/networkx/utils/union_find.py
@@ -53,11 +53,12 @@ class UnionFind:
             return object
 
         # find path of objects leading to the root
-        path = [object]
+        path = []
         root = self.parents[object]
-        while root != path[-1]:
-            path.append(root)
-            root = self.parents[root]
+        while root != object:
+            path.append(object)
+            object = root
+            root = self.parents[object]
 
         # compress the path and return
         for ancestor in path:
@@ -82,7 +83,7 @@ class UnionFind:
 
         """
         # Ensure fully pruned paths
-        for x in self.parents.keys():
+        for x in self.parents:
             _ = self[x]  # Evaluated for side-effect only
 
         yield from groups(self.parents).values()
diff --git a/requirements/default.txt b/requirements/default.txt
index 153e8b5..add8758 100644
--- a/requirements/default.txt
+++ b/requirements/default.txt
@@ -1,4 +1,4 @@
-numpy>=1.19
+numpy>=1.20
 scipy>=1.8
 matplotlib>=3.4
 pandas>=1.3
diff --git a/requirements/developer.txt b/requirements/developer.txt
index 068b450..92556c7 100644
--- a/requirements/developer.txt
+++ b/requirements/developer.txt
@@ -1,2 +1,2 @@
-pre-commit>=2.20
-mypy>=0.982
+pre-commit>=3.2
+mypy>=1.1
diff --git a/requirements/doc.txt b/requirements/doc.txt
index 7cc1e78..21db9d3 100644
--- a/requirements/doc.txt
+++ b/requirements/doc.txt
@@ -1,7 +1,7 @@
-sphinx>=5.2
-pydata-sphinx-theme>=0.11
-sphinx-gallery>=0.11
+sphinx>=6.1
+pydata-sphinx-theme>=0.13
+sphinx-gallery>=0.12
 numpydoc>=1.5
-pillow>=9.2
+pillow>=9.4
 nb2plots>=0.6
-texext>=0.6.6
+texext>=0.6.7
diff --git a/requirements/example.txt b/requirements/example.txt
index a256fbd..9350879 100644
--- a/requirements/example.txt
+++ b/requirements/example.txt
@@ -1,6 +1,6 @@
-osmnx>=1.1
+osmnx>=1.2
 momepy>=0.5
 contextily>=1.2
-seaborn>=0.11
-cairocffi>=1.3
-igraph>=0.9.8
+seaborn>=0.12
+cairocffi>=1.4
+igraph>=0.10
diff --git a/requirements/extra.txt b/requirements/extra.txt
index 75db002..081f2b3 100644
--- a/requirements/extra.txt
+++ b/requirements/extra.txt
@@ -1,4 +1,4 @@
 lxml>=4.6
-pygraphviz>=1.9
+pygraphviz>=1.10
 pydot>=1.4.2
 sympy>=1.10
diff --git a/requirements/release.txt b/requirements/release.txt
index 465bec5..3e4d567 100644
--- a/requirements/release.txt
+++ b/requirements/release.txt
@@ -1,3 +1,3 @@
-build>=0.8
+build>=0.10
 twine>=4.0
-wheel>=0.37
+wheel>=0.40
diff --git a/ruff.toml b/ruff.toml
new file mode 100644
index 0000000..e0b0e30
--- /dev/null
+++ b/ruff.toml
@@ -0,0 +1,24 @@
+select = [
+    "I",       # isort
+    "UP",      # pyupgrade
+    "C4",      # flake8-comprehensions
+    "E713",    # use 'key not in list'
+    "PIE",     # flake8-pie
+    "PGH003",  # forbid blanket 'type: ignore' comments
+    "PLR0402", # useless import alias
+    "SIM101",  # merge 'isinstance' calls
+    "SIM109",  # use a tuple for multiple comparisons
+    "SIM110",  # convert loop to 'any'
+    "SIM111",  # convert loop to 'all'
+    "SIM118",  # use 'key in dict'
+    "SIM2",    # simplify boolean comparisons
+]
+
+target-version = "py38"
+
+[per-file-ignores]
+"__init__.py" = ["I"]
+"setup.py" = ["I"]
+"examples/*.py" = ["I"]
+"doc/*.py" = ["I"]
+"tools/*.py" = ["I"]
diff --git a/setup.py b/setup.py
index e201ec3..30a7574 100644
--- a/setup.py
+++ b/setup.py
@@ -68,7 +68,6 @@ packages = [
     "networkx.algorithms",
     "networkx.algorithms.assortativity",
     "networkx.algorithms.bipartite",
-    "networkx.algorithms.node_classification",
     "networkx.algorithms.centrality",
     "networkx.algorithms.community",
     "networkx.algorithms.components",
@@ -90,7 +89,6 @@ packages = [
     "networkx.readwrite",
     "networkx.readwrite.json_graph",
     "networkx.tests",
-    "networkx.testing",
     "networkx.utils",
 ]
 
@@ -124,13 +122,12 @@ dd = os.path.join(docdirbase, "examples", "javascript/force")
 pp = os.path.join("examples", "javascript/force")
 data.append((dd, glob(os.path.join(pp, "*"))))
 
-# add the tests
+# add the tests subpackage(s)
 package_data = {
     "networkx": ["tests/*.py"],
     "networkx.algorithms": ["tests/*.py"],
     "networkx.algorithms.assortativity": ["tests/*.py"],
     "networkx.algorithms.bipartite": ["tests/*.py"],
-    "networkx.algorithms.node_classification": ["tests/*.py"],
     "networkx.algorithms.centrality": ["tests/*.py"],
     "networkx.algorithms.community": ["tests/*.py"],
     "networkx.algorithms.components": ["tests/*.py"],
@@ -151,10 +148,14 @@ package_data = {
     "networkx.linalg": ["tests/*.py"],
     "networkx.readwrite": ["tests/*.py"],
     "networkx.readwrite.json_graph": ["tests/*.py"],
-    "networkx.testing": ["tests/*.py"],
     "networkx.utils": ["tests/*.py"],
 }
 
+# Loopback dispatcher required for testing nx._dispatch decorator
+entry_points = {
+    "networkx.plugins": "nx-loopback = networkx.classes.tests.dispatch_interface:dispatcher"
+}
+
 
 def parse_requirements_file(filename):
     with open(filename) as fid:
@@ -173,7 +174,6 @@ with open("README.rst") as fh:
     long_description = fh.read()
 
 if __name__ == "__main__":
-
     setup(
         name=name,
         version=version,
@@ -193,6 +193,7 @@ if __name__ == "__main__":
         package_data=package_data,
         install_requires=install_requires,
         extras_require=extras_require,
+        entry_points=entry_points,
         python_requires=">=3.8",
         zip_safe=False,
     )
diff --git a/tools/team_list.py b/tools/team_list.py
index ce2c271..8617a4a 100644
--- a/tools/team_list.py
+++ b/tools/team_list.py
@@ -6,8 +6,10 @@ import requests
 project = "networkx"
 core = "core-developers"
 emeritus = "emeritus-developers"
+steering = "steering-council"
 core_url = f"https://api.github.com/orgs/{project}/teams/{core}/members"
 emeritus_url = f"https://api.github.com/orgs/{project}/teams/{emeritus}/members"
+steering_url = f"https://api.github.com/orgs/{project}/teams/{steering}/members"
 
 
 token = os.environ.get("GH_TOKEN", None)
@@ -33,6 +35,9 @@ core = sorted(resp, key=lambda user: user["login"].lower())
 resp = api(emeritus_url)
 emeritus = sorted(resp, key=lambda user: user["login"].lower())
 
+resp = api(steering_url)
+steering = sorted(resp, key=lambda user: user["login"].lower())
+
 
 def render_team(team):
     for member in team:
@@ -61,6 +66,8 @@ def render_team(team):
 
 print(
     """
+.. _core-developers-team:
+
 Core Developers
 ---------------
 
@@ -83,3 +90,16 @@ We thank these previously-active core developers for their contributions to Netw
 )
 
 render_team(emeritus)
+
+print(
+    """
+.. _steering-council-team:
+
+Steering Council
+----------------
+
+
+"""
+)
+
+render_team(steering)

Debdiff

[The following lists of changes regard files as different if they have different names, permissions or owners.]

Files in second set of .debs but not in first

-rw-r--r--  root/root   /usr/lib/python3/dist-packages/networkx-3.1.egg-info/PKG-INFO
-rw-r--r--  root/root   /usr/lib/python3/dist-packages/networkx-3.1.egg-info/dependency_links.txt
-rw-r--r--  root/root   /usr/lib/python3/dist-packages/networkx-3.1.egg-info/entry_points.txt
-rw-r--r--  root/root   /usr/lib/python3/dist-packages/networkx-3.1.egg-info/not-zip-safe
-rw-r--r--  root/root   /usr/lib/python3/dist-packages/networkx-3.1.egg-info/requires.txt
-rw-r--r--  root/root   /usr/lib/python3/dist-packages/networkx-3.1.egg-info/top_level.txt
-rw-r--r--  root/root   /usr/lib/python3/dist-packages/networkx/algorithms/centrality/laplacian.py
-rw-r--r--  root/root   /usr/lib/python3/dist-packages/networkx/algorithms/centrality/tests/test_laplacian_centrality.py
-rw-r--r--  root/root   /usr/lib/python3/dist-packages/networkx/algorithms/isomorphism/tests/test_vf2pp.py
-rw-r--r--  root/root   /usr/lib/python3/dist-packages/networkx/algorithms/isomorphism/tests/test_vf2pp_helpers.py
-rw-r--r--  root/root   /usr/lib/python3/dist-packages/networkx/algorithms/isomorphism/vf2pp.py
-rw-r--r--  root/root   /usr/lib/python3/dist-packages/networkx/algorithms/node_classification.py
-rw-r--r--  root/root   /usr/lib/python3/dist-packages/networkx/classes/backends.py
-rw-r--r--  root/root   /usr/lib/python3/dist-packages/networkx/classes/tests/dispatch_interface.py
-rw-r--r--  root/root   /usr/lib/python3/dist-packages/networkx/classes/tests/test_backends.py
-rw-r--r--  root/root   /usr/lib/python3/dist-packages/networkx/drawing/nx_latex.py
-rw-r--r--  root/root   /usr/lib/python3/dist-packages/networkx/drawing/tests/test_latex.py
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/LICENSE.txt
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/3d_drawing/README.txt
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/3d_drawing/mayavi2_spring.py
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/3d_drawing/plot_basic.py
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/README.txt
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/algorithms/README.txt
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/algorithms/WormNet.v3.benchmark.txt
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/algorithms/hartford_drug.edgelist
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/algorithms/plot_beam_search.py
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/algorithms/plot_betweenness_centrality.py
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/algorithms/plot_blockmodel.py
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/algorithms/plot_circuits.py
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/algorithms/plot_davis_club.py
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/algorithms/plot_dedensification.py
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/algorithms/plot_girvan_newman.py
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/algorithms/plot_iterated_dynamical_systems.py
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/algorithms/plot_krackhardt_centrality.py
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/algorithms/plot_maximum_independent_set.py
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/algorithms/plot_parallel_betweenness.py
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/algorithms/plot_rcm.py
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/algorithms/plot_snap.py
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/algorithms/plot_subgraphs.py
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/basic/README.txt
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/basic/plot_properties.py
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/basic/plot_read_write.py
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/basic/plot_simple_graph.py
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/drawing/README.txt
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/drawing/chess_masters_WCC.pgn.bz2
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/drawing/knuth_miles.txt.gz
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/drawing/plot_center_node.py
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/drawing/plot_chess_masters.py
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/drawing/plot_custom_node_icons.py
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/drawing/plot_degree.py
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/drawing/plot_directed.py
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/drawing/plot_edge_colormap.py
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/drawing/plot_ego_graph.py
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/drawing/plot_eigenvalues.py
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/drawing/plot_four_grids.py
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/drawing/plot_house_with_colors.py
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/drawing/plot_knuth_miles.py
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/drawing/plot_labels_and_colors.py
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/drawing/plot_multipartite_graph.py
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/drawing/plot_node_colormap.py
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/drawing/plot_rainbow_coloring.py
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/drawing/plot_random_geometric_graph.py
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/drawing/plot_sampson.py
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/drawing/plot_selfloops.py
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/drawing/plot_simple_path.py
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/drawing/plot_spectral_grid.py
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/drawing/plot_tsp.py
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/drawing/plot_unix_email.py
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/drawing/plot_weighted_graph.py
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/drawing/unix_email.mbox
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/graph/README.txt
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/graph/plot_dag_layout.py
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/graph/plot_degree_sequence.py
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/graph/plot_erdos_renyi.py
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/graph/plot_expected_degree_sequence.py
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/graph/plot_football.py
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/graph/plot_karate_club.py
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/graph/plot_morse_trie.py
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/graph/plot_mst.py
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/graph/plot_napoleon_russian_campaign.py
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/graph/plot_roget.py
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/graph/plot_triad_types.py
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/graph/plot_words.py
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/graph/roget_dat.txt.gz
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/graph/words_dat.txt.gz
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/subclass/README.txt
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/subclass/plot_antigraph.py
-rw-r--r--  root/root   /usr/share/doc/networkx-3.1/examples/subclass/plot_printgraph.py
-rw-r--r--  root/root   /usr/share/doc/python3-networkx/examples/algorithms/plot_girvan_newman.py
-rw-r--r--  root/root   /usr/share/doc/python3-networkx/examples/algorithms/plot_maximum_independent_set.py
-rw-r--r--  root/root   /usr/share/doc/python3-networkx/examples/drawing/plot_center_node.py
-rw-r--r--  root/root   /usr/share/doc/python3-networkx/examples/graph/plot_dag_layout.py
-rw-r--r--  root/root   /usr/share/doc/python3-networkx/examples/graph/plot_mst.py
-rw-r--r--  root/root   /usr/share/doc/python3-networkx/examples/graph/plot_triad_types.py

Files in first set of .debs but not in second

-rw-r--r--  root/root   /usr/lib/python3/dist-packages/networkx-2.8.8.egg-info/PKG-INFO
-rw-r--r--  root/root   /usr/lib/python3/dist-packages/networkx-2.8.8.egg-info/dependency_links.txt
-rw-r--r--  root/root   /usr/lib/python3/dist-packages/networkx-2.8.8.egg-info/not-zip-safe
-rw-r--r--  root/root   /usr/lib/python3/dist-packages/networkx-2.8.8.egg-info/requires.txt
-rw-r--r--  root/root   /usr/lib/python3/dist-packages/networkx-2.8.8.egg-info/top_level.txt
-rw-r--r--  root/root   /usr/lib/python3/dist-packages/networkx/algorithms/node_classification/__init__.py
-rw-r--r--  root/root   /usr/lib/python3/dist-packages/networkx/algorithms/node_classification/hmn.py
-rw-r--r--  root/root   /usr/lib/python3/dist-packages/networkx/algorithms/node_classification/lgc.py
-rw-r--r--  root/root   /usr/lib/python3/dist-packages/networkx/algorithms/node_classification/utils.py
-rw-r--r--  root/root   /usr/lib/python3/dist-packages/networkx/algorithms/tests/test_node_classification_deprecations.py
-rw-r--r--  root/root   /usr/lib/python3/dist-packages/networkx/classes/ordered.py
-rw-r--r--  root/root   /usr/lib/python3/dist-packages/networkx/classes/tests/test_ordered.py
-rw-r--r--  root/root   /usr/lib/python3/dist-packages/networkx/readwrite/gpickle.py
-rw-r--r--  root/root   /usr/lib/python3/dist-packages/networkx/readwrite/json_graph/jit.py
-rw-r--r--  root/root   /usr/lib/python3/dist-packages/networkx/readwrite/json_graph/tests/test_jit.py
-rw-r--r--  root/root   /usr/lib/python3/dist-packages/networkx/readwrite/nx_shp.py
-rw-r--r--  root/root   /usr/lib/python3/dist-packages/networkx/readwrite/nx_yaml.py
-rw-r--r--  root/root   /usr/lib/python3/dist-packages/networkx/readwrite/tests/test_getattr_nxyaml_removal.py
-rw-r--r--  root/root   /usr/lib/python3/dist-packages/networkx/readwrite/tests/test_gpickle.py
-rw-r--r--  root/root   /usr/lib/python3/dist-packages/networkx/readwrite/tests/test_shp.py
-rw-r--r--  root/root   /usr/lib/python3/dist-packages/networkx/testing/__init__.py
-rw-r--r--  root/root   /usr/lib/python3/dist-packages/networkx/testing/test.py
-rw-r--r--  root/root   /usr/lib/python3/dist-packages/networkx/testing/tests/__init__.py
-rw-r--r--  root/root   /usr/lib/python3/dist-packages/networkx/testing/tests/test_utils.py
-rw-r--r--  root/root   /usr/lib/python3/dist-packages/networkx/testing/utils.py
-rw-r--r--  root/root   /usr/lib/python3/dist-packages/networkx/utils/contextmanagers.py
-rw-r--r--  root/root   /usr/lib/python3/dist-packages/networkx/utils/tests/test_contextmanager.py
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/LICENSE.txt
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/3d_drawing/README.txt
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/3d_drawing/mayavi2_spring.py
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/3d_drawing/plot_basic.py
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/README.txt
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/algorithms/README.txt
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/algorithms/WormNet.v3.benchmark.txt
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/algorithms/hartford_drug.edgelist
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/algorithms/plot_beam_search.py
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/algorithms/plot_betweenness_centrality.py
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/algorithms/plot_blockmodel.py
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/algorithms/plot_circuits.py
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/algorithms/plot_davis_club.py
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/algorithms/plot_dedensification.py
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/algorithms/plot_iterated_dynamical_systems.py
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/algorithms/plot_krackhardt_centrality.py
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/algorithms/plot_parallel_betweenness.py
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/algorithms/plot_rcm.py
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/algorithms/plot_snap.py
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/algorithms/plot_subgraphs.py
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/basic/README.txt
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/basic/plot_properties.py
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/basic/plot_read_write.py
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/basic/plot_simple_graph.py
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/drawing/README.txt
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/drawing/chess_masters_WCC.pgn.bz2
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/drawing/knuth_miles.txt.gz
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/drawing/plot_chess_masters.py
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/drawing/plot_custom_node_icons.py
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/drawing/plot_degree.py
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/drawing/plot_directed.py
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/drawing/plot_edge_colormap.py
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/drawing/plot_ego_graph.py
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/drawing/plot_eigenvalues.py
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/drawing/plot_four_grids.py
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/drawing/plot_house_with_colors.py
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/drawing/plot_knuth_miles.py
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/drawing/plot_labels_and_colors.py
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/drawing/plot_multipartite_graph.py
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/drawing/plot_node_colormap.py
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/drawing/plot_rainbow_coloring.py
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/drawing/plot_random_geometric_graph.py
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/drawing/plot_sampson.py
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/drawing/plot_selfloops.py
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/drawing/plot_simple_path.py
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/drawing/plot_spectral_grid.py
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/drawing/plot_tsp.py
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/drawing/plot_unix_email.py
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/drawing/plot_weighted_graph.py
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/drawing/unix_email.mbox
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/graph/README.txt
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/graph/plot_degree_sequence.py
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/graph/plot_erdos_renyi.py
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/graph/plot_expected_degree_sequence.py
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/graph/plot_football.py
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/graph/plot_karate_club.py
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/graph/plot_morse_trie.py
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/graph/plot_napoleon_russian_campaign.py
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/graph/plot_roget.py
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/graph/plot_words.py
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/graph/roget_dat.txt.gz
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/graph/words_dat.txt.gz
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/subclass/README.txt
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/subclass/plot_antigraph.py
-rw-r--r--  root/root   /usr/share/doc/networkx-2.8.8/examples/subclass/plot_printgraph.py

No differences were encountered in the control files

More details

Full run details