New Upstream Release - osmnx

Ready changes

Summary

Merged new upstream version: 1.3.1+ds (was: 1.3.0+ds).

Diff

diff --git a/CHANGELOG.md b/CHANGELOG.md
index e643288..6372ce6 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,6 +1,17 @@
 # Change log
 
+## 1.3.1 (2023-05-24)
+
+  - improve DNS resolution when using proxies or on networks blocking DNS-over-HTTPS
+  - improve processing of per-lane values when adding edge speeds
+  - improve file writing in save_graph_xml function
+  - ensure node coordinates are non-null and convertible to float in the add_edge_lengths function
+  - ignore ways tagged highway=no or highway=razed in built-in filters
+  - do not assume an edge with key=0 exists between each node pair when simplifying graph
+  - drop dateutil package dependency
+
 ## 1.3.0 (2023-01-01)
+
   - fully support Shapely 2.0 and drop support for Shapely 1.x
   - drop RTree package dependency
   - much faster nearest edges search using STRTree index
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 8877a4d..592d867 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -29,4 +29,6 @@ Thanks for using OSMnx and for considering contributing to it by opening an issu
     - `isort` sorted imports
     - `numpy` style docstrings
 
+This project requires minimum Python and NumPy versions in accordance with [NEP 29](https://numpy.org/neps/nep-0029-deprecation_policy.html).
+
 Every piece of software is a work in progress. This project is the result of many hours of work contributed freely by myself and the many people that build the projects it depends on. Thank you for contributing!
diff --git a/debian/changelog b/debian/changelog
index ff66a64..72323ca 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -1,3 +1,9 @@
+osmnx (1.3.1+ds-1) UNRELEASED; urgency=low
+
+  * New upstream release.
+
+ -- Debian Janitor <janitor@jelmer.uk>  Sun, 11 Jun 2023 14:10:07 -0000
+
 osmnx (1.3.0+ds-1) experimental; urgency=medium
 
   * New upstream minor version.
diff --git a/debian/patches/adhoc-fix-elevation-multiprocessing.patch b/debian/patches/adhoc-fix-elevation-multiprocessing.patch
index c8d19d0..ec2a7e2 100644
--- a/debian/patches/adhoc-fix-elevation-multiprocessing.patch
+++ b/debian/patches/adhoc-fix-elevation-multiprocessing.patch
@@ -7,9 +7,11 @@ Forwarded: https://github.com/gboeing/osmnx/issues/769
 Author: Jerome Benoit <calculus@rezozer.net>
 Last-Update: 2021-11-04
 
---- a/osmnx/elevation.py
-+++ b/osmnx/elevation.py
-@@ -93,7 +93,7 @@
+Index: osmnx.git/osmnx/elevation.py
+===================================================================
+--- osmnx.git.orig/osmnx/elevation.py
++++ osmnx.git/osmnx/elevation.py
+@@ -93,7 +93,7 @@ def add_node_elevations_raster(G, filepa
          elevs = dict(_query_raster(nodes, filepath, band))
      else:
          # divide nodes into equal-sized chunks for multiprocessing
diff --git a/debian/patches/debianization-tests-extra.patch b/debian/patches/debianization-tests-extra.patch
index 1310fc3..f8bf40d 100644
--- a/debian/patches/debianization-tests-extra.patch
+++ b/debian/patches/debianization-tests-extra.patch
@@ -5,9 +5,11 @@ Forwarded: not-needed
 Author: Jerome Benoit <calculus@rezozer.net>
 Last-Update: 2021-11-03
 
---- a/tests/test_osmnx.py
-+++ b/tests/test_osmnx.py
-@@ -199,6 +199,58 @@
+Index: osmnx.git/tests/test_osmnx.py
+===================================================================
+--- osmnx.git.orig/tests/test_osmnx.py
++++ osmnx.git/tests/test_osmnx.py
+@@ -200,6 +200,58 @@ def test_elevation():
      G = ox.add_edge_grades(G, add_absolute=True)
  
  
@@ -64,5 +66,5 @@ Last-Update: 2021-11-03
 +
 +
  def test_routing():
- 
      G = ox.graph_from_address(address=address, dist=500, dist_type="bbox", network_type="bike")
+ 
diff --git a/debian/patches/debianization.patch b/debian/patches/debianization.patch
index 6681847..2fa2711 100644
--- a/debian/patches/debianization.patch
+++ b/debian/patches/debianization.patch
@@ -6,15 +6,17 @@ Forwarded: not-needed
 Author: Jerome Benoit <calculus@rezozer.net>
 Last-Update: 2021-06-20
 
---- a/tests/lint_test.sh
-+++ b/tests/lint_test.sh
+Index: osmnx.git/tests/lint_test.sh
+===================================================================
+--- osmnx.git.orig/tests/lint_test.sh
++++ osmnx.git/tests/lint_test.sh
 @@ -1,4 +1,4 @@
 -#!/bin/bash
 +#!/bin/sh
  
  # exit on error
  set -e
-@@ -19,14 +19,14 @@
+@@ -19,14 +19,14 @@ flake8 .
  # lint the docstrings
  pydocstyle .
  
diff --git a/docs/source/conf.py b/docs/source/conf.py
index 3b001cf..1ca3ff5 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -24,7 +24,6 @@ sys.path.insert(0, str(Path().resolve().parent.parent))
 
 # mock import these packages because readthedocs doesn't have them installed
 autodoc_mock_imports = [
-    "dateutil",
     "geopandas",
     "matplotlib",
     "matplotlib.cm",
@@ -58,7 +57,7 @@ author = "Geoff Boeing"
 # The version info for the project you're documenting, acts as replacement for
 # |version| and |release|, also used in various other places throughout the
 # built documents.
-version = release = "1.3.0"
+version = release = "1.3.1"
 
 # If your documentation needs a minimal Sphinx version, state it here.
 #
diff --git a/osmnx/_version.py b/osmnx/_version.py
index def9f69..77f81be 100644
--- a/osmnx/_version.py
+++ b/osmnx/_version.py
@@ -1,3 +1,3 @@
 """OSMnx package version."""
 
-__version__ = "1.3.0"
+__version__ = "1.3.1"
diff --git a/osmnx/distance.py b/osmnx/distance.py
index 7c57ca1..fb43a8e 100644
--- a/osmnx/distance.py
+++ b/osmnx/distance.py
@@ -147,8 +147,10 @@ def add_edge_lengths(G, precision=3, edges=None):
     try:
         # two-dimensional array of coordinates: y0, x0, y1, x1
         c = np.array([(y[u], x[u], y[v], x[v]) for u, v, k in uvk])
-    except KeyError:  # pragma: no cover
-        raise KeyError("some edges missing nodes, possibly due to input data clipping issue")
+        # ensure all coordinates can be converted to float and are non-null
+        assert not np.isnan(c.astype(float)).any()
+    except (AssertionError, KeyError):  # pragma: no cover
+        raise ValueError("some edges missing nodes, possibly due to input data clipping issue")
 
     # calculate great circle distances, round, and fill nulls with zeros
     dists = great_circle_vec(c[:, 0], c[:, 1], c[:, 2], c[:, 3]).round(precision)
@@ -290,7 +292,6 @@ def nearest_edges(G, X, Y, interpolate=None, return_dist=False):
 
     # if no interpolation distance was provided
     if interpolate is None:
-
         # build the r-tree spatial index by position for subsequent iloc
         rtree = STRtree(geoms)
 
@@ -305,7 +306,6 @@ def nearest_edges(G, X, Y, interpolate=None, return_dist=False):
 
     # otherwise, if interpolation distance was provided
     else:
-
         # interpolate points along edges to index with k-d tree or ball tree
         uvk_xy = []
         for uvk, geom in zip(geoms.index, geoms.values):
@@ -349,7 +349,7 @@ def _single_shortest_path(G, orig, dest, weight):
     Solve the shortest path from an origin node to a destination node.
 
     This function is a convenience wrapper around networkx.shortest_path, with
-    exception handling for unsolvable paths.
+    exception handling for unsolvable paths. It uses Dijkstra's algorithm.
 
     Parameters
     ----------
@@ -368,7 +368,7 @@ def _single_shortest_path(G, orig, dest, weight):
         list of node IDs constituting the shortest path
     """
     try:
-        return nx.shortest_path(G, orig, dest, weight=weight)
+        return nx.shortest_path(G, orig, dest, weight=weight, method="dijkstra")
     except nx.exception.NetworkXNoPath:  # pragma: no cover
         utils.log(f"Cannot solve path from {orig} to {dest}")
         return None
@@ -378,13 +378,13 @@ def shortest_path(G, orig, dest, weight="length", cpus=1):
     """
     Solve shortest path from origin node(s) to destination node(s).
 
-    If `orig` and `dest` are single node IDs, this will return a list of the
-    nodes constituting the shortest path between them.  If `orig` and `dest`
-    are lists of node IDs, this will return a list of lists of the nodes
-    constituting the shortest path between each origin-destination pair. If a
-    path cannot be solved, this will return None for that path. You can
-    parallelize solving multiple paths with the `cpus` parameter, but be
-    careful to not exceed your available RAM.
+    Uses Dijkstra's algorithm. If `orig` and `dest` are single node IDs, this
+    will return a list of the nodes constituting the shortest path between
+    them. If `orig` and `dest` are lists of node IDs, this will return a list
+    of lists of the nodes constituting the shortest path between each
+    origin-destination pair. If a path cannot be solved, this will return None
+    for that path. You can parallelize solving multiple paths with the `cpus`
+    parameter, but be careful to not exceed your available RAM.
 
     See also `k_shortest_paths` to solve multiple shortest paths between a
     single origin and destination. For additional functionality or different
@@ -446,7 +446,8 @@ def k_shortest_paths(G, orig, dest, k, weight="length"):
     """
     Solve `k` shortest paths from an origin node to a destination node.
 
-    See also `shortest_path` to get just the one shortest path.
+    Uses Yen's algorithm. See also `shortest_path` to solve just the one
+    shortest path.
 
     Parameters
     ----------
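
For context, a minimal sketch exercising the stricter coordinate validation in add_edge_lengths and the now-pinned Dijkstra routing, assuming osmnx 1.3.1 and networkx are installed (node IDs and coordinates here are arbitrary):

    import networkx as nx
    import osmnx as ox

    G = nx.MultiDiGraph(crs="epsg:4326")
    G.add_node(1, x=-122.404771, y=37.794302)
    G.add_node(2, x=-122.401429, y=37.790623)
    G.add_edge(1, 2, key=0)

    # 1.3.1 raises ValueError (not KeyError) if any node coordinate is
    # null or not convertible to float
    G = ox.distance.add_edge_lengths(G)

    # shortest_path now explicitly passes method="dijkstra" to networkx
    print(ox.shortest_path(G, 1, 2, weight="length"))  # [1, 2]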
diff --git a/osmnx/downloader.py b/osmnx/downloader.py
index 7622611..5d700dc 100644
--- a/osmnx/downloader.py
+++ b/osmnx/downloader.py
@@ -1,19 +1,18 @@
 """Interact with the OSM APIs."""
 
-import datetime as dt
 import json
 import logging as lg
 import re
 import socket
 import time
 from collections import OrderedDict
+from datetime import datetime as dt
 from hashlib import sha1
 from pathlib import Path
 from urllib.parse import urlparse
 
 import numpy as np
 import requests
-from dateutil import parser as date_parser
 
 from . import projection
 from . import settings
@@ -49,7 +48,7 @@ def _get_osm_filter(network_type):
     filters["drive"] = (
         f'["highway"]["area"!~"yes"]{settings.default_access}'
         f'["highway"!~"abandoned|bridleway|bus_guideway|construction|corridor|cycleway|elevator|'
-        f"escalator|footway|path|pedestrian|planned|platform|proposed|raceway|service|"
+        f"escalator|footway|no|path|pedestrian|planned|platform|proposed|raceway|razed|service|"
         f'steps|track"]'
         f'["motor_vehicle"!~"no"]["motorcar"!~"no"]'
         f'["service"!~"alley|driveway|emergency_access|parking|parking_aisle|private"]'
@@ -59,7 +58,8 @@ def _get_osm_filter(network_type):
     filters["drive_service"] = (
         f'["highway"]["area"!~"yes"]{settings.default_access}'
         f'["highway"!~"abandoned|bridleway|bus_guideway|construction|corridor|cycleway|elevator|'
-        f'escalator|footway|path|pedestrian|planned|platform|proposed|raceway|steps|track"]'
+        f"escalator|footway|no|path|pedestrian|planned|platform|proposed|raceway|razed|steps|"
+        f'track"]'
         f'["motor_vehicle"!~"no"]["motorcar"!~"no"]'
         f'["service"!~"emergency_access|parking|parking_aisle|private"]'
     )
@@ -71,8 +71,8 @@ def _get_osm_filter(network_type):
     # filter ignores such cycleways.
     filters["walk"] = (
         f'["highway"]["area"!~"yes"]{settings.default_access}'
-        f'["highway"!~"abandoned|bus_guideway|construction|cycleway|motor|planned|platform|'
-        f'proposed|raceway"]'
+        f'["highway"!~"abandoned|bus_guideway|construction|cycleway|motor|no|planned|platform|'
+        f'proposed|raceway|razed"]'
         f'["foot"!~"no"]["service"!~"private"]'
     )
 
@@ -81,7 +81,7 @@ def _get_osm_filter(network_type):
     filters["bike"] = (
         f'["highway"]["area"!~"yes"]{settings.default_access}'
         f'["highway"!~"abandoned|bus_guideway|construction|corridor|elevator|escalator|footway|'
-        f'motor|planned|platform|proposed|raceway|steps"]'
+        f'motor|no|planned|platform|proposed|raceway|razed|steps"]'
         f'["bicycle"!~"no"]["service"!~"private"]'
     )
 
@@ -89,20 +89,21 @@ def _get_osm_filter(network_type):
     # that is private-access only
     filters["all"] = (
         f'["highway"]["area"!~"yes"]{settings.default_access}'
-        f'["highway"!~"abandoned|construction|planned|platform|proposed|raceway"]'
+        f'["highway"!~"abandoned|construction|no|planned|platform|proposed|raceway|razed"]'
         f'["service"!~"private"]'
     )
 
     # to download all ways, including private-access ones, just filter out
     # everything not currently in use
-    filters[
-        "all_private"
-    ] = '["highway"]["area"!~"yes"]["highway"!~"abandoned|construction|planned|platform|proposed|raceway"]'
+    filters["all_private"] = (
+        '["highway"]["area"!~"yes"]["highway"!~"abandoned|construction|no|planned|platform|'
+        'proposed|raceway|razed"]'
+    )
 
     if network_type in filters:
         osm_filter = filters[network_type]
     else:  # pragma: no cover
-        raise ValueError(f'Unrecognized network_type "{network_type}"')
+        raise ValueError(f"Unrecognized network_type {network_type!r}")
 
     return osm_filter
 
@@ -137,7 +138,6 @@ def _save_to_cache(url, response_json, sc):
     None
     """
     if settings.use_cache:
-
         if sc != 200:
             utils.log(f"Did not save to cache because status code is {sc}")
 
@@ -156,7 +156,7 @@ def _save_to_cache(url, response_json, sc):
 
             # dump to json, and save to file
             cache_filepath.write_text(json.dumps(response_json), encoding="utf-8")
-            utils.log(f'Saved response to cache file "{cache_filepath}"')
+            utils.log(f"Saved response to cache file {cache_filepath!r}")
 
 
 def _url_in_cache(url):
@@ -202,7 +202,6 @@ def _retrieve_from_cache(url, check_remark=False):
     """
     # if the tool is configured to use the cache
     if settings.use_cache:
-
         # return cached response for this url if exists, otherwise return None
         cache_filepath = _url_in_cache(url)
         if cache_filepath is not None:
@@ -211,10 +210,10 @@ def _retrieve_from_cache(url, check_remark=False):
             # return None if check_remark is True and there is a server
             # remark in the cached response
             if check_remark and "remark" in response_json:
-                utils.log(f'Found remark, so ignoring cache file "{cache_filepath}"')
+                utils.log(f"Found remark, so ignoring cache file {cache_filepath!r}")
                 return None
 
-            utils.log(f'Retrieved response from cache file "{cache_filepath}"')
+            utils.log(f"Retrieved response from cache file {cache_filepath!r}")
             return response_json
 
 
@@ -250,48 +249,58 @@ def _get_http_headers(user_agent=None, referer=None, accept_language=None):
     return headers
 
 
-def _get_host_by_name(host):
+def _resolve_host_via_doh(hostname):
     """
-    Resolve IP address from host using Google's public API for DNS over HTTPS.
+    Resolve hostname to IP address via Google's public DNS-over-HTTPS API.
 
     Necessary fallback as socket.gethostbyname will not always work when using
     a proxy. See https://developers.google.com/speed/public-dns/docs/doh/json
+    If the user has set `settings.doh_url_template=None` or if resolution
+    fails (e.g., due to local network blocking DNS-over-HTTPS) the hostname
+    itself will be returned instead. Note that this means that server slot
+    management may be violated: see `_config_dns` documentation for details.
 
     Parameters
     ----------
-    host : string
-        the host to consistently resolve the IP address of
+    hostname : string
+        the hostname to consistently resolve the IP address of
 
     Returns
     -------
     ip_address : string
-        resolved IP address
+        resolved IP address of host, or hostname itself if resolution failed
     """
-    dns_url = f"https://8.8.8.8/resolve?name={host}"
-    response = requests.get(dns_url)
-    data = response.json()
+    if settings.doh_url_template is None:
+        # if user has set the url template to None, return hostname itself
+        utils.log("User set `doh_url_template=None`, requesting host by name", level=lg.WARNING)
+        return hostname
 
-    # status = 0 means NOERROR: standard DNS response code
-    if response.ok and data["Status"] == 0:
-        ip_address = data["Answer"][0]["data"]
-        utils.log(f"Google resolved '{host}' to '{ip_address}'")
-        return ip_address
+    try:
+        response = requests.get(settings.doh_url_template.format(hostname=hostname))
+        data = response.json()
+        if response.ok and data["Status"] == 0:
+            # status 0 means NOERROR, so return the IP address
+            return data["Answer"][0]["data"]
+        else:
+            raise requests.exceptions.RequestException
 
-    # in case host could not be resolved return the host itself
-    else:
-        utils.log(f"Google could not resolve '{host}'. Response status: {data['Status']}")
-        return host
+    # if we cannot reach DoH server or cannot resolve host, return hostname itself
+    except requests.exceptions.RequestException:
+        utils.log(
+            f"Failed to resolve {hostname!r} IP via DoH, requesting host by name", level=lg.ERROR
+        )
+        return hostname
 
 
 def _config_dns(url):
     """
-    Force socket.getaddrinfo to use IP address instead of host.
+    Force socket.getaddrinfo to use IP address instead of hostname.
 
     Resolves the URL's domain to an IP address so that we use the same server
     for both 1) checking the necessary pause duration and 2) sending the query
     itself even if there is round-robin redirecting among multiple server
     machines on the server-side. Mutates the getaddrinfo function so it uses
-    the same IP address everytime it finds the host name in the URL.
+    the same IP address every time it finds the hostname in the URL.
 
     For example, the domain overpass-api.de just redirects to one of its
     subdomains (currently z.overpass-api.de and lz4.overpass-api.de). So if we
@@ -309,17 +318,21 @@ def _config_dns(url):
     -------
     None
     """
-    host = urlparse(url).netloc.split(":")[0]
+    hostname = urlparse(url).netloc.split(":")[0]
     try:
-        ip = socket.gethostbyname(host)
+        ip = socket.gethostbyname(hostname)
     except socket.gaierror:  # pragma: no cover
-        # this error occurs sometimes when using a proxy. instead, you must
-        # get IP address using google's public JSON API for DNS over HTTPS
-        ip = _get_host_by_name(host)[0]
+        # may occur when using a proxy, so instead resolve IP address via DoH
+        utils.log(
+            f"Encountered gaierror while trying to resolve {hostname!r}, trying again via DoH...",
+            level=lg.ERROR,
+        )
+        ip = _resolve_host_via_doh(hostname)
 
+    # mutate socket.getaddrinfo to map hostname -> IP address
     def _getaddrinfo(*args):
-        if args[0] == host:
-            utils.log(f"Resolved {host} to {ip}")
+        if args[0] == hostname:
+            utils.log(f"Resolved {hostname!r} to {ip!r}")
             return _original_getaddrinfo(ip, *args[1:])
         else:
             return _original_getaddrinfo(*args)
@@ -378,8 +391,8 @@ def _get_pause(base_endpoint, recursive_delay=5, default_duration=60):
         # if first token is 'Slot', it tells you when your slot will be free
         if status_first_token == "Slot":
             utc_time_str = status.split(" ")[3]
-            utc_time = date_parser.parse(utc_time_str).replace(tzinfo=None)
-            pause = int(np.ceil((utc_time - dt.datetime.utcnow()).total_seconds()))
+            utc_time = dt.strptime(utc_time_str, "%Y-%m-%dT%H:%M:%SZ,")
+            pause = int(np.ceil((utc_time - dt.utcnow()).total_seconds()))
             pause = max(pause, 1)
 
         # if first token is 'Currently', it is currently running a query so
@@ -390,7 +403,7 @@ def _get_pause(base_endpoint, recursive_delay=5, default_duration=60):
 
         # any other status is unrecognized: log error, return default duration
         else:
-            utils.log(f'Unrecognized server status: "{status}"', level=lg.ERROR)
+            utils.log(f"Unrecognized server status: {status!r}", level=lg.ERROR)
             return default_duration
 
     return pause
@@ -462,7 +475,6 @@ def _create_overpass_query(polygon_coord_str, tags):
 
     tags_dict = {}
     for key, value in tags.items():
-
         if isinstance(value, bool):
             tags_dict[key] = value
 
@@ -490,13 +502,12 @@ def _create_overpass_query(polygon_coord_str, tags):
     components = []
     for d in tags_list:
         for key, value in d.items():
-
             if isinstance(value, bool):
                 # if bool (ie, True) just pass the key, no value
-                tag_str = f"['{key}'](poly:'{polygon_coord_str}');(._;>;);"
+                tag_str = f"[{key!r}](poly:{polygon_coord_str!r});(._;>;);"
             else:
                 # otherwise, pass "key"="value"
-                tag_str = f"['{key}'='{value}'](poly:'{polygon_coord_str}');(._;>;);"
+                tag_str = f"[{key!r}={value!r}](poly:{polygon_coord_str!r});(._;>;);"
 
             for kind in ("node", "way", "relation"):
                 components.append(f"({kind}{tag_str});")
@@ -544,7 +555,7 @@ def _osm_network_download(polygon, network_type, custom_filter):
     # pass each polygon exterior coordinates in the list to the API, one at a
     # time. The '>' makes it recurse so we get ways and the ways' nodes.
     for polygon_coord_str in polygon_coord_strs:
-        query_str = f"{overpass_settings};(way{osm_filter}(poly:'{polygon_coord_str}');>;);out;"
+        query_str = f"{overpass_settings};(way{osm_filter}(poly:{polygon_coord_str!r});>;);out;"
         response_json = overpass_request(data={"data": query_str})
         response_jsons.append(response_json)
     utils.log(
@@ -783,7 +794,7 @@ def overpass_request(data, pause=None, error_pause=60):
         try:
             response_json = response.json()
             if "remark" in response_json:
-                utils.log(f'Server remark: "{response_json["remark"]}"', level=lg.WARNING)
+                utils.log(f'Server remark: {response_json["remark"]!r}', level=lg.WARNING)
 
         except Exception:  # pragma: no cover
             if sc in {429, 504}:
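
The new fallback behavior mirrors the updated test_api_endpoints assertions later in this diff; since _resolve_host_via_doh is a private helper and this needs network access, the sketch below is illustrative only:

    import osmnx as ox

    # resolve via the default DoH endpoint (Google's JSON API over HTTPS)
    ip = ox.downloader._resolve_host_via_doh("overpass-api.de")

    # an unresolvable hostname is returned unchanged instead of raising
    assert ox.downloader._resolve_host_via_doh("AAAAAAAAAAA") == "AAAAAAAAAAA"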
diff --git a/osmnx/geocoder.py b/osmnx/geocoder.py
index e6f1fa0..f627552 100644
--- a/osmnx/geocoder.py
+++ b/osmnx/geocoder.py
@@ -39,10 +39,10 @@ def geocode(query):
         lat = float(response_json[0]["lat"])
         lng = float(response_json[0]["lon"])
         point = (lat, lng)
-        utils.log(f'Geocoded "{query}" to {point}')
+        utils.log(f"Geocoded {query!r} to {point}")
         return point
     else:
-        raise ValueError(f'Nominatim could not geocode query "{query}"')
+        raise ValueError(f"Nominatim could not geocode query {query!r}")
 
 
 def geocode_to_gdf(query, which_result=None, by_osmid=False, buffer_dist=None):
@@ -156,7 +156,7 @@ def _geocode_query_to_gdf(query, which_result, by_osmid):
     # choose the right result from the JSON response
     if not results:
         # if no results were returned, raise error
-        raise ValueError(f'Nominatim geocoder returned 0 results for query "{query}"')
+        raise ValueError(f"Nominatim geocoder returned 0 results for query {query!r}")
 
     elif by_osmid:
         # if searching by OSM ID, always take the first (ie, only) result
@@ -172,13 +172,13 @@ def _geocode_query_to_gdf(query, which_result, by_osmid):
 
     else:  # pragma: no cover
         # else, we got fewer results than which_result, raise error
-        msg = f'Nominatim geocoder only returned {len(results)} result(s) for query "{query}"'
+        msg = f"Nominatim geocoder only returned {len(results)} result(s) for query {query!r}"
         raise ValueError(msg)
 
     # if we got a non (Multi)Polygon geometry type (like a point), log warning
     geom_type = result["geojson"]["type"]
     if geom_type not in {"Polygon", "MultiPolygon"}:
-        msg = f'Nominatim geocoder returned a {geom_type} as the geometry for query "{query}"'
+        msg = f"Nominatim geocoder returned a {geom_type} as the geometry for query {query!r}"
         utils.log(msg, level=lg.WARNING)
 
     # build the GeoJSON feature from the chosen result
@@ -228,4 +228,4 @@ def _get_first_polygon(results, query):
             return result
 
     # if we never found a polygon, throw an error
-    raise ValueError(f'Nominatim could not geocode query "{query}" to polygonal boundaries')
+    raise ValueError(f"Nominatim could not geocode query {query!r} to polygonal boundaries")
diff --git a/osmnx/geometries.py b/osmnx/geometries.py
index 9663185..0504382 100644
--- a/osmnx/geometries.py
+++ b/osmnx/geometries.py
@@ -384,7 +384,6 @@ def _create_gdf(response_jsons, polygon, tags):
             # to dictionaries of coordinates, Shapely Points, LineStrings,
             # Polygons and MultiPolygons
             for element in response_json["elements"]:
-
                 # id numbers are only unique within element types
                 # create unique id from combination of type and id
                 unique_id = f"{element['type']}/{element['id']}"
@@ -565,7 +564,6 @@ def _parse_way_to_linestring_or_polygon(element, coords, polygon_features=_polyg
     # same) depending upon the tags the geometry could be a Shapely LineString
     # or Polygon
     elif element["nodes"][0] == element["nodes"][-1]:
-
         # determine if closed way represents LineString or Polygon
         if _is_closed_way_a_polygon(element):
             # if it is a Polygon
@@ -636,7 +634,6 @@ def _is_closed_way_a_polygon(element, polygon_features=_polygon_features):
 
     # if the element doesn't have any tags leave it as a Linestring
     if element_tags is not None:
-
         # if the element is specifically tagged 'area':'no' -> LineString
         if element_tags.get("area") == "no":
             pass
@@ -649,7 +646,6 @@ def _is_closed_way_a_polygon(element, polygon_features=_polygon_features):
 
             # for each key in the intersecting keys (if any found)
             for key in intersecting_keys:
-
                 # Get the key's value from the element's tags
                 key_value = element_tags.get(key)
 
@@ -667,7 +663,6 @@ def _is_closed_way_a_polygon(element, polygon_features=_polygon_features):
                 # if the key is for a blocklist i.e. tags that should not
                 # become Polygons
                 elif blocklist_or_passlist == "blocklist":
-
                     # if the value for that key in the element is not in
                     # the blocklist -> Polygon
                     if key_value not in polygon_features_values:
@@ -676,7 +671,6 @@ def _is_closed_way_a_polygon(element, polygon_features=_polygon_features):
                 # if the key is for a passlist i.e. specific tags should
                 # become Polygons
                 elif blocklist_or_passlist == "passlist":
-
                     # if the value for that key in the element is in the
                     # passlist -> Polygon
                     if key_value in polygon_features_values:
@@ -907,13 +901,11 @@ def _buffer_invalid_geometries(gdf):
     """
     # only apply the filters if the GeoDataFrame is not empty
     if not gdf.empty:
-
         # create a filter for rows with invalid geometries
         invalid_geometry_filter = ~gdf["geometry"].is_valid
 
         # if there are invalid geometries
         if invalid_geometry_filter.any():
-
             # get their unique_ids from the index
             invalid_geometry_ids = gdf.loc[invalid_geometry_filter].index.to_list()
 
@@ -958,7 +950,6 @@ def _filter_gdf_by_polygon_and_tags(gdf, polygon, tags):
     """
     # only apply the filters if the GeoDataFrame is not empty
     if not gdf.empty:
-
         # create two filters, initially all True
         polygon_filter = pd.Series(True, index=gdf.index)
         combined_tag_filter = pd.Series(True, index=gdf.index)
diff --git a/osmnx/graph.py b/osmnx/graph.py
index edb0617..1899a0c 100644
--- a/osmnx/graph.py
+++ b/osmnx/graph.py
@@ -485,7 +485,7 @@ def graph_from_polygon(
             "the graph-level street_count attribute will likely be inaccurate "
             "when you set clean_periphery=False"
         )
-        warnings.warn(msg)
+        warnings.warn(msg, stacklevel=1)
 
     utils.log(f"graph_from_polygon returned graph with {len(G)} nodes and {len(G.edges)} edges")
     return G
@@ -755,7 +755,6 @@ def _add_paths(G, paths, bidirectional=False):
     reversed_values = {"-1", "reverse", "T"}
 
     for path in paths:
-
         # extract/remove the ordered list of nodes from this path element so
         # we don't add it as a superfluous attribute to the edge later
         nodes = path.pop("nodes")
diff --git a/osmnx/io.py b/osmnx/io.py
index f4ce302..fe9cb07 100644
--- a/osmnx/io.py
+++ b/osmnx/io.py
@@ -55,7 +55,7 @@ def save_graph_geopackage(G, filepath=None, encoding="utf-8", directed=False):
     # save the nodes and edges as GeoPackage layers
     gdf_nodes.to_file(filepath, layer="nodes", driver="GPKG", index=True, encoding=encoding)
     gdf_edges.to_file(filepath, layer="edges", driver="GPKG", index=True, encoding=encoding)
-    utils.log(f'Saved graph as GeoPackage at "{filepath}"')
+    utils.log(f"Saved graph as GeoPackage at {filepath!r}")
 
 
 def save_graph_shapefile(G, filepath=None, encoding="utf-8", directed=False):
@@ -87,7 +87,8 @@ def save_graph_shapefile(G, filepath=None, encoding="utf-8", directed=False):
     warnings.warn(
         "The `save_graph_shapefile` function is deprecated and will be removed "
         "in a future release. Instead, use the `save_graph_geopackage` function "
-        "to save graphs as GeoPackage files for subsequent GIS analysis."
+        "to save graphs as GeoPackage files for subsequent GIS analysis.",
+        stacklevel=1,
     )
 
     # default filepath if none was provided
@@ -113,7 +114,7 @@ def save_graph_shapefile(G, filepath=None, encoding="utf-8", directed=False):
     # save the nodes and edges as separate ESRI shapefiles
     gdf_nodes.to_file(filepath_nodes, driver="ESRI Shapefile", index=True, encoding=encoding)
     gdf_edges.to_file(filepath_edges, driver="ESRI Shapefile", index=True, encoding=encoding)
-    utils.log(f'Saved graph as shapefiles at "{filepath}"')
+    utils.log(f"Saved graph as shapefiles at {filepath!r}")
 
 
 def save_graphml(G, filepath=None, gephi=False, encoding="utf-8"):
@@ -170,7 +171,7 @@ def save_graphml(G, filepath=None, gephi=False, encoding="utf-8"):
             data[attr] = str(value)
 
     nx.write_graphml(G, path=filepath, encoding=encoding)
-    utils.log(f'Saved graph as GraphML file at "{filepath}"')
+    utils.log(f"Saved graph as GraphML file at {filepath!r}")
 
 
 def load_graphml(
@@ -271,7 +272,7 @@ def load_graphml(
     G = _convert_node_attr_types(G, default_node_dtypes)
     G = _convert_edge_attr_types(G, default_edge_dtypes)
 
-    utils.log(f'Loaded graph with {len(G)} nodes and {len(G.edges)} edges from "{source}"')
+    utils.log(f"Loaded graph with {len(G)} nodes and {len(G.edges)} edges from {source!r}")
     return G
 
 
@@ -338,7 +339,6 @@ def _convert_edge_attr_types(G, dtypes=None):
     """
     # for each edge in the graph, eval attribute value lists and convert types
     for _, _, data in G.edges(data=True, keys=False):
-
         # remove extraneous "id" attribute added by graphml saving
         data.pop("id", None)
 
@@ -392,7 +392,7 @@ def _convert_bool_string(value):
     elif isinstance(value, bool):
         return value
     else:  # pragma: no cover
-        raise ValueError(f'invalid literal for boolean: "{value}"')
+        raise ValueError(f"invalid literal for boolean: {value!r}")
 
 
 def _stringify_nonnumeric_cols(gdf):
diff --git a/osmnx/osm_xml.py b/osmnx/osm_xml.py
index 0d267a2..3ea5627 100644
--- a/osmnx/osm_xml.py
+++ b/osmnx/osm_xml.py
@@ -98,6 +98,8 @@ def save_graph_xml(
     oneway=False,
     merge_edges=True,
     edge_tag_aggs=None,
+    api_version=0.6,
+    precision=6,
 ):
     """
     Save graph to disk as an OSM-formatted XML .osm file.
@@ -163,6 +165,10 @@ def save_graph_xml(
         this method to aggregate the lengths of the individual
         component edges. Otherwise, the length attribute will simply
         reflect the length of the first edge associated with the way.
+    api_version : float
+        OpenStreetMap API version to write to the XML file header
+    precision : int
+        number of decimal places to round latitude and longitude values
 
     Returns
     -------
@@ -184,7 +190,7 @@ def save_graph_xml(
             "In order for save_graph_xml to behave properly the graph must "
             "have been created with the `all_oneway` setting set to True."
         )
-        warnings.warn(msg)
+        warnings.warn(msg, stacklevel=1)
 
     try:
         gdf_nodes, gdf_edges = data
@@ -195,6 +201,8 @@ def save_graph_xml(
 
     # rename columns per osm specification
     gdf_nodes.rename(columns={"x": "lon", "y": "lat"}, inplace=True)
+    gdf_nodes["lon"] = gdf_nodes["lon"].round(precision)
+    gdf_nodes["lat"] = gdf_nodes["lat"].round(precision)
     gdf_nodes = gdf_nodes.reset_index().rename(columns={"osmid": "id"})
     if "id" in gdf_edges.columns:
         gdf_edges = gdf_edges[[col for col in gdf_edges if col != "id"]]
@@ -209,11 +217,7 @@ def save_graph_xml(
         table["user"] = "osmnx"
         table["version"] = "1"
         table["changeset"] = "1"
-        table["timestamp"] = "2017-01-01T00:00:00Z"
-
-    # convert all datatypes to str
-    gdf_nodes = gdf_nodes.applymap(str)
-    gdf_edges = gdf_edges.applymap(str)
+        table["timestamp"] = utils.ts(template="{:%Y-%m-%dT%H:%M:%SZ}")
 
     # misc. string replacements to meet OSM XML spec
     if "oneway" in gdf_edges.columns:
@@ -225,15 +229,15 @@ def save_graph_xml(
         )
 
     # initialize XML tree with an OSM root element then append nodes/edges
-    root = etree.Element("osm", attrib={"version": "0.6", "generator": "OSMnx"})
+    root = etree.Element("osm", attrib={"version": str(api_version), "generator": "OSMnx"})
     root = _append_nodes_xml_tree(root, gdf_nodes, node_attrs, node_tags)
     root = _append_edges_xml_tree(
         root, gdf_edges, edge_attrs, edge_tags, edge_tag_aggs, merge_edges
     )
 
     # write to disk
-    etree.ElementTree(root).write(filepath)
-    utils.log(f'Saved graph as .osm file at "{filepath}"')
+    etree.ElementTree(root).write(filepath, encoding="utf-8", xml_declaration=True)
+    utils.log(f"Saved graph as .osm file at {filepath!r}")
 
 
 def _append_nodes_xml_tree(root, gdf_nodes, node_attrs, node_tags):
@@ -257,9 +261,11 @@ def _append_nodes_xml_tree(root, gdf_nodes, node_attrs, node_tags):
         xml tree with nodes appended
     """
     for _, row in gdf_nodes.iterrows():
-        node = etree.SubElement(root, "node", attrib=row[node_attrs].dropna().to_dict())
+        row = row.dropna().astype(str)
+        node = etree.SubElement(root, "node", attrib=row[node_attrs].to_dict())
+
         for tag in node_tags:
-            if tag in gdf_nodes.columns:
+            if tag in row:
                 etree.SubElement(node, "tag", attrib={"k": tag, "v": row[tag]})
     return root
 
@@ -300,9 +306,8 @@ def _append_edges_xml_tree(root, gdf_edges, edge_attrs, edge_tags, edge_tag_aggs
     """
     gdf_edges.reset_index(inplace=True)
     if merge_edges:
-
         for _, all_way_edges in gdf_edges.groupby("id"):
-            first = all_way_edges.iloc[0]
+            first = all_way_edges.iloc[0].dropna().astype(str)
             edge = etree.SubElement(root, "way", attrib=first[edge_attrs].dropna().to_dict())
 
             if len(all_way_edges) == 1:
@@ -312,23 +317,23 @@ def _append_edges_xml_tree(root, gdf_edges, edge_attrs, edge_tags, edge_tag_aggs
                 # topological sort
                 ordered_nodes = _get_unique_nodes_ordered_from_way(all_way_edges)
                 for node in ordered_nodes:
-                    etree.SubElement(edge, "nd", attrib={"ref": node})
+                    etree.SubElement(edge, "nd", attrib={"ref": str(node)})
 
             if edge_tag_aggs is None:
                 for tag in edge_tags:
-                    if tag in all_way_edges.columns:
+                    if tag in first:
                         etree.SubElement(edge, "tag", attrib={"k": tag, "v": first[tag]})
             else:
                 for tag in edge_tags:
-                    if (tag in all_way_edges.columns) and (
-                        tag not in (t for t, agg in edge_tag_aggs)
-                    ):
+                    if (tag in first) and (tag not in (t for t, agg in edge_tag_aggs)):
                         etree.SubElement(edge, "tag", attrib={"k": tag, "v": first[tag]})
 
                 for tag, agg in edge_tag_aggs:
                     if tag in all_way_edges.columns:
                         etree.SubElement(
-                            edge, "tag", attrib={"k": tag, "v": all_way_edges[tag].aggregate(agg)}
+                            edge,
+                            "tag",
+                            attrib={"k": tag, "v": str(all_way_edges[tag].aggregate(agg))},
                         )
     else:
         # NOTE: this will generate separate OSM ways for each network edge,
@@ -338,11 +343,12 @@ def _append_edges_xml_tree(root, gdf_edges, edge_attrs, edge_tags, edge_tag_aggs
         # OSM XML schema standard, however, the data will still comprise a
         # valid network and will be readable by *most* OSM tools.
         for _, row in gdf_edges.iterrows():
-            edge = etree.SubElement(root, "way", attrib=row[edge_attrs].dropna().to_dict())
+            row = row.dropna().astype(str)
+            edge = etree.SubElement(root, "way", attrib=row[edge_attrs].to_dict())
             etree.SubElement(edge, "nd", attrib={"ref": row["u"]})
             etree.SubElement(edge, "nd", attrib={"ref": row["v"]})
             for tag in edge_tags:
-                if tag in gdf_edges.columns:
+                if tag in row:
                     etree.SubElement(edge, "tag", attrib={"k": tag, "v": row[tag]})
 
     return root
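
A sketch of save_graph_xml with the two new keyword arguments at their defaults (the query location is arbitrary):

    import osmnx as ox

    # save_graph_xml expects a graph built with all_oneway=True
    ox.settings.all_oneway = True
    G = ox.graph_from_point((37.79, -122.41), dist=300, network_type="drive")

    # api_version lands in the <osm> root element, node lat/lon values are
    # rounded to `precision` places, and the file now gets an explicit
    # UTF-8 XML declaration
    ox.save_graph_xml(G, filepath="graph.osm", api_version=0.6, precision=6)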
diff --git a/osmnx/plot.py b/osmnx/plot.py
index 4ca54c6..201d295 100644
--- a/osmnx/plot.py
+++ b/osmnx/plot.py
@@ -724,7 +724,6 @@ def _save_and_show(fig, ax, save=False, show=True, close=True, filepath=None, dp
     fig.canvas.flush_events()
 
     if save:
-
         # default filepath, if none provided
         if filepath is None:
             filepath = Path(settings.imgs_folder) / "image.png"
diff --git a/osmnx/settings.py b/osmnx/settings.py
index 394afe9..da1f9bf 100644
--- a/osmnx/settings.py
+++ b/osmnx/settings.py
@@ -44,9 +44,13 @@ default_referer : string
 default_user_agent : string
     HTTP header user-agent. Default is
     `"OSMnx Python package (https://github.com/gboeing/osmnx)"`.
+doh_url_template : string
+    Endpoint to resolve DNS-over-HTTPS if local DNS resolution fails. Set to
+    None to disable DoH, but see `downloader._config_dns` documentation for
+    caveats. Default is: `"https://8.8.8.8/resolve?name={hostname}"`
 imgs_folder : string or pathlib.Path
     Path to folder in which to save plotted images by default. Default is
-    "./images".
+    `"./images"`.
 log_file : bool
     If True, save log output to a file in logs_folder. Default is `False`.
 log_filename : string
@@ -135,6 +139,7 @@ default_access = '["access"!~"private"]'
 default_crs = "epsg:4326"
 default_referer = "OSMnx Python package (https://github.com/gboeing/osmnx)"
 default_user_agent = "OSMnx Python package (https://github.com/gboeing/osmnx)"
+doh_url_template = "https://8.8.8.8/resolve?name={hostname}"
 imgs_folder = "./images"
 log_console = False
 log_file = False
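
The new setting is an ordinary format string with a {hostname} placeholder that the downloader fills in before issuing the GET request:

    import osmnx as ox

    url = ox.settings.doh_url_template.format(hostname="overpass-api.de")
    print(url)  # https://8.8.8.8/resolve?name=overpass-api.de

    # set to None to disable the DoH fallback entirely (see the
    # downloader._config_dns caveats above)
    ox.settings.doh_url_template = None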
diff --git a/osmnx/simplification.py b/osmnx/simplification.py
index d35516c..ccdf6dd 100644
--- a/osmnx/simplification.py
+++ b/osmnx/simplification.py
@@ -269,13 +269,11 @@ def simplify_graph(G, strict=True, remove_rings=True, track_merged=False):
 
     # generate each path that needs to be simplified
     for path in _get_paths_to_simplify(G, strict=strict):
-
         # add the interstitial edges we're removing to a list so we can retain
         # their spatial geometry
         merged_edges = []
         path_attributes = {}
         for u, v in zip(path[:-1], path[1:]):
-
             if track_merged:
                 # keep track of the edges that were merged
                 merged_edges.append((u, v))
@@ -289,7 +287,10 @@ def simplify_graph(G, strict=True, remove_rings=True, track_merged=False):
 
             # get edge between these nodes: if multiple edges exist between
             # them (see above), we retain only one in the simplified graph
-            edge_data = G.edges[u, v, 0]
+            # We can't assume that there exists an edge from u to v
+            # with key=0, so we get a list of all edges from u to v
+            # and just take the first one.
+            edge_data = list(G.get_edge_data(u, v).values())[0]
             for attr in edge_data:
                 if attr in path_attributes:
                     # if this key already exists in the dict, append it to the
@@ -551,7 +552,6 @@ def _consolidate_intersections_rebuild_graph(G, tolerance=10, reconnect_edges=Tr
     # regroup now that we potentially have new cluster labels from step 3
     groups = gdf.groupby("cluster")
     for cluster_label, nodes_subset in groups:
-
         osmids = nodes_subset.index.to_list()
         if len(osmids) == 1:
             # if cluster is a single node, add that node to new graph
@@ -597,11 +597,9 @@ def _consolidate_intersections_rebuild_graph(G, tolerance=10, reconnect_edges=Tr
     # for every group of merged nodes with more than 1 node in it, extend the
     # edge geometries to reach the new node point
     for cluster_label, nodes_subset in groups:
-
         # but only if there were multiple nodes merged together,
         # otherwise it's the same old edge as in original graph
         if len(nodes_subset) > 1:
-
             # get coords of merged nodes point centroid to prepend or
             # append to the old edge geom's coords
             x = H.nodes[cluster_label]["x"]
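
To see the key=0 fix in isolation, a minimal plain-networkx illustration of why G.edges[u, v, 0] can fail and what the simplifier now does instead:

    import networkx as nx

    G = nx.MultiDiGraph()
    G.add_edge(1, 2, key=5, length=100)  # the only edge's key is not 0

    # G.edges[1, 2, 0] would raise KeyError here; taking the first of
    # whatever edges actually exist between u and v always succeeds
    edge_data = list(G.get_edge_data(1, 2).values())[0]
    print(edge_data)  # {'length': 100}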
diff --git a/osmnx/speed.py b/osmnx/speed.py
index 9b41f37..1fdcc0c 100644
--- a/osmnx/speed.py
+++ b/osmnx/speed.py
@@ -167,32 +167,45 @@ def add_edge_travel_times(G, precision=1):
     return G
 
 
-def _clean_maxspeed(value, convert_mph=True):
+def _clean_maxspeed(maxspeed, agg=np.mean, convert_mph=True):
     """
     Clean a maxspeed string and convert mph to kph if necessary.
 
+    If present, splits maxspeed on "|" (which denotes that the value contains
+    different speeds per lane) then aggregates the resulting values. Invalid
+    inputs return None. See https://wiki.openstreetmap.org/wiki/Key:maxspeed
+    for details on values and formats.
+
     Parameters
     ----------
-    value : string
-        an OSM way maxspeed value
+    maxspeed : string
+        a valid OpenStreetMap way maxspeed value
+    agg : function
+        aggregation function if maxspeed contains multiple values (default
+        is numpy.mean)
     convert_mph : bool
-        if True, convert mph to kph
+        if True, convert miles per hour to km per hour
 
     Returns
     -------
-    value_clean : string
+    clean_value : string
     """
-    MPH_TO_KPH = 1.60934
-    pattern = re.compile(r"[^\d\.,;]")
-
+    MILES_TO_KM = 1.60934
+    # regex adapted from OSM wiki
+    pattern = "^([0-9][\\.,0-9]+?)(?:[ ]?(?:km/h|kmh|kph|mph|knots))?$"
+    values = re.split(r"\|", maxspeed)  # creates a list even if it's a single value
     try:
-        # strip out everything but numbers, periods, commas, semicolons
-        value_clean = float(re.sub(pattern, "", value).replace(",", "."))
-        if convert_mph and "mph" in value.lower():
-            value_clean = value_clean * MPH_TO_KPH
-        return value_clean
-
-    except ValueError:
+        clean_values = []
+        for value in values:
+            match = re.match(pattern, value)
+            clean_value = float(match.group(1).replace(",", "."))
+            if convert_mph and "mph" in maxspeed.lower():
+                clean_value = clean_value * MILES_TO_KM
+            clean_values.append(clean_value)
+        return agg(clean_values)
+
+    except (ValueError, AttributeError):
+        # if invalid input, return None
         return None
 
 
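The per-lane cleaning can be checked directly, mirroring the assertions added to test_routing later in this diff (_clean_maxspeed is private, so treat this as illustration):

    from osmnx.speed import _clean_maxspeed

    assert _clean_maxspeed("100,2") == 100.2      # comma decimal separator
    assert _clean_maxspeed("100 km/h") == 100.0   # unit suffix stripped
    assert round(_clean_maxspeed("100 mph"), 3) == 160.934
    assert _clean_maxspeed("60|100") == 80.0      # mean across lanes
    assert _clean_maxspeed("signal") is None      # invalid input -> None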
diff --git a/osmnx/utils.py b/osmnx/utils.py
index b586bfd..b344388 100644
--- a/osmnx/utils.py
+++ b/osmnx/utils.py
@@ -72,7 +72,7 @@ def ts(style="datetime", template=None):
         elif style == "time":
             template = "{:%H:%M:%S}"
         else:  # pragma: no cover
-            raise ValueError(f'unrecognized timestamp style "{style}"')
+            raise ValueError(f"unrecognized timestamp style {style!r}")
 
     ts = template.format(dt.datetime.now())
     return ts
@@ -193,7 +193,8 @@ def config(
         "The `utils.config` function is deprecated and will be removed in a "
         "future release. Instead, use the `settings` module directly to "
         "configure a global setting's value. For example, "
-        "`ox.settings.log_console=True`."
+        "`ox.settings.log_console=True`.",
+        stacklevel=1,
     )
 
     # set each global setting to the argument value
@@ -315,7 +316,6 @@ def _get_logger(level, name, filename):
 
     # if a logger with this name is not already set up
     if not getattr(logger, "handler_set", None):
-
         # get today's date and construct a log filename
         log_filename = Path(settings.logs_folder) / f'{filename}_{ts(style="date")}.log'
 
diff --git a/osmnx/utils_geo.py b/osmnx/utils_geo.py
index 1ede395..6101393 100644
--- a/osmnx/utils_geo.py
+++ b/osmnx/utils_geo.py
@@ -42,7 +42,9 @@ def sample_points(G, n):
         which each point was drawn
     """
     if nx.is_directed(G):  # pragma: no cover
-        warnings.warn("graph should be undirected to not oversample bidirectional edges")
+        warnings.warn(
+            "graph should be undirected to not oversample bidirectional edges", stacklevel=1
+        )
     gdf_edges = utils_graph.graph_to_gdfs(G, nodes=False)[["geometry", "length"]]
     weights = gdf_edges["length"] / gdf_edges["length"].sum()
     idx = np.random.choice(gdf_edges.index, size=n, p=weights)
diff --git a/osmnx/utils_graph.py b/osmnx/utils_graph.py
index 8a64b99..afc4661 100644
--- a/osmnx/utils_graph.py
+++ b/osmnx/utils_graph.py
@@ -42,7 +42,6 @@ def graph_to_gdfs(G, nodes=True, edges=True, node_geometry=True, fill_edge_geome
     crs = G.graph["crs"]
 
     if nodes:
-
         if not G.nodes:  # pragma: no cover
             raise ValueError("graph contains no nodes")
 
@@ -59,14 +58,12 @@ def graph_to_gdfs(G, nodes=True, edges=True, node_geometry=True, fill_edge_geome
         utils.log("Created nodes GeoDataFrame from graph")
 
     if edges:
-
         if not G.edges:  # pragma: no cover
             raise ValueError("graph contains no edges")
 
         u, v, k, data = zip(*G.edges(keys=True, data=True))
 
         if fill_edge_geometry:
-
             # subroutine to get geometry for every edge: if edge already has
             # geometry return it, otherwise create it using the incident nodes
             x_lookup = nx.get_node_attributes(G, "x")
@@ -153,7 +150,8 @@ def graph_from_gdfs(gdf_nodes, gdf_edges, graph_attrs=None):
             # ValueError if geometry column contains non-point geometry types
             warnings.warn(
                 "discarding the gdf_nodes geometry column, though its "
-                "values differ from the coordinates in the x and y columns"
+                "values differ from the coordinates in the x and y columns",
+                stacklevel=1,
             )
         gdf_nodes = gdf_nodes.drop(columns=gdf_nodes.geometry.name)
 
@@ -368,16 +366,12 @@ def get_undirected(G):
     # edge or self-loop. so, look through the edges and remove any duplicates.
     duplicate_edges = set()
     for u1, v1, key1, data1 in H.edges(keys=True, data=True):
-
         # if we haven't already flagged this edge as a duplicate
         if (u1, v1, key1) not in duplicate_edges:
-
             # look at every other edge between u and v, one at a time
             for key2 in H[u1][v1]:
-
                 # don't compare this edge to itself
                 if key1 != key2:
-
                     # compare the first edge's data to the second's
                     # if they match up, flag the duplicate for removal
                     data2 = H.edges[u1, v1, key2]
@@ -414,7 +408,6 @@ def _is_duplicate_edge(data1, data2):
 
     # if they contain the same osmid or set of osmids (due to simplification)
     if osmid1 == osmid2:
-
         # if both edges have geometry attributes and they match each other
         if ("geometry" in data1) and ("geometry" in data2):
             if _is_same_geometry(data1["geometry"], data2["geometry"]):
@@ -490,10 +483,8 @@ def _update_edge_keys(G):
 
     # for each group of duplicate edges
     for _, group in groups:
-
         # for each pair of edges within this group
         for geom1, geom2 in itertools.combinations(group["geometry"], 2):
-
             # if they don't have the same geometry, flag them as different
             # streets: flag edge uvk, but not edge vuk, otherwise we would
             # increment both their keys and they'll still duplicate each other
diff --git a/requirements.txt b/requirements.txt
index 7244025..4e92636 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,9 +1,8 @@
 geopandas>=0.12
 matplotlib>=3.5
 networkx>=2.8
-numpy>=1.23
+numpy>=1.21
 pandas>=1.5
 pyproj>=3.4
-python-dateutil>=2.8
 requests>=2.28
 Shapely>=2.0
diff --git a/setup.py b/setup.py
index 97e35c9..f6e796e 100644
--- a/setup.py
+++ b/setup.py
@@ -62,7 +62,7 @@ else:
 # now call setup
 setup(
     name="osmnx",
-    version="1.3.0",
+    version="1.3.1",
     description=DESC,
     long_description=LONG_DESCRIPTION,
     long_description_content_type="text/x-rst",
diff --git a/tests/environment-dev.yml b/tests/environment-dev.yml
index 853d16c..d4e256f 100644
--- a/tests/environment-dev.yml
+++ b/tests/environment-dev.yml
@@ -22,7 +22,7 @@ dependencies:
     - scipy
 
     # linting/testing
-    - black=22.*
+    - black=23.*
     - coverage
     - flake8
     - flake8-bugbear
diff --git a/tests/test_osmnx.py b/tests/test_osmnx.py
index d2c3167..efd813c 100755
--- a/tests/test_osmnx.py
+++ b/tests/test_osmnx.py
@@ -1,9 +1,11 @@
 """Unit tests for the package."""
 
-# use agg backend so you don't need a display on travis-ci
+# use agg backend so you don't need a display on ci
 # do this first before pyplot is imported by anything
 import matplotlib as mpl
 
+from osmnx.speed import _clean_maxspeed
+
 mpl.use("Agg")
 
 import bz2
@@ -43,7 +45,7 @@ ox.settings.cache_folder = ".temp/cache"
 location_point = (37.791427, -122.410018)
 address = "600 Montgomery St, San Francisco, California, USA"
 place1 = {"city": "Piedmont", "state": "California", "country": "USA"}
-place2 = "Civic Center, Los Angeles, California"
+place2 = "SoHo, New York, NY"
 p = (
     "POLYGON ((-122.262 37.869, -122.255 37.869, -122.255 37.874,"
     "-122.262 37.874, -122.262 37.869))"
@@ -184,7 +186,6 @@ def test_osm_xml():
 
 
 def test_elevation():
-
     G = ox.graph_from_address(address=address, dist=500, dist_type="bbox", network_type="bike")
     rasters = list(Path("tests/input_data").glob("elevation*.tif"))
 
@@ -200,7 +201,6 @@ def test_elevation():
 
 
 def test_routing():
-
     G = ox.graph_from_address(address=address, dist=500, dist_type="bbox", network_type="bike")
 
     # give each edge speed and travel time attributes
@@ -208,6 +208,16 @@ def test_routing():
     G = ox.add_edge_speeds(G, hwy_speeds={"motorway": 100})
     G = ox.add_edge_travel_times(G)
 
+    # test value cleaning
+    assert _clean_maxspeed("100,2") == 100.2
+    assert _clean_maxspeed("100.2") == 100.2
+    assert _clean_maxspeed("100 km/h") == 100.0
+    assert _clean_maxspeed("100 mph") == pytest.approx(160.934)
+    assert _clean_maxspeed("60|100") == 80
+    assert _clean_maxspeed("60|100 mph") == pytest.approx(128.7472)
+    assert _clean_maxspeed("signal") is None
+    assert _clean_maxspeed("100;70") is None
+
     orig_x = np.array([-122.404771])
     dest_x = np.array([-122.401429])
     orig_y = np.array([37.794302])
@@ -285,7 +295,6 @@ def test_plots():
 
 
 def test_find_nearest():
-
     # get graph and x/y coords to search
     G = ox.graph_from_point(location_point, dist=500, network_type="drive", simplify=False)
     Gp = ox.project_graph(G)
@@ -304,9 +313,15 @@ def test_find_nearest():
 
 
 def test_api_endpoints():
+    ip = ox.downloader._resolve_host_via_doh("overpass-api.de")
+    ip = ox.downloader._resolve_host_via_doh("AAAAAAAAAAA")
 
-    ip = ox.downloader._get_host_by_name("overpass-api.de")
-    ip = ox.downloader._get_host_by_name("AAAAAAAAAAA")
+    _doh_url_template_default = ox.settings.doh_url_template
+    ox.settings.doh_url_template = "http://aaaaaa.hostdoesntexist.org/nothinguseful"
+    ip = ox.downloader._resolve_host_via_doh("overpass-api.de")
+    ox.settings.doh_url_template = None
+    ip = ox.downloader._resolve_host_via_doh("overpass-api.de")
+    ox.settings.doh_url_template = _doh_url_template_default
 
     params = OrderedDict()
     params["format"] = "json"
@@ -340,16 +355,10 @@ def test_api_endpoints():
     ox.settings.nominatim_key = "NOT_A_KEY"
     response_json = ox.downloader.nominatim_request(params=params, request_type="search")
 
-    # Test changing the endpoint.
-    # It should fail because we didn't provide a valid key
-    ox.settings.nominatim_endpoint = "http://open.mapquestapi.com/nominatim/v1/"
-    with pytest.raises(Exception):
-        response_json = ox.downloader.nominatim_request(params=params, request_type="search")
-
     # Test changing the endpoint.
     # This should fail because we didn't provide a valid endpoint
     ox.settings.overpass_endpoint = "http://NOT_A_VALID_ENDPOINT/api/"
-    with pytest.raises(Exception):
+    with pytest.raises(Exception) as ex:
         G = ox.graph_from_place(place1, network_type="all")
 
     ox.settings.nominatim_key = default_key
@@ -358,7 +367,6 @@ def test_api_endpoints():
 
 
 def test_graph_save_load():
-
     # save graph as shapefile and geopackage
     G = ox.graph_from_point(location_point, dist=500, network_type="drive")
     ox.save_graph_shapefile(G, directed=True)
@@ -426,7 +434,6 @@ def test_graph_save_load():
 
 
 def test_graph_from_functions():
-
     # graph from bounding box
     _ = ox.utils_geo.bbox_from_point(location_point, project_utm=True, return_crs=True)
     north, south, east, west = ox.utils_geo.bbox_from_point(location_point, dist=500)
@@ -473,7 +480,6 @@ def test_graph_from_functions():
 
 
 def test_geometries():
-
     # geometries_from_bbox - bounding box query to return empty GeoDataFrame
     gdf = ox.geometries_from_bbox(0.009, -0.009, 0.009, -0.009, tags={"building": True})
 
