diff --git a/docs/generate_docs.py b/docs/generate_docs.py
index 0c4babb1..66103cdb 100644
--- a/docs/generate_docs.py
+++ b/docs/generate_docs.py
@@ -1,6 +1,7 @@
from __future__ import annotations
import importlib
+import inspect
from pathlib import Path
import docstring_parser
@@ -96,7 +97,7 @@ def strip_markdown(text: str) -> str:
return cleaned_text
-def process_docstring(doc_str: str) -> str:
+def custom_process_docstring(doc_str: str) -> str:
"""Process a docstring."""
doc_str_frag: str = ""
parsed_doc_str = docstring_parser.parse(doc_str)
@@ -163,9 +164,65 @@ def process_docstring(doc_str: str) -> str:
return doc_str_frag
+def custom_format_signature(sig: inspect.Signature, colon: bool = True) -> str:
+    """Format a signature manually: pdoc currently returns expanded annotations, which is problematic for e.g. npt.ArrayLike."""
+    # NOTE: the colon argument is unused below; it is retained, presumably to mirror pdoc's format_signature filter.
+    # First get a list with all params as strings.
+    params: list[str] = doc._PrettySignature._params(sig)  # type: ignore
+    return_annot = doc._PrettySignature._return_annotation_str(sig)  # type: ignore
+    parsed_return_annot: list[str] = []
+    if return_annot not in ["", "None", None]:
+        # remove the tuple wrapper by prefix / suffix rather than lstrip / rstrip,
+        # which strip character sets (not substrings) and can mangle other annotations
+        ra = return_annot.removeprefix("tuple[").removesuffix("]")
+        rs = ra.split(",")
+        for r in rs:
+            r = r.strip()
+            # abbreviate dotted module paths to the trailing class name
+            if "." in r:
+                r = r.split(".")[-1]
+            # drop unhelpful Any / NoneType entries
+            if r.lower() not in ["any", "nonetype"]:
+                parsed_return_annot.append(r)
+    # build the signature fragment's tags
+    if len(params) <= 1 and len(parsed_return_annot) <= 1:
+        sig_fragment: tags.div = tags.div(cls="signature")
+    else:
+        sig_fragment: tags.div = tags.div(cls="signature multiline")
+    with sig_fragment:
+        tags.span("(", cls="pt")
+    # nest sig params for CSS alignment
+    for param in params:
+        param_fragment = tags.div(cls="param")
+        if ":" in param:
+            # split on the first colon only, in case a default value contains a colon
+            param_text, annot = param.split(":", 1)
+            if "any" in annot.strip().lower():
+                annot = None
+            elif annot.strip().lower().startswith("union"):
+                annot = None
+        else:
+            param_text = param
+            annot = None
+        with param_fragment:
+            tags.span(param_text, cls="pn")
+        if annot is not None:
+            with param_fragment:
+                tags.span(":", cls="pc")
+                tags.span(annot, cls="pa")
+        sig_fragment += param_fragment
+    if not parsed_return_annot:
+        with sig_fragment:
+            tags.span(")", cls="pt")
+    else:
+        with sig_fragment:
+            tags.span(")->[", cls="pt")
+            for parsed_return in parsed_return_annot:
+                tags.span(parsed_return, cls="pr")
+            tags.span("]", cls="pt")
+
+    return sig_fragment.render()
+
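+# Illustrative example (derived from the rendered docs further down in this patch):
+# a return annotation of "tuple[DataMap, geopandas.geodataframe.GeoDataFrame]" is parsed
+# to ["DataMap", "GeoDataFrame"] and rendered as a ")->[ ... ]" block, with dotted module
+# paths abbreviated to bare class names.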
+
if __name__ == "__main__":
# Add custom function
- render.env.filters["process_docstring"] = process_docstring # type: ignore
+ render.env.filters["custom_process_docstring"] = custom_process_docstring # type: ignore
+ render.env.filters["custom_format_signature"] = custom_format_signature # type: ignore
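+    # the filters registered above become available to the pdoc Jinja2 templates,
+    # e.g. (illustrative usage) {{ doc.docstring | custom_process_docstring }} in module.html.jinja2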
here = Path(__file__).parent
module_file_maps = [
diff --git a/docs/pdoc_templates/module.html.jinja2 b/docs/pdoc_templates/module.html.jinja2
index 2a65faf4..ad45f208 100644
--- a/docs/pdoc_templates/module.html.jinja2
+++ b/docs/pdoc_templates/module.html.jinja2
@@ -1,9 +1,11 @@
{#
DON'T INDENT -> CAUSES ISSUES FOR MARKDOWN OUTPUT
+PRESERVE WHITE SPACE AROUND MARKDOWN BLOCKS FOR PARSER
#}
{% extends "frame.html.jinja2" %}
{% block content %}
+
{% block module_info %}
{% if module.namespace %}
@@ -20,6 +22,7 @@ DON'T INDENT -> CAUSES ISSUES FOR MARKDOWN OUTPUT
{% endfor %}
{% endblock %}
+
{% endblock content %}
{#
End of content, beginning of helper macros.
@@ -34,7 +37,6 @@ See https://pdoc.dev/docs/pdoc/render_helpers.html#DefaultMacroExtension for an
{{ submodule(doc) }}
{% else %}
{{ variable(doc) }}
-{{ docstring(doc) }}
{% endif %}
{% enddefaultmacro %}
@@ -107,29 +109,30 @@ See https://pdoc.dev/docs/pdoc/render_helpers.html#DefaultMacroExtension for an
+
diff --git a/docs/src/pages/intro.md b/docs/src/pages/intro.md
index 448ad394..26387f10 100644
--- a/docs/src/pages/intro.md
+++ b/docs/src/pages/intro.md
@@ -119,11 +119,15 @@ from cityseer.metrics import networks
nodes_gdf, edges_gdf, network_structure = io.network_structure_from_nx(G_decomp, crs=3395)
# the underlying method allows the computation of various centralities simultaneously, e.g.
nodes_gdf = networks.segment_centrality(
- network_structure=network_structure, # the network structure for which to compute the measures
- nodes_gdf=nodes_gdf, # the nodes GeoDataFrame, to which the results will be written
- distances=[200, 400, 800, 1600], # the distance thresholds for which to compute centralities
+ # the network structure for which to compute the measures
+ network_structure=network_structure,
+ # the nodes GeoDataFrame, to which the results will be written
+ nodes_gdf=nodes_gdf,
+ # the distance thresholds for which to compute centralities
+ distances=[200, 400, 800, 1600],
)
-nodes_gdf.head() # the results are now in the GeoDataFrame
+# the results are now in the GeoDataFrame
+nodes_gdf.head()
```
```python
@@ -167,18 +171,26 @@ data_gdf.head()
# example easy-wrapper method for computing mixed-uses
# this is a distance weighted form of hill diversity
nodes_gdf, data_gdf = layers.compute_mixed_uses(
- data_gdf, # the source data
- landuse_column_label="categorical_landuses", # column in the dataframe which contains the landuse labels
- nodes_gdf=nodes_gdf, # nodes GeoDataFrame - the results are written here
- network_structure=network_structure, # measures will be computed relative to pedestrian distances over the network
- distances=[200, 400, 800, 1600], # distance thresholds for which you want to compute the measures
+ # the source data
+ data_gdf,
+ # column in the dataframe which contains the landuse labels
+ landuse_column_label="categorical_landuses",
+ # nodes GeoDataFrame - the results are written here
+ nodes_gdf=nodes_gdf,
+ # measures will be computed relative to pedestrian distances over the network
+ network_structure=network_structure,
+ # distance thresholds for which you want to compute the measures
+ distances=[200, 400, 800, 1600],
)
-print(nodes_gdf.columns) # the GeoDataFrame will contain the results of the calculations
-print(nodes_gdf["cc_metric_q0_800_hill"]) # which can be retrieved as needed
+# the GeoDataFrame will contain the results of the calculations
+print(nodes_gdf.columns)
+# which can be retrieved as needed
+print(nodes_gdf["cc_metric_q0_800_hill"])
```
```python
-# for curiosity's sake - plot the assignments to see which edges the data points were assigned to
+# for curiosity's sake:
+# plot the assignments to see which edges the data points were assigned to
plot.plot_assignment(network_structure, G_decomp, data_gdf, dpi=200, figsize=(4, 4))
```
@@ -210,15 +222,22 @@ _800m distance-weighted mixed-uses._
```python
# compute landuse accessibilities for land-use types a, b, c
nodes_gdf, data_gdf = layers.compute_accessibilities(
- data_gdf, # the source data
- landuse_column_label="categorical_landuses", # column in the dataframe which contains the landuse labels
- accessibility_keys=["a", "b", "c"], # the landuse categories for which to compute accessibilities
- nodes_gdf=nodes_gdf, # nodes GeoDataFrame - the results are written here
- network_structure=network_structure, # measures will be computed relative to pedestrian distances over the network
- distances=[200, 400, 800, 1600], # distance thresholds for which you want to compute the measures
+ # the source data
+ data_gdf,
+ # column in the dataframe which contains the landuse labels
+ landuse_column_label="categorical_landuses",
+ # the landuse categories for which to compute accessibilities
+ accessibility_keys=["a", "b", "c"],
+ # nodes GeoDataFrame - the results are written here
+ nodes_gdf=nodes_gdf,
+ # measures will be computed relative to pedestrian distances over the network
+ network_structure=network_structure,
+ # distance thresholds for which you want to compute the measures
+ distances=[200, 400, 800, 1600],
)
-# accessibilities are computed in both weighted and unweighted forms, e.g. for "a" and "b" landuse codes
-print(nodes_gdf[["cc_metric_a_800_weighted", "cc_metric_b_1600_non_weighted"]]) # and can be retrieved as needed
+# accessibilities are computed in both weighted and non-weighted forms,
+# e.g. here for the "a" landuse code (weighted) and the "b" landuse code (non-weighted)
+print(nodes_gdf[["cc_metric_a_800_weighted", "cc_metric_b_1600_non_weighted"]])
```
Aggregations can likewise be computed for numerical data. Let's generate some mock numerical data:
@@ -228,13 +247,19 @@ numerical_data_gdf = mock.mock_numerical_data(G_decomp, num_arrs=3)
numerical_data_gdf.head()
# compute stats for column mock_numerical_1
nodes_gdf, numerical_data_gdf = layers.compute_stats(
- numerical_data_gdf, # the source data
- stats_column_label="mock_numerical_1", # numerical column to compute stats for
- nodes_gdf=nodes_gdf, # nodes GeoDataFrame - the results are written here
- network_structure=network_structure, # measures will be computed relative to pedestrian distances over the network
- distances=[800, 1600], # distance thresholds for which you want to compute the measures
+ # the source data
+ numerical_data_gdf,
+ # numerical column to compute stats for
+ stats_column_label="mock_numerical_1",
+ # nodes GeoDataFrame - the results are written here
+ nodes_gdf=nodes_gdf,
+ # measures will be computed relative to pedestrian distances over the network
+ network_structure=network_structure,
+ # distance thresholds for which you want to compute the measures
+ distances=[800, 1600],
)
-# statistical aggregations are calculated for each requested column, and in the following forms:
+# statistical aggregations are calculated for each requested column,
+# and in the following forms:
# max, min, sum, sum_weighted, mean, mean_weighted, variance, variance_weighted
print(nodes_gdf["cc_metric_max_800"])
print(nodes_gdf["cc_metric_mean_wt_800"])
diff --git a/docs/src/pages/metrics/layers.md b/docs/src/pages/metrics/layers.md
index 9a3b64bb..ccb62a6b 100644
--- a/docs/src/pages/metrics/layers.md
+++ b/docs/src/pages/metrics/layers.md
@@ -1,6 +1,7 @@
---
layout: ../../layouts/PageLayout.astro
---
+
# layers
@@ -11,7 +12,33 @@ layout: ../../layouts/PageLayout.astro
-
assign_gdf_to_network( data_gdf: geopandas.geodataframe.GeoDataFrame, network_structure: NetworkStructure, max_netw_assign_dist: int | float, data_id_col: str | None = None) -> tuple[DataMap, geopandas.geodataframe.GeoDataFrame]:
+
assign_gdf_to_network
+
(
+
+ data_gdf
+ :
+ geopandas.geodataframe.GeoDataFrame
+
+
+ network_structure
+ :
+ NetworkStructure
+
+
+ max_netw_assign_dist
+ :
+ int | float
+
+
+ data_id_col
+ :
+ str | None = None
+
+
)->[
+
DataMap
+
GeoDataFrame
+
]
+
@@ -80,7 +107,7 @@ layout: ../../layouts/PageLayout.astro
### Notes
-:::warning
+:::note
The `max_assign_dist` parameter should not be set overly low. The `max_assign_dist` parameter sets a crow-flies
distance limit on how far the algorithm will search in its attempts to encircle the data point. If the
`max_assign_dist` is too small, then the algorithm is potentially hampered from finding a starting node; or, if a
@@ -110,7 +137,78 @@ representation of variations of metrics along street-fronts.
-
compute_accessibilities( data_gdf: geopandas.geodataframe.GeoDataFrame, landuse_column_label: str, accessibility_keys: list[str], nodes_gdf: geopandas.geodataframe.GeoDataFrame, network_structure: NetworkStructure, max_netw_assign_dist: int = 400, distances: list[int] | None = None, betas: list[float] | None = None, data_id_col: str | None = None, angular: bool = False, spatial_tolerance: int = 0, min_threshold_wt: float | None = None, jitter_scale: float = 0.0) -> tuple[geopandas.geodataframe.GeoDataFrame, geopandas.geodataframe.GeoDataFrame]:
+
compute_accessibilities
+
(
+
+ data_gdf
+ :
+ geopandas.geodataframe.GeoDataFrame
+
+
+ landuse_column_label
+ :
+ str
+
+
+ accessibility_keys
+ :
+ list[str]
+
+
+ nodes_gdf
+ :
+ geopandas.geodataframe.GeoDataFrame
+
+
+ network_structure
+ :
+ NetworkStructure
+
+
+ max_netw_assign_dist
+ :
+ int = 400
+
+
+ distances
+ :
+ list[int] | None = None
+
+
+ betas
+ :
+ list[float] | None = None
+
+
+ data_id_col
+ :
+ str | None = None
+
+
+ angular
+ :
+ bool = False
+
+
+ spatial_tolerance
+ :
+ int = 0
+
+
+ min_threshold_wt
+ :
+ float | None = None
+
+
+ jitter_scale
+ :
+ float = 0.0
+
+
)->[
+
GeoDataFrame
+
GeoDataFrame
+
]
+
@@ -305,7 +403,93 @@ print(nodes_gdf["cc_metric_c_400_non_weighted"])
-
compute_mixed_uses( data_gdf: geopandas.geodataframe.GeoDataFrame, landuse_column_label: str, nodes_gdf: geopandas.geodataframe.GeoDataFrame, network_structure: NetworkStructure, max_netw_assign_dist: int = 400, compute_hill: bool | None = True, compute_hill_weighted: bool | None = True, compute_shannon: bool | None = False, compute_gini: bool | None = False, distances: list[int] | None = None, betas: list[float] | None = None, data_id_col: str | None = None, angular: bool = False, spatial_tolerance: int = 0, min_threshold_wt: float | None = None, jitter_scale: float = 0.0) -> tuple[geopandas.geodataframe.GeoDataFrame, geopandas.geodataframe.GeoDataFrame]:
+
compute_mixed_uses
+
(
+
+ data_gdf
+ :
+ geopandas.geodataframe.GeoDataFrame
+
+
+ landuse_column_label
+ :
+ str
+
+
+ nodes_gdf
+ :
+ geopandas.geodataframe.GeoDataFrame
+
+
+ network_structure
+ :
+ NetworkStructure
+
+
+ max_netw_assign_dist
+ :
+ int = 400
+
+
+ compute_hill
+ :
+ bool | None = True
+
+
+ compute_hill_weighted
+ :
+ bool | None = True
+
+
+ compute_shannon
+ :
+ bool | None = False
+
+
+ compute_gini
+ :
+ bool | None = False
+
+
+ distances
+ :
+ list[int] | None = None
+
+
+ betas
+ :
+ list[float] | None = None
+
+
+ data_id_col
+ :
+ str | None = None
+
+
+ angular
+ :
+ bool = False
+
+
+ spatial_tolerance
+ :
+ int = 0
+
+
+ min_threshold_wt
+ :
+ float | None = None
+
+
+ jitter_scale
+ :
+ float = 0.0
+
+
)->[
+
GeoDataFrame
+
GeoDataFrame
+
]
+
@@ -550,7 +734,73 @@ been applied.
-
compute_stats( data_gdf: geopandas.geodataframe.GeoDataFrame, stats_column_label: str | list[str] | tuple[str], nodes_gdf: geopandas.geodataframe.GeoDataFrame, network_structure: NetworkStructure, max_netw_assign_dist: int = 400, distances: list[int] | None = None, betas: list[float] | None = None, data_id_col: str | None = None, angular: bool = False, spatial_tolerance: int = 0, min_threshold_wt: float | None = None, jitter_scale: float = 0.0) -> tuple[geopandas.geodataframe.GeoDataFrame, geopandas.geodataframe.GeoDataFrame]:
+
compute_stats
+
(
+
+ data_gdf
+ :
+ geopandas.geodataframe.GeoDataFrame
+
+
+ stats_column_label
+ :
+ str | list[str] | tuple[str]
+
+
+ nodes_gdf
+ :
+ geopandas.geodataframe.GeoDataFrame
+
+
+ network_structure
+ :
+ NetworkStructure
+
+
+ max_netw_assign_dist
+ :
+ int = 400
+
+
+ distances
+ :
+ list[int] | None = None
+
+
+ betas
+ :
+ list[float] | None = None
+
+
+ data_id_col
+ :
+ str | None = None
+
+
+ angular
+ :
+ bool = False
+
+
+ spatial_tolerance
+ :
+ int = 0
+
+
+ min_threshold_wt
+ :
+ float | None = None
+
+
+ jitter_scale
+ :
+ float = 0.0
+
+
)->[
+
GeoDataFrame
+
GeoDataFrame
+
]
+
@@ -740,3 +990,4 @@ computed distances:
+
diff --git a/docs/src/pages/metrics/networks.md b/docs/src/pages/metrics/networks.md
index cb3f7ea8..1b9ad36b 100644
--- a/docs/src/pages/metrics/networks.md
+++ b/docs/src/pages/metrics/networks.md
@@ -1,6 +1,7 @@
---
layout: ../../layouts/PageLayout.astro
---
+
# networks
@@ -57,7 +58,52 @@ may therefore be preferable when working at small thresholds on decomposed netwo
-
node_centrality_shortest( network_structure: NetworkStructure, nodes_gdf: geopandas.geodataframe.GeoDataFrame, distances: list[int] | None = None, betas: list[float] | None = None, compute_closeness: bool | None = True, compute_betweenness: bool | None = True, min_threshold_wt: float = 0.01831563888873418, jitter_scale: float = 0.0) -> geopandas.geodataframe.GeoDataFrame:
+
node_centrality_shortest
+
(
+
+ network_structure
+ :
+ NetworkStructure
+
+
+ nodes_gdf
+ :
+ geopandas.geodataframe.GeoDataFrame
+
+
+ distances
+ :
+ list[int] | None = None
+
+
+ betas
+ :
+ list[float] | None = None
+
+
+ compute_closeness
+ :
+ bool | None = True
+
+
+ compute_betweenness
+ :
+ bool | None = True
+
+
+ min_threshold_wt
+ :
+ float = 0.01831563888873418
+
+
+ jitter_scale
+ :
+ float = 0.0
+
+
)->[
+
GeoDataFrame
+
]
+
@@ -183,7 +229,52 @@ network representations.
-
node_centrality_simplest( network_structure: NetworkStructure, nodes_gdf: geopandas.geodataframe.GeoDataFrame, distances: list[int] | None = None, betas: list[float] | None = None, compute_closeness: bool | None = True, compute_betweenness: bool | None = True, min_threshold_wt: float = 0.01831563888873418, jitter_scale: float = 0.0) -> geopandas.geodataframe.GeoDataFrame:
+
node_centrality_simplest
+
(
+
+ network_structure
+ :
+ NetworkStructure
+
+
+ nodes_gdf
+ :
+ geopandas.geodataframe.GeoDataFrame
+
+
+ distances
+ :
+ list[int] | None = None
+
+
+ betas
+ :
+ list[float] | None = None
+
+
+ compute_closeness
+ :
+ bool | None = True
+
+
+ compute_betweenness
+ :
+ bool | None = True
+
+
+ min_threshold_wt
+ :
+ float = 0.01831563888873418
+
+
+ jitter_scale
+ :
+ float = 0.0
+
+
)->[
+
GeoDataFrame
+
]
+
@@ -306,7 +397,52 @@ The following keys use the simplest-path (shortest-angular-path) heuristic, and
-
segment_centrality( network_structure: NetworkStructure, nodes_gdf: geopandas.geodataframe.GeoDataFrame, distances: list[int] | None = None, betas: list[float] | None = None, compute_closeness: bool | None = True, compute_betweenness: bool | None = True, min_threshold_wt: float = 0.01831563888873418, jitter_scale: float = 0.0) -> geopandas.geodataframe.GeoDataFrame:
+
segment_centrality
+
(
+
+ network_structure
+ :
+ NetworkStructure
+
+
+ nodes_gdf
+ :
+ geopandas.geodataframe.GeoDataFrame
+
+
+ distances
+ :
+ list[int] | None = None
+
+
+ betas
+ :
+ list[float] | None = None
+
+
+ compute_closeness
+ :
+ bool | None = True
+
+
+ compute_betweenness
+ :
+ bool | None = True
+
+
+ min_threshold_wt
+ :
+ float = 0.01831563888873418
+
+
+ jitter_scale
+ :
+ float = 0.0
+
+
)->[
+
GeoDataFrame
+
]
+
@@ -420,3 +556,4 @@ The following keys use the simplest-path (shortest-angular-path) heuristic, and
+
diff --git a/docs/src/pages/metrics/observe.md b/docs/src/pages/metrics/observe.md
index 0ed8b53d..58406e98 100644
--- a/docs/src/pages/metrics/observe.md
+++ b/docs/src/pages/metrics/observe.md
@@ -1,29 +1,49 @@
---
layout: ../../layouts/PageLayout.astro
---
+
# observe
-Observe module for computing observations derived from `networkX` graphs. These methods are generally sufficiently simple that further computational optimisation is not required. Network centrality methods (which do require further computational optimisation due to their complexity) are handled separately in the [`networks`](/metrics/networks) module.
+
+ Observe module for computing observations derived from `networkX` graphs. These methods are generally sufficiently simple that further computational optimisation is not required. Network centrality methods (which do require further computational optimisation due to their complexity) are handled separately in the [`networks`](/metrics/networks) module.
+
+
## ContinuityEntry
-State management for an individual street continuity entry. This corresponds to an individual street name, route name or number, or highway type.
+
+
+ State management for an individual street continuity entry. This corresponds to an individual street name, route name or number, or highway type.
+
+
## ContinuityEntry
+
-
ContinuityEntry(entry_name: str)
+
ContinuityEntry
+
(
+
+ entry_name
+ :
+ str
+
+
)
+
-Instances a continuity entry.
+
+ Instances a continuity entry.
+
+
## generate_key
@@ -31,95 +51,224 @@ Instances a continuity entry.
@staticmethod
-
generate_key(start_nd_key: str, end_nd_key: str, edge_idx: int):
+
generate_key
+
(
+
+ start_nd_key
+ :
+ str
+
+
+ end_nd_key
+ :
+ str
+
+
+ edge_idx
+ :
+ int
+
+
)
+
+
-Generate a unique key given uncertainty of start and end node order.
+ Generate a unique key given uncertainty of start and end node order.
+
+
## add_edge
+
-
add_edge( self, length: float, start_nd_key: str, end_nd_key: str, edge_idx: int) -> None:
+
add_edge
+
(
+
+ self
+
+
+ length
+ :
+ float
+
+
+ start_nd_key
+ :
+ str
+
+
+ end_nd_key
+ :
+ str
+
+
+ edge_idx
+ :
+ int
+
+
)
+
-Adds edge details to a continuity entry.
+
+ Adds edge details to a continuity entry.
+
+
+
## StreetContinuityReport
-State management for a collection of street continuity metrics. Each key in the `entries` attribute corresponds to a `ContinuityEntry`.
+
+
+ State management for a collection of street continuity metrics. Each key in the `entries` attribute corresponds to a `ContinuityEntry`.
+
+
## StreetContinuityReport
+
-
StreetContinuityReport(method: str)
+
StreetContinuityReport
+
(
+
+ method
+ :
+ str
+
+
)
+
-Instance a street continuity report.
+
+ Instance a street continuity report.
+
+
## scaffold_entry
+
-
scaffold_entry(self, entry_name: str) -> None:
+
scaffold_entry
+
(
+
+ self
+
+
+ entry_name
+ :
+ str
+
+
)
+
+
-Adds a new continuity entry to the report's entries.
+ Adds a new continuity entry to the report's entries.
+
+
## report_by_count
+
-
report_by_count(self, n_items: int = 10) -> None:
+
report_by_count
+
(
+
+ self
+
+
+ n_items
+ :
+ int = 10
+
+
)
+
-Print a report sorted by entry counts.
+
+ Print a report sorted by entry counts.
+
+
## report_by_length
+
-
report_by_length(self, n_items: int = 10) -> None:
+
report_by_length
+
(
+
+ self
+
+
+ n_items
+ :
+ int = 10
+
+
)
+
-Print a report sorted by entry lengths.
+
+ Print a report sorted by entry lengths.
+
+
## street_continuity
+
-
street_continuity( nx_multigraph: networkx.classes.multigraph.MultiGraph, method: str | tuple[str, str]) -> tuple[networkx.classes.multigraph.MultiGraph, StreetContinuityReport]:
+
street_continuity
+
(
+
+ nx_multigraph
+ :
+ networkx.classes.multigraph.MultiGraph
+
+
+ method
+ :
+ str | tuple[str, str]
+
+
)->[
+
MultiGraph
+
StreetContinuityReport
+
]
+
-Compute the street continuity for a given graph. This requires a graph with `names`, `routes`, or `highways` edge keys corresponding to the selected `method` parameter. These keys are available if importing an OSM network with [`osm_graph_from_poly`](/tools/io#osm-graph-from-poly) or if importing OS Open Roads data with [nx_from_open_roads](/tools/io#nx-from-open-roads).
+ Compute the street continuity for a given graph. This requires a graph with `names`, `routes`, or `highways` edge keys corresponding to the selected `method` parameter. These keys are available if importing an OSM network with [`osm_graph_from_poly`](/tools/io#osm-graph-from-poly) or if importing OS Open Roads data with [nx_from_open_roads](/tools/io#nx-from-open-roads).
### Parameters
-
nx_multigraph
@@ -127,8 +276,7 @@ Compute the street continuity for a given graph. This requires a graph with `nam
-A `networkX` `MultiGraph` in a projected coordinate system, containing `x` and `y` node attributes, and `geom` edge attributes containing `LineString` geoms. Edges should contain "names", "routes", or "highways" keys corresponding to the specified `method` parameter.
-
+ A `networkX` `MultiGraph` in a projected coordinate system, containing `x` and `y` node attributes, and `geom` edge attributes containing `LineString` geoms. Edges should contain "names", "routes", or "highways" keys corresponding to the specified `method` parameter.
@@ -138,12 +286,10 @@ A `networkX` `MultiGraph` in a projected coordinate system, containing `x` and `
-The type of continuity metric to compute, where available options are "names", "routes", or "highways".
-
+ The type of continuity metric to compute, where available options are "names", "routes", or "highways".
### Returns
-
@@ -162,24 +307,36 @@ A copy of the input `networkX` `MultiGraph` with new edge keys corresponding to
-An instance of [`StreetContinuityReport`](/metrics/observe#streetcontinuityreport) containing the computed state for the selected method.
-
+ An instance of [`StreetContinuityReport`](/metrics/observe#streetcontinuityreport) containing the computed state for the selected method.
+
+
@@ -211,8 +365,12 @@ A copy of the input `networkX` `MultiGraph` with new edge keys corresponding to
-An instance of [`StreetContinuityReport`](/metrics/observe#streetcontinuityreport) containing the computed state for the "hybrid" method.
-
+ An instance of [`StreetContinuityReport`](/metrics/observe#streetcontinuityreport) containing the computed state for the "hybrid" method.
+
+
+
+
+
diff --git a/docs/src/pages/tools/graphs.md b/docs/src/pages/tools/graphs.md
index a7480f16..c2c5eab6 100644
--- a/docs/src/pages/tools/graphs.md
+++ b/docs/src/pages/tools/graphs.md
@@ -1,6 +1,7 @@
---
layout: ../../layouts/PageLayout.astro
---
+