finishes new docs workflow
songololo committed Oct 15, 2023
1 parent ae56b7b commit 9dea4e1
Showing 14 changed files with 1,876 additions and 158 deletions.
61 changes: 59 additions & 2 deletions docs/generate_docs.py
@@ -1,6 +1,7 @@
from __future__ import annotations

import importlib
import inspect
from pathlib import Path

import docstring_parser
@@ -96,7 +97,7 @@ def strip_markdown(text: str) -> str:
return cleaned_text


def process_docstring(doc_str: str) -> str:
def custom_process_docstring(doc_str: str) -> str:
"""Process a docstring."""
doc_str_frag: str = ""
parsed_doc_str = docstring_parser.parse(doc_str)
@@ -163,9 +164,65 @@ def process_docstring(doc_str: str) -> str:
return doc_str_frag


def custom_format_signature(sig: inspect.Signature, colon: bool = True) -> str:
"""pdoc currently returns expanded annotations - problematic for npt.Arraylike etc."""
# First get a list with all params as strings.
params: list[str] = doc._PrettySignature._params(sig) # type: ignore
return_annot = doc._PrettySignature._return_annotation_str(sig) # type: ignore
parsed_return_annot: list[str] = []
if return_annot not in ["", "None", None]:
ra = return_annot.lstrip("tuple[")
ra = ra.rstrip("]")
rs = ra.split(",")
for r in rs:
r = r.strip()
if "." in r:
r = r.split(".")[-1]
if r.lower() not in ["any", "nonetype"]:
parsed_return_annot.append(r)
# build tags
if len(params) <= 1 and len(parsed_return_annot) <= 1:
sig_fragment: tags.div = tags.div(cls="signature")
else:
sig_fragment: tags.div = tags.div(cls="signature multiline")
with sig_fragment:
tags.span("(", cls="pt")
# nest sig params for CSS alignment
for param in params:
param_fragment = tags.div(cls="param")
if ":" in param:
param_text, annot = param.split(":")
if "any" in annot.strip().lower():
annot = None
elif annot.strip().lower().startswith("union"):
annot = None
else:
param_text = param
annot = None
with param_fragment:
tags.span(param_text, cls="pn")
if annot is not None:
with param_fragment:
tags.span(":", cls="pc")
tags.span(annot, cls="pa")
sig_fragment += param_fragment
if not parsed_return_annot:
with sig_fragment:
tags.span(")", cls="pt")
else:
with sig_fragment:
tags.span(")->[", cls="pt")
for parsed_return in parsed_return_annot:
tags.span(parsed_return, cls="pr")
tags.span("]", cls="pt")

return sig_fragment.render()


if __name__ == "__main__":
# Add custom function
render.env.filters["process_docstring"] = process_docstring # type: ignore
render.env.filters["custom_process_docstring"] = custom_process_docstring # type: ignore
render.env.filters["custom_format_signature"] = custom_format_signature # type: ignore
here = Path(__file__).parent

module_file_maps = [
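These filters hook into the Jinja environment that pdoc uses for rendering, and the custom templates below call them in place of pdoc's defaults. For orientation, a minimal sketch of the surrounding build step follows, assuming pdoc's current `doc.Module` / `render.html_module` API; the module name and output path are illustrative placeholders, and the real `module_file_maps` list is truncated above.

```python
# Hedged sketch of a pdoc build step using the filters registered above;
# the module name and output path are illustrative placeholders.
import importlib
from pathlib import Path

from pdoc import doc, render

# point pdoc at the custom templates that invoke the registered filters
render.configure(template_directory=Path(__file__).parent / "pdoc_templates")

# document a single module and render it through the custom templates
target = doc.Module(importlib.import_module("cityseer.metrics.networks"))
html = render.html_module(module=target, all_modules={target.fullname: target})

out_path = Path("build") / "networks.html"  # hypothetical destination
out_path.parent.mkdir(parents=True, exist_ok=True)
out_path.write_text(html)
```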
19 changes: 11 additions & 8 deletions docs/pdoc_templates/module.html.jinja2
@@ -1,9 +1,11 @@
{#
DON'T INDENT -> CAUSES ISSUES FOR MARKDOWN OUTPUT
PRESERVE WHITE SPACE AROUND MARKDOWN BLOCKS FOR PARSER
#}
{% extends "frame.html.jinja2" %}

{% block content %}
<section class="module">

{% block module_info %}
{% if module.namespace %}
@@ -20,6 +22,7 @@ DON'T INDENT -> CAUSES ISSUES FOR MARKDOWN OUTPUT
{% endfor %}
{% endblock %}

</section>
{% endblock content %}
{#
End of content, beginning of helper macros.
@@ -34,7 +37,6 @@ See https://pdoc.dev/docs/pdoc/render_helpers.html#DefaultMacroExtension for an
{{ submodule(doc) }}
{% else %}
{{ variable(doc) }}
{{ docstring(doc) }}
{% endif %}
{% enddefaultmacro %}

@@ -107,29 +109,30 @@ See https://pdoc.dev/docs/pdoc/render_helpers.html#DefaultMacroExtension for an
<div class="content">
{% if fn.name == "__init__" %}
<span class="name">{{ ".".join(fn.qualname.split(".")[:-1]) }}</span>
{{- fn.signature_without_self | format_signature(colon=False) | linkify }}
{{- fn.signature_without_self | custom_format_signature | safe }}
{% else %}
<span class="name">{{ fn.name }}</span>
{{- fn.signature | format_signature(colon=True) | linkify }}
{{- fn.signature | custom_format_signature | safe }}
{% endif %}
</div>
{{ docstring(fn) }}
</div>
{% enddefaultmacro %}

{% defaultmacro variable(var) -%}
<span class="name">{{ var.name }}</span>{{ annotation(var) }}{{ default_value(var) }}
{% enddefaultmacro %}

{% defaultmacro submodule(mod) -%}
<span class="name">{{ mod.taken_from | link }}</span>
{{ docstring(mod) }}
{% enddefaultmacro %}

{% defaultmacro variable(var) -%}
<span class="name">{{ var.name }}</span>{{ annotation(var) }}{{ default_value(var) }}
{{ docstring(doc) }}
{% enddefaultmacro %}

{% defaultmacro docstring(var) %}
{% if var %}
{% if var.docstring %}
{{ var.docstring | process_docstring | safe }}
{{ var.docstring | custom_process_docstring | safe }}
{% endif %}
{% endif %}
{% enddefaultmacro %}
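The template swaps pdoc's built-in `format_signature` and `process_docstring` filters for the custom ones registered on `render.env` in `generate_docs.py`. That environment is an ordinary `jinja2.Environment`, so the mechanism is plain Jinja filter registration; a minimal, self-contained sketch with a hypothetical filter name:

```python
# Standalone illustration of Jinja filter registration (hypothetical "shout" filter);
# pdoc's render.env accepts custom filters in the same way as this plain Environment.
from jinja2 import Environment

env = Environment()
env.filters["shout"] = lambda text: text.upper()

template = env.from_string("{{ 'processed docstring' | shout }}")
print(template.render())  # -> PROCESSED DOCSTRING
```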
2 changes: 1 addition & 1 deletion docs/src/layouts/PageLayout.astro
@@ -43,7 +43,7 @@ const navPaths = [
</div>
<StripeTitle />
</div>
<div id='content-col' class="md-content">
<div id='content-col'>
<slot />
<Footer />
</div>
77 changes: 51 additions & 26 deletions docs/src/pages/intro.md
@@ -119,11 +119,15 @@ from cityseer.metrics import networks
nodes_gdf, edges_gdf, network_structure = io.network_structure_from_nx(G_decomp, crs=3395)
# the underlying method allows the computation of various centralities simultaneously, e.g.
nodes_gdf = networks.segment_centrality(
network_structure=network_structure, # the network structure for which to compute the measures
nodes_gdf=nodes_gdf, # the nodes GeoDataFrame, to which the results will be written
distances=[200, 400, 800, 1600], # the distance thresholds for which to compute centralities
# the network structure for which to compute the measures
network_structure=network_structure,
# the nodes GeoDataFrame, to which the results will be written
nodes_gdf=nodes_gdf,
# the distance thresholds for which to compute centralities
distances=[200, 400, 800, 1600],
)
nodes_gdf.head() # the results are now in the GeoDataFrame
# the results are now in the GeoDataFrame
nodes_gdf.head()
```

```python
@@ -167,18 +171,26 @@ data_gdf.head()
# example easy-wrapper method for computing mixed-uses
# this is a distance weighted form of hill diversity
nodes_gdf, data_gdf = layers.compute_mixed_uses(
data_gdf, # the source data
landuse_column_label="categorical_landuses", # column in the dataframe which contains the landuse labels
nodes_gdf=nodes_gdf, # nodes GeoDataFrame - the results are written here
network_structure=network_structure, # measures will be computed relative to pedestrian distances over the network
distances=[200, 400, 800, 1600], # distance thresholds for which you want to compute the measures
# the source data
data_gdf,
# column in the dataframe which contains the landuse labels
landuse_column_label="categorical_landuses",
# nodes GeoDataFrame - the results are written here
nodes_gdf=nodes_gdf,
# measures will be computed relative to pedestrian distances over the network
network_structure=network_structure,
# distance thresholds for which you want to compute the measures
distances=[200, 400, 800, 1600],
)
print(nodes_gdf.columns) # the GeoDataFrame will contain the results of the calculations
print(nodes_gdf["cc_metric_q0_800_hill"]) # which can be retrieved as needed
# the GeoDataFrame will contain the results of the calculations
print(nodes_gdf.columns)
# which can be retrieved as needed
print(nodes_gdf["cc_metric_q0_800_hill"])
```

```python
# for curiosity's sake - plot the assignments to see which edges the data points were assigned to
# for curiosity's sake:
# plot the assignments to see which edges the data points were assigned to
plot.plot_assignment(network_structure, G_decomp, data_gdf, dpi=200, figsize=(4, 4))
```

@@ -210,15 +222,22 @@ _800m distance-weighted mixed-uses._
```python
# compute landuse accessibilities for land-use types a, b, c
nodes_gdf, data_gdf = layers.compute_accessibilities(
data_gdf, # the source data
landuse_column_label="categorical_landuses", # column in the dataframe which contains the landuse labels
accessibility_keys=["a", "b", "c"], # the landuse categories for which to compute accessibilities
nodes_gdf=nodes_gdf, # nodes GeoDataFrame - the results are written here
network_structure=network_structure, # measures will be computed relative to pedestrian distances over the network
distances=[200, 400, 800, 1600], # distance thresholds for which you want to compute the measures
# the source data
data_gdf,
# column in the dataframe which contains the landuse labels
landuse_column_label="categorical_landuses",
# the landuse categories for which to compute accessibilities
accessibility_keys=["a", "b", "c"],
# nodes GeoDataFrame - the results are written here
nodes_gdf=nodes_gdf,
# measures will be computed relative to pedestrian distances over the network
network_structure=network_structure,
# distance thresholds for which you want to compute the measures
distances=[200, 400, 800, 1600],
)
# accessibilities are computed in both weighted and unweighted forms, e.g. for "a" and "b" landuse codes
print(nodes_gdf[["cc_metric_a_800_weighted", "cc_metric_b_1600_non_weighted"]]) # and can be retrieved as needed
# accessibilities are computed in both weighted and unweighted forms
# e.g. the "a" and "b" landuse codes in weighted and non-weighted form, respectively
print(nodes_gdf[["cc_metric_a_800_weighted", "cc_metric_b_1600_non_weighted"]])
```

Aggregations can likewise be computed for numerical data. Let's generate some mock numerical data:
@@ -228,13 +247,19 @@ numerical_data_gdf = mock.mock_numerical_data(G_decomp, num_arrs=3)
numerical_data_gdf.head()
# compute stats for column mock_numerical_1
nodes_gdf, numerical_data_gdf = layers.compute_stats(
numerical_data_gdf, # the source data
stats_column_label="mock_numerical_1", # numerical column to compute stats for
nodes_gdf=nodes_gdf, # nodes GeoDataFrame - the results are written here
network_structure=network_structure, # measures will be computed relative to pedestrian distances over the network
distances=[800, 1600], # distance thresholds for which you want to compute the measures
# the source data
numerical_data_gdf,
# numerical column to compute stats for
stats_column_label="mock_numerical_1",
# nodes GeoDataFrame - the results are written here
nodes_gdf=nodes_gdf,
# measures will be computed relative to pedestrian distances over the network
network_structure=network_structure,
# distance thresholds for which you want to compute the measures
distances=[800, 1600],
)
# statistical aggregations are calculated for each requested column, and in the following forms:
# statistical aggregations are calculated for each requested column,
# and in the following forms:
# max, min, sum, sum_weighted, mean, mean_weighted, variance, variance_weighted
print(nodes_gdf["cc_metric_max_800"])
print(nodes_gdf["cc_metric_mean_wt_800"])