Skip to content

Commit

Permalink
Support lat/lon, lat/lng etc - closes #39
Browse files Browse the repository at this point in the history
  • Loading branch information
simonw committed Jan 13, 2024
1 parent 7b4d34b commit 4680ac8
Show file tree
Hide file tree
Showing 5 changed files with 139 additions and 20 deletions.
9 changes: 8 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,14 @@ If you are deploying using the `datasette publish` command you can use the `--in

datasette publish cloudrun mydb.db --install=datasette-cluster-map

If any of your tables have a `latitude` and `longitude` column, a map will be automatically displayed.
If any of your tables have one of the following pairs of columns, a map will be automatically displayed:

- `latitude` and `longitude`
- `lat` and `lng`
- `lat` and `lon`
- `lat` and `long`
- `*_latitude` and `*_longitude`
- `*_lat` paired with `*_lng`, `*_lon` or `*_long`

## Configuration

Expand Down
102 changes: 87 additions & 15 deletions datasette_cluster_map/__init__.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
from datasette import hookimpl
from typing import List
import json


Expand All @@ -12,7 +13,7 @@

@hookimpl
def extra_js_urls(database, table, columns, view_name, datasette):
if not has_columns(database, table, columns, view_name, datasette):
if not find_columns(database, table, columns, view_name, datasette):
return []
return [
{
Expand All @@ -26,7 +27,8 @@ def extra_js_urls(database, table, columns, view_name, datasette):

@hookimpl
def extra_body_script(database, table, columns, view_name, datasette):
if not has_columns(database, table, columns, view_name, datasette):
location_columns = find_columns(database, table, columns, view_name, datasette)
if not location_columns:
return []
config = (
datasette.plugin_config("datasette-cluster-map", database=database, table=table)
Expand All @@ -43,14 +45,23 @@ def extra_body_script(database, table, columns, view_name, datasette):
json.dumps(config.get("tile_layer_options") or TILE_LAYER_OPTIONS)
)
)
for key in ("latitude_column", "longitude_column", "container"):
value = config.get(key)
if value:
js.append(
"window.DATASETTE_CLUSTER_MAP_{} = {};".format(
key.upper(), json.dumps(value)
)
if config.get("container"):
js.append(
"window.DATASETTE_CLUSTER_MAP_CONTAINER = {};".format(
json.dumps(config["container"])
)
)
# latitude_column and longitude_column
js.append(
"window.DATASETTE_CLUSTER_MAP_LATITUDE_COLUMN = {};".format(
json.dumps(location_columns[0])
)
)
js.append(
"window.DATASETTE_CLUSTER_MAP_LONGITUDE_COLUMN = {};".format(
json.dumps(location_columns[1])
)
)
js.append("window.datasette = window.datasette || {};")
js.append(
"datasette.cluster_map = {\n"
Expand All @@ -68,16 +79,77 @@ def extra_body_script(database, table, columns, view_name, datasette):
return "\n".join(js)


def has_columns(database, table, columns, view_name, datasette):
def find_columns(database, table, columns, view_name, datasette):
    """Return ``[latitude_column, longitude_column]`` if a map should be shown
    for this database/table view, otherwise an empty list.

    Columns explicitly configured via the plugin config take priority; when
    either one is missing the pair is auto-detected from the column names
    using location_columns_from_columns().
    """
    # Maps are only attached to the database and table views
    if view_name not in ("database", "table"):
        return []
    if not columns:
        return []
    # Compare case-insensitively throughout
    columns = [column.lower() for column in columns]

    config = (
        datasette.plugin_config("datasette-cluster-map", database=database, table=table)
        or {}
    )
    latitude_column = config.get("latitude_column")
    longitude_column = config.get("longitude_column")

    if not latitude_column or not longitude_column:
        # Detect those columns instead
        location_columns = location_columns_from_columns(columns)
        if not location_columns:
            return []
        latitude_column, longitude_column = location_columns

    if latitude_column.lower() in columns and longitude_column.lower() in columns:
        return [latitude_column, longitude_column]
    # Configured columns are not actually present in this table
    return []


def _match(pattern, column):
# latitude matches "latitude" or "foo_latitude"
return column.lower() == pattern or column.lower().endswith("_" + pattern)


# Column-name suffixes treated as latitude / longitude candidates by the
# wildcard matching (via _match: exact name or "*_<pattern>").
LATITUDE_PATTERNS = ["latitude", "lat"]
LONGITUDE_PATTERNS = ["longitude", "lon", "lng", "long"]
# Exact column-name pairs checked first, in this order; the first pair
# fully present in the table wins before any wildcard matching is tried.
LOCATION_PRIORITIES = (
    ("latitude", "longitude"),
    ("lat", "lon"),
    ("lat", "lng"),
    ("lat", "long"),
)


def location_columns_from_columns(columns: List[str]) -> List[str]:
    """Auto-detect a ``[latitude, longitude]`` column pair from *columns*.

    Exact pairs from LOCATION_PRIORITIES win outright; otherwise wildcard
    suffix matching is used. Returns [] if no pair is found or the wildcard
    match is ambiguous (more than one candidate on either side).
    """
    # Map lowercased name -> original casing, so we can return originals
    original_case = {col.lower(): col for col in columns}

    # Exact, prioritised pairings take precedence over wildcards
    for lat_name, lon_name in LOCATION_PRIORITIES:
        if lat_name in original_case and lon_name in original_case:
            return [original_case[lat_name], original_case[lon_name]]

    # Wildcard suffix matching; a second candidate disqualifies the table
    lat_match = None
    lon_match = None
    for col in columns:
        if any(_match(pattern, col) for pattern in LATITUDE_PATTERNS):
            if lat_match is not None:
                # Two latitude candidates - ambiguous
                return []
            lat_match = col
        elif any(_match(pattern, col) for pattern in LONGITUDE_PATTERNS):
            if lon_match is not None:
                # Two longitude candidates - ambiguous
                return []
            lon_match = col

    if lat_match is None or lon_match is None:
        return []
    return [lat_match, lon_match]
4 changes: 2 additions & 2 deletions datasette_cluster_map/static/datasette-cluster-map.js
Original file line number Diff line number Diff line change
Expand Up @@ -40,15 +40,15 @@ document.addEventListener("DOMContentLoaded", () => {
if (
col.toLowerCase() ==
(
window.DATASETTE_CLUSTER_MAP_LATITUDE_COLUMN || "latitude"
window.DATASETTE_CLUSTER_MAP_LATITUDE_COLUMN
).toLowerCase()
) {
latitudeColumn = col;
}
if (
col.toLowerCase() ==
(
window.DATASETTE_CLUSTER_MAP_LONGITUDE_COLUMN || "longitude"
window.DATASETTE_CLUSTER_MAP_LONGITUDE_COLUMN
).toLowerCase()
) {
longitudeColumn = col;
Expand Down
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@ def get_long_description():
license="Apache License, Version 2.0",
classifiers=[
"Framework :: Datasette",
"License :: OSI Approved :: Apache Software License"
"License :: OSI Approved :: Apache Software License",
],
version=VERSION,
packages=["datasette_cluster_map"],
Expand Down
42 changes: 41 additions & 1 deletion tests/test_cluster_map.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
from datasette_cluster_map import extra_js_urls
from datasette_cluster_map import location_columns_from_columns
from datasette.app import Datasette
import pytest
import sqlite_utils
Expand Down Expand Up @@ -89,6 +89,15 @@ def db_path(tmp_path_factory):
'window.DATASETTE_CLUSTER_MAP_CONTAINER = "#map-goes-here";',
],
),
# This one should detect the columns even though they aren't configured
(
{},
"places_lat_lng",
[
'window.DATASETTE_CLUSTER_MAP_LATITUDE_COLUMN = "lat";',
'window.DATASETTE_CLUSTER_MAP_LONGITUDE_COLUMN = "lng";',
],
),
],
)
async def test_plugin_config(db_path, config, table, expected_fragments):
Expand Down Expand Up @@ -157,3 +166,34 @@ async def test_respects_base_url():
).strip()
in response.text
)


@pytest.mark.parametrize(
    "input,expected",
    (
        # No columns / no location columns at all
        ([], []),
        (["a", "b"], []),
        # Exact priority pairs
        (["a", "b", "latitude", "longitude"], ["latitude", "longitude"]),
        (["a", "b", "lat", "lon", "c"], ["lat", "lon"]),
        (["a", "b", "lat", "lng", "c"], ["lat", "lng"]),
        (["a", "b", "lat", "long", "c"], ["lat", "long"]),
        # Wildcard matches
        (["a", "foo_latitude", "foo_longitude"], ["foo_latitude", "foo_longitude"]),
        (["a", "foo_latitude", "bar_longitude"], ["foo_latitude", "bar_longitude"]),
        (["a", "foo_lat", "foo_long"], ["foo_lat", "foo_long"]),
        (["a", "foo_lat", "foo_lon"], ["foo_lat", "foo_lon"]),
        (["a", "foo_lat", "foo_lng"], ["foo_lat", "foo_lng"]),
        # latitude, longitude takes priority over country_long
        # https://github.com/simonw/datasette-cluster-map/issues/39#issuecomment-1890310833
        (["country_long", "latitude", "longitude"], ["latitude", "longitude"]),
        # latitude, longitude takes priority:
        (["a", "lat", "lon", "c", "latitude", "longitude"], ["latitude", "longitude"]),
        # Ambiguous wildcards:
        (["a", "foo_lat", "foo_lng", "foo_latitude", "foo_longitude"], []),
    ),
)
def test_location_columns_from_columns(input, expected):
    # Detection result for the columns as given
    actual = location_columns_from_columns(input)
    # Detection must be case-insensitive and preserve original casing
    actual_cap = location_columns_from_columns([col.upper() for col in input])
    assert actual == expected
    assert actual_cap == [col.upper() for col in expected]

0 comments on commit 4680ac8

Please sign in to comment.