diff --git a/.gitignore b/.gitignore index 0889d95..00b5c89 100644 --- a/.gitignore +++ b/.gitignore @@ -169,3 +169,5 @@ node_modules/ .ruff_cache/ .env-cdk + +.envrc diff --git a/README.md b/README.md index 712ecd6..bf596b9 100644 --- a/README.md +++ b/README.md @@ -64,9 +64,9 @@ Once the applications are *up*, you'll need to add STAC **Collections** and **It Then you can start exploring your dataset with: - - the STAC Metadata service [http://localhost:8081](http://localhost:8081) - - the Raster service [http://localhost:8082](http://localhost:8082) - - the browser UI [http://localhost:8085](http://localhost:8085) +- the STAC Metadata service [http://localhost:8081](http://localhost:8081) +- the Raster service [http://localhost:8082](http://localhost:8082) +- the browser UI [http://localhost:8085](http://localhost:8085) If you've added a vector dataset to the `public` schema in the Postgres database, they will be available through the **Vector** service at [http://localhost:8083](http://localhost:8083). @@ -113,3 +113,16 @@ Then, deploy ``` npx cdk deploy --all --require-approval never ``` + +## Development + +```shell +source .venv/bin/activate + +python -m pip install -e \ + 'runtimes/business/logic' \ + 'runtimes/eoapi/raster' \ + 'runtimes/eoapi/stac' \ + 'runtimes/eoapi/vector' + +``` diff --git a/docker-compose.yml b/docker-compose.yml index 2ec13b0..c886fbb 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -127,14 +127,15 @@ services: - HOST=0.0.0.0 - PORT=8083 - WEB_CONCURRENCY=10 - - POSTGRES_USER=username - - POSTGRES_PASS=password + - POSTGRES_USER=business + - POSTGRES_PASS=casual - POSTGRES_DBNAME=postgis - POSTGRES_HOST=database - POSTGRES_PORT=5432 - DB_MIN_CONN_SIZE=1 - DB_MAX_CONN_SIZE=10 - EOAPI_VECTOR_DEBUG=TRUE + - EOAPI_VECTOR_SCHEMAS=["business"] env_file: - path: .env required: false @@ -153,6 +154,44 @@ services: volumes: - ./dockerfiles/scripts:/tmp/scripts + business: + build: + context: . 
+ dockerfile: dockerfiles/Dockerfile.business + ports: + - "${MY_DOCKER_IP:-127.0.0.1}:8084:8084" + environment: + - PYTHONUNBUFFERED=1 + # Application + - HOST=0.0.0.0 + - PORT=8084 + - WEB_CONCURRENCY=10 + - POSTGRES_USER=business + - POSTGRES_PASS=casual + - POSTGRES_DBNAME=postgis + - POSTGRES_HOST=database + - POSTGRES_PORT=5432 + - DB_MIN_CONN_SIZE=1 + - DB_MAX_CONN_SIZE=10 + - DEBUG=True + env_file: + - path: .env + required: false + - path: .business.env + required: false + command: bash -c "bash /tmp/scripts/wait-for-it.sh -t 120 -h database -p 5432 && /start.sh" + develop: + watch: + - action: sync+restart + path: ./runtimes/business/logic/business + target: /opt/bitnami/python/lib/python3.11/site-packages/business + - action: rebuild + path: ./runtimes/business/logic/pyproject.toml + depends_on: + - database + volumes: + - ./dockerfiles/scripts:/tmp/scripts + database: image: ghcr.io/stac-utils/pgstac:v0.8.5 environment: @@ -167,6 +206,7 @@ services: command: postgres -N 500 volumes: - ./.pgdata:/var/lib/postgresql/data + - ./scripts/init-business-user.sql:/docker-entrypoint-initdb.d/zzz-init-business-user.sql networks: default: diff --git a/dockerfiles/Dockerfile.business b/dockerfiles/Dockerfile.business new file mode 100644 index 0000000..77e65c8 --- /dev/null +++ b/dockerfiles/Dockerfile.business @@ -0,0 +1,12 @@ +ARG PYTHON_VERSION=3.11 + +FROM ghcr.io/vincentsarago/uvicorn-gunicorn:${PYTHON_VERSION} + +ENV CURL_CA_BUNDLE /etc/ssl/certs/ca-certificates.crt + +COPY runtimes/business/logic /tmp/logic +RUN pip install /tmp/logic +RUN rm -rf /tmp/logic + +ENV MODULE_NAME business.logic.main +ENV VARIABLE_NAME app diff --git a/infrastructure/__init__.py b/infrastructure/__init__.py index e69de29..3dc1f76 100644 --- a/infrastructure/__init__.py +++ b/infrastructure/__init__.py @@ -0,0 +1 @@ +__version__ = "0.1.0" diff --git a/requirements.txt b/requirements.txt index b314925..cb28fbe 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,7 @@ 
-eoapi-cdk==7.2.0 +eoapi-cdk==7.2.1 pydantic==2.7 pydantic-settings[yaml]==2.2.1 +pystac_client==0.8.3 +pypgstac[psycopg]==0.8.5 boto3==1.24.15 typing-extensions diff --git a/runtimes/business/logic/README.md b/runtimes/business/logic/README.md new file mode 100644 index 0000000..bfa2450 --- /dev/null +++ b/runtimes/business/logic/README.md @@ -0,0 +1 @@ +# Business Logic diff --git a/runtimes/business/logic/business/logic/__init__.py b/runtimes/business/logic/business/logic/__init__.py new file mode 100644 index 0000000..93405e7 --- /dev/null +++ b/runtimes/business/logic/business/logic/__init__.py @@ -0,0 +1,3 @@ +"""business.app.""" + +__version__ = "0.1.0" diff --git a/runtimes/business/logic/business/logic/config.py b/runtimes/business/logic/business/logic/config.py new file mode 100644 index 0000000..b459bdf --- /dev/null +++ b/runtimes/business/logic/business/logic/config.py @@ -0,0 +1,61 @@ +"""Settings.""" + +from enum import Enum +from typing import Any + +from pydantic_core.core_schema import FieldValidationInfo +from pydantic import PostgresDsn, field_validator +from pydantic_settings import BaseSettings + + +class ModeEnum(str, Enum): + development = "development" + production = "production" + testing = "testing" + + +class Settings(BaseSettings): + """Settings""" + + mode: ModeEnum = ModeEnum.development + postgres_user: str + postgres_pass: str + postgres_dbname: str + postgres_host: str + postgres_port: int + async_database_uri: PostgresDsn | str = "" + + cors_origins: str = "*" + cors_methods: str = "GET,POST,OPTIONS" + cachecontrol: str = "public, max-age=3600" + debug: bool = False + root_path: str = "" + + model_config = { + "env_file": ".env", + "extra": "allow", + } + + @field_validator("async_database_uri", mode="after") + def assemble_db_connection(cls, v: str | None, info: FieldValidationInfo) -> Any: + if isinstance(v, str): + if v == "": + return PostgresDsn.build( + scheme="postgresql+asyncpg", + username=info.data["postgres_user"], + 
password=info.data["postgres_pass"], + host=info.data["postgres_host"], + port=info.data["postgres_port"], + path=info.data["postgres_dbname"], + ) + return v + + @field_validator("cors_origins") + def parse_cors_origin(cls, v): + """Parse CORS origins.""" + return [origin.strip() for origin in v.split(",")] + + @field_validator("cors_methods") + def parse_cors_methods(cls, v): + """Parse CORS methods.""" + return [method.strip() for method in v.split(",")] diff --git a/runtimes/business/logic/business/logic/main.py b/runtimes/business/logic/business/logic/main.py new file mode 100644 index 0000000..77d7808 --- /dev/null +++ b/runtimes/business/logic/business/logic/main.py @@ -0,0 +1,65 @@ +from contextlib import asynccontextmanager +from typing import Annotated, List, Union + +from geojson_pydantic import Feature, FeatureCollection +from fastapi import Depends, FastAPI, HTTPException +from sqlmodel import SQLModel +from sqlmodel.ext.asyncio.session import AsyncSession + +from business.logic import __version__ as version +from business.logic import models +from business.logic.session import get_session, engine + + +@asynccontextmanager +async def lifespan(app: FastAPI): + # startup + async with engine.begin() as conn: + await conn.run_sync(SQLModel.metadata.create_all) + + yield + + # shutdown + + +Session = Annotated[AsyncSession, Depends(get_session)] + +app = FastAPI( + title="Business Logic", + version=version, + lifespan=lifespan, +) + + +@app.post("/properties") +async def create_property( + session: Session, geojson: Union[Feature, FeatureCollection] +) -> List[int]: + if isinstance(geojson, Feature): + geojson = FeatureCollection( + type="FeatureCollection", + features=[geojson], + ) + + ids = [] + for feature in geojson.features: + property = models.Property(geometry=feature.geometry.wkt) + session.add(property) + await session.flush() + ids.append(property.id) + + await session.commit() + + return ids + + +@app.get("/properties/{id}", 
response_model=models.Property) +async def get_property(session: Session, id: int): + property = await session.get(models.Property, id) + + if not property: + raise HTTPException( + status_code=404, detail=f"No properties with id {id} found!" + ) + + return property diff --git a/runtimes/business/logic/business/logic/models.py b/runtimes/business/logic/business/logic/models.py new file mode 100644 index 0000000..d9015c7 --- /dev/null +++ b/runtimes/business/logic/business/logic/models.py @@ -0,0 +1,14 @@ +from typing import Optional +from geoalchemy2 import Geometry +from pydantic import BaseModel +from sqlmodel import Column, Field, SQLModel + + +class PropertyCreate(BaseModel): + geometry: str = Field( + default=None, sa_column=Column(Geometry("MULTIPOLYGON", srid=4326)) + ) + + +class Property(PropertyCreate, SQLModel, table=True): + id: Optional[int] = Field(default=None, primary_key=True) diff --git a/runtimes/business/logic/business/logic/session.py b/runtimes/business/logic/business/logic/session.py new file mode 100644 index 0000000..efb919a --- /dev/null +++ b/runtimes/business/logic/business/logic/session.py @@ -0,0 +1,22 @@ +from collections.abc import AsyncGenerator +from sqlalchemy.orm import sessionmaker +from business.logic.config import ModeEnum, Settings +from sqlalchemy.ext.asyncio import create_async_engine +from sqlmodel.ext.asyncio.session import AsyncSession +from sqlalchemy.pool import NullPool, AsyncAdaptedQueuePool + +settings = Settings() + +engine = create_async_engine( + str(settings.async_database_uri), + poolclass=NullPool + if settings.mode == ModeEnum.testing + else AsyncAdaptedQueuePool, # Asyncio pytest works with NullPool + echo=settings.mode == ModeEnum.development, +) + + +async def get_session() -> AsyncGenerator: + async_session = sessionmaker(engine, class_=AsyncSession, expire_on_commit=False) + async with async_session() as session: + yield session diff --git a/runtimes/business/logic/pyproject.toml 
b/runtimes/business/logic/pyproject.toml new file mode 100644 index 0000000..cf718aa --- /dev/null +++ b/runtimes/business/logic/pyproject.toml @@ -0,0 +1,30 @@ +[project] +name = "business.logic" +description = "Business logic" +readme = "README.md" +requires-python = ">=3.8" +authors = [ + {name = "Henry Rodman", email = "henry@developmentseed.com"}, +] +license = {text = "MIT"} +dynamic = ["version"] +dependencies = [ + "asyncpg", + "fastapi>=0.112.1", + "geoalchemy2>=0.15.2", + "geojson_pydantic>=1.1.0", + "pydantic_settings>=2.4.0", + "sqlmodel>=0.0.21", +] + +[build-system] +requires = ["pdm-pep517"] +build-backend = "pdm.pep517.api" + +[tool.pdm.version] +source = "file" +path = "business/logic/__init__.py" + +[tool.pdm.build] +includes = ["business/logic"] +excludes = ["tests/", "**/.mypy_cache", "**/.DS_Store"] diff --git a/runtimes/eoapi/vector/eoapi/vector/app.py b/runtimes/eoapi/vector/eoapi/vector/app.py index 0a1183e..d10b1a7 100644 --- a/runtimes/eoapi/vector/eoapi/vector/app.py +++ b/runtimes/eoapi/vector/eoapi/vector/app.py @@ -58,8 +58,7 @@ async def lifespan(app: FastAPI): await connect_to_db( app, settings=postgres_settings, - # We enable both pgstac and public schemas (pgstac will be used by custom functions) - schemas=["pgstac", "public"], + schemas=settings.schemas, user_sql_files=list(CUSTOM_SQL_DIRECTORY.glob("*.sql")), # type: ignore ) @@ -67,7 +66,7 @@ async def lifespan(app: FastAPI): await register_collection_catalog( app, # For the Tables' Catalog we only use the `public` schema - schemas=["public"], + schemas=settings.schemas, # We exclude public functions exclude_function_schemas=["public"], # We allow non-spatial tables diff --git a/runtimes/eoapi/vector/eoapi/vector/config.py b/runtimes/eoapi/vector/eoapi/vector/config.py index 1b5d1fa..26b3369 100644 --- a/runtimes/eoapi/vector/eoapi/vector/config.py +++ b/runtimes/eoapi/vector/eoapi/vector/config.py @@ -1,6 +1,6 @@ """API settings.""" -from pydantic import field_validator +from 
typing import List from pydantic_settings import BaseSettings @@ -8,8 +8,9 @@ class ApiSettings(BaseSettings): """API settings""" name: str = "eoAPI-vector" - cors_origins: str = "*" - cors_methods: str = "GET" + schemas: List[str] = ["pgstac", "public"] + cors_origins: List[str] = ["*"] + cors_methods: List[str] = ["GET"] cachecontrol: str = "public, max-age=3600" debug: bool = False root_path: str = "" @@ -21,13 +22,3 @@ class ApiSettings(BaseSettings): "env_file": ".env", "extra": "allow", } - - @field_validator("cors_origins") - def parse_cors_origin(cls, v): - """Parse CORS origins.""" - return [origin.strip() for origin in v.split(",")] - - @field_validator("cors_methods") - def parse_cors_methods(cls, v): - """Parse CORS methods.""" - return [method.strip() for method in v.split(",")] diff --git a/runtimes/eoapi/vector/eoapi/vector/sql/functions.sql b/runtimes/eoapi/vector/eoapi/vector/sql/functions.sql deleted file mode 100644 index 4b9ac28..0000000 --- a/runtimes/eoapi/vector/eoapi/vector/sql/functions.sql +++ /dev/null @@ -1,184 +0,0 @@ -CREATE OR REPLACE FUNCTION pg_temp.jsonb2timestamptz(j jsonb) RETURNS timestamptz AS $$ - SELECT - (nullif(j->>0, 'null'))::timestamptz; -$$ LANGUAGE SQL IMMUTABLE STRICT; - --- Functions returning Collections available in PgSTAC -CREATE OR REPLACE VIEW pg_temp.pgstac_collections_view AS -SELECT - id, - pg_temp.jsonb2timestamptz(content->'extent'->'temporal'->'interval'->0->0) as start_datetime, - pg_temp.jsonb2timestamptz(content->'extent'->'temporal'->'interval'->0->1) AS end_datetime, - ST_MakeEnvelope( - (content->'extent'->'spatial'->'bbox'->0->>0)::float, - (content->'extent'->'spatial'->'bbox'->0->>1)::float, - (content->'extent'->'spatial'->'bbox'->0->>2)::float, - (content->'extent'->'spatial'->'bbox'->0->>3)::float, - 4326 - ) as geom, - content -FROM pgstac.collections; - --- Functions returning the Searches available in PgSTAC -CREATE OR REPLACE FUNCTION pg_temp.pgstac_hash( - IN queryhash text, - IN bounds 
geometry DEFAULT ST_MakeEnvelope(-180,-90,180,90,4326), - -- IN fields jsonb DEFAULT NULL, - -- IN _scanlimit int DEFAULT 10000, - -- IN _limit int DEFAULT 100, - -- IN _timelimit interval DEFAULT '5 seconds'::interval, - -- IN exitwhenfull boolean DEFAULT TRUE, - -- IN skipcovered boolean DEFAULT TRUE, - OUT id text, - OUT geom geometry, - OUT content jsonb -) RETURNS SETOF RECORD AS $$ -DECLARE - _scanlimit int := 10000; -- remove if add params back in - fields jsonb := '{}'::jsonb; -- remove if add params back in - search searches%ROWTYPE; - curs refcursor; - _where text; - query text; - iter_record items%ROWTYPE; - -- out_records jsonb := '{}'::jsonb[]; - -- exit_flag boolean := FALSE; - -- counter int := 1; - -- scancounter int := 1; - remaining_limit int := _scanlimit; - -- tilearea float; - -- unionedgeom geometry; - -- clippedgeom geometry; - -- unionedgeom_area float := 0; - -- prev_area float := 0; - -- excludes text[]; - -- includes text[]; - -BEGIN - - -- IF skipcovered THEN - -- exitwhenfull := TRUE; - -- END IF; - - SELECT * INTO search FROM searches WHERE hash=queryhash; - - IF NOT FOUND THEN - RAISE EXCEPTION 'Search with Query Hash % Not Found', queryhash; - END IF; - - IF st_srid(bounds) != 4326 THEN - bounds := ST_Transform(bounds, 4326); - END IF; - - -- tilearea := st_area(bounds); - _where := format( - '%s AND st_intersects(geometry, %L::geometry)', - search._where, - bounds - ); - - - FOR query IN SELECT * FROM partition_queries(_where, search.orderby) LOOP - query := format('%s LIMIT %L', query, remaining_limit); - OPEN curs FOR EXECUTE query; - LOOP - FETCH curs INTO iter_record; - EXIT WHEN NOT FOUND; - -- IF exitwhenfull OR skipcovered THEN - -- clippedgeom := st_intersection(geom, iter_record.geometry); - - -- IF unionedgeom IS NULL THEN - -- unionedgeom := clippedgeom; - -- ELSE - -- unionedgeom := st_union(unionedgeom, clippedgeom); - -- END IF; - - -- unionedgeom_area := st_area(unionedgeom); - - -- IF skipcovered AND prev_area = 
unionedgeom_area THEN - -- scancounter := scancounter + 1; - -- CONTINUE; - -- END IF; - - -- prev_area := unionedgeom_area; - - -- END IF; - - id := iter_record.id; - geom := iter_record.geometry; - content := content_hydrate(iter_record, fields); - RETURN NEXT; - - -- IF counter >= _limit - -- OR scancounter > _scanlimit - -- OR ftime() > _timelimit - -- OR (exitwhenfull AND unionedgeom_area >= tilearea) - -- THEN - -- exit_flag := TRUE; - -- EXIT; - -- END IF; - -- counter := counter + 1; - -- scancounter := scancounter + 1; - - END LOOP; - CLOSE curs; - -- EXIT WHEN exit_flag; - -- remaining_limit := _scanlimit - scancounter; - END LOOP; - - RETURN; -END; -$$ LANGUAGE PLPGSQL; - --- Functions returning the item count per Search for the input geometry -CREATE OR REPLACE FUNCTION pg_temp.pgstac_hash_count( - IN queryhash text, - IN bounds geometry DEFAULT ST_MakeEnvelope(-180,-90,180,90,4326), - IN depth int DEFAULT 1, - OUT geom geometry, - OUT cnt bigint -) RETURNS SETOF RECORD AS $$ -DECLARE - search record; - xmin float := ST_XMin(bounds); - xmax float := ST_XMax(bounds); - ymin float := ST_YMin(bounds); - ymax float := ST_YMax(bounds); - w float := (xmax - xmin) / depth; - h float := (ymax - ymin) / depth; - q text; -BEGIN - SELECT * INTO search FROM pgstac.searches WHERE hash=queryhash; - DROP VIEW IF EXISTS searchitems; - EXECUTE format($q$ - CREATE TEMP VIEW searchitems AS - SELECT geometry - FROM pgstac.items WHERE %s - AND ST_Intersects(geometry, %L) - ; - $q$, - search._where, - bounds - ); - RETURN QUERY - WITH grid AS ( - SELECT - ST_MakeEnvelope( - xmin + w * (a-1), - ymin + h * (b-1), - xmin + w * a, - ymin + h * b, - 4326 - ) as geom - FROM generate_series(1, depth) a, generate_series(1, depth) b - ) - SELECT - grid.geom, - count(*) as cnt - FROM - grid - JOIN searchitems ON (ST_Intersects(searchitems.geometry, grid.geom)) - GROUP BY 1 - ; -END; -$$ LANGUAGE PLPGSQL; diff --git a/scripts/init-business-user.sql b/scripts/init-business-user.sql new 
file mode 100644 index 0000000..69ba99e --- /dev/null +++ b/scripts/init-business-user.sql @@ -0,0 +1,4 @@ +CREATE USER business WITH PASSWORD 'casual'; +CREATE SCHEMA business AUTHORIZATION business; +GRANT ALL PRIVILEGES ON SCHEMA business TO business; +ALTER ROLE business SET search_path TO business, public; diff --git a/scripts/load-stac-records b/scripts/load-stac-records new file mode 100755 index 0000000..0f9cc5e --- /dev/null +++ b/scripts/load-stac-records @@ -0,0 +1,142 @@ +#!/usr/bin/env python + +import argparse +import io +import json + +import pystac_client +from pystac.media_type import MediaType +from pystac.extensions.item_assets import AssetDefinition, ItemAssetsExtension +from pypgstac.db import PgstacDB +from pypgstac.load import Loader, Methods + + +def bbox_type(value): + return [float(coord) for coord in value.split(",")] + + +RENDER_PARAMS = { + "io-10m-annual-lulc": { + "land_cover": { + "assets": ["supercell"], + "colormap": { + 0: (0, 0, 0, 1), + 1: (65, 155, 223, 1), + 2: (57, 125, 73, 1), + 4: (122, 135, 198, 1), + 5: (228, 150, 53, 1), + 7: (196, 40, 27, 1), + 8: (165, 155, 143, 1), + 9: (168, 235, 255, 1), + 10: (97, 97, 97, 1), + 11: (227, 226, 195, 1), + }, + "resampling": "nearest", + "minzoom": 4, + "maxzoom": 9, + }, + } +} + +ITEM_ASSETS = { + "io-10m-annual-lulc": { + "supercell": AssetDefinition( + { + "type": MediaType.COG, + "roles": ["data"], + "title": "Annual Land Use and Land Cover", + "description": ( + "Time series of annual global maps of land use and land cover (LULC). " + "It currently has data from 2017-2023. The maps are derived from ESA " + "Sentinel-2 imagery at 10m resolution. Each map is a composite of LULC " + "predictions for 9 classes throughout the year in order to generate a " + "representative snapshot of each year. 
This dataset was generated by " + "Impact Observatory, who used billions of human-labeled pixels (curated " + "by the National Geographic Society) to train a deep learning model for " + "land classification. The global map was produced by applying this model " + "to the Sentinel-2 annual scene collections on the Planetary Computer. " + "Each of the maps has an assessed average accuracy of over 75%. All years " + "are available under a Creative Commons BY-4.0." + ), + } + ) + } +} + + +def main(): + parser = argparse.ArgumentParser( + description=( + "Load existing STAC collection and items into a pgstac database." + "Set the PG* environment variables to configure pgstac to point at " + "your database!" + ) + ) + + parser.add_argument( + "stac_api_url", + type=str, + help="STAC API URL", + ) + + parser.add_argument( + "collection_id", + type=str, + help="collection ID", + ) + parser.add_argument( + "--bbox", + type=bbox_type, + help="Bounding box coordinates as comma-separated values (e.g., minx,miny,maxx,maxy)", + default=None, + required=False, + ) + + args = parser.parse_args() + + # fire up pgstac loader + db = PgstacDB() + loader = Loader(db=db) + + # connect to the Impact Observatory STAC API + client = pystac_client.Client.open(args.stac_api_url) + + collection = client.get_collection(args.collection_id) + collection.clear_links() + + # item assets extension + item_assets_ext = ItemAssetsExtension.ext(collection, add_if_missing=True) + + if item_assets := ITEM_ASSETS.get(args.collection_id): + item_assets_ext.item_assets = item_assets + + if render_params := RENDER_PARAMS.get(args.collection_id): + collection.extra_fields["renders"] = render_params + + if not collection: + raise ValueError( + f"No collection with id {args.collection_id} found in {args.stac_api_url}" + ) + + loader.load_collections( + io.BytesIO(json.dumps(collection.to_dict()).encode("utf-8")), + insert_mode=Methods.upsert, + ) + + print("processing items") + + search = 
client.search(collections=[args.collection_id], bbox=args.bbox) + + item_collection = [] + for item in search.item_collection(): + item.clear_links() + item_collection.append(item) + + loader.load_items( + (item.to_dict() for item in item_collection), + insert_mode=Methods.upsert, + ) + + +if __name__ == "__main__": + main() diff --git a/tests/test_property.geojson b/tests/test_property.geojson new file mode 100644 index 0000000..f2a137f --- /dev/null +++ b/tests/test_property.geojson @@ -0,0 +1,8 @@ +{ +"type": "FeatureCollection", +"name": "test_property", +"crs": { "type": "name", "properties": { "name": "urn:ogc:def:crs:OGC:1.3:CRS84" } }, +"features": [ +{ "type": "Feature", "properties": { }, "geometry": { "type": "Polygon", "coordinates": [ [ [ -91.742578636523405, 47.909134634588504 ], [ -91.741660072737218, 47.897237538767619 ], [ -91.715756573966644, 47.896843385284519 ], [ -91.716748622855732, 47.910612343762601 ], [ -91.734605502859296, 47.910267548729266 ], [ -91.742578636523405, 47.909134634588504 ] ] ] } } +] +} diff --git a/tests/test_property.qmd b/tests/test_property.qmd new file mode 100644 index 0000000..816d4ad --- /dev/null +++ b/tests/test_property.qmd @@ -0,0 +1,27 @@ + + + + + + + + + + + + + + + + + 0 + 0 + + + + + false + + + +