From c111e44be6e760b944fb2c0979bd816f5e1aee1a Mon Sep 17 00:00:00 2001 From: Emrys Roef Date: Tue, 10 Sep 2024 12:08:10 +0200 Subject: [PATCH 01/35] fix logging --- brdr/__init__.py | 8 -------- brdr/aligner.py | 4 ---- brdr/logger.py | 18 ++++++++++++------ 3 files changed, 12 insertions(+), 18 deletions(-) diff --git a/brdr/__init__.py b/brdr/__init__.py index 33b93db..d3ec452 100644 --- a/brdr/__init__.py +++ b/brdr/__init__.py @@ -1,9 +1 @@ -import logging - -logging.basicConfig( - level=logging.INFO, - format="%(asctime)s - %(message)s", - datefmt="%d-%b-%y %H:%M:%S", -) - __version__ = "0.2.0" diff --git a/brdr/aligner.py b/brdr/aligner.py index 6a8e090..78e2e5a 100644 --- a/brdr/aligner.py +++ b/brdr/aligner.py @@ -51,10 +51,6 @@ date_format = "%Y-%m-%d" -logging.basicConfig( - level=logging.INFO, format="%(asctime)s - %(message)s", datefmt="%d-%b-%y %H:%M:%S" -) - ################### diff --git a/brdr/logger.py b/brdr/logger.py index 4cadc47..682a436 100644 --- a/brdr/logger.py +++ b/brdr/logger.py @@ -1,9 +1,15 @@ import logging +import sys -logging.basicConfig( - level=logging.INFO, format="%(asctime)s - %(message)s", datefmt="%d-%b-%y %H:%M:%S" +LOGGER = logging.getLogger("BRDR") +handler = logging.StreamHandler(sys.stdout) +handler.setLevel(logging.INFO) +formatter = logging.Formatter( + fmt="%(asctime)s - %(message)s", + datefmt="%d-%b-%y %H:%M:%S", ) - +handler.setFormatter(formatter) +LOGGER.addHandler(handler) class Logger: def __init__(self, feedback=None): @@ -13,16 +19,16 @@ def feedback_debug(self, text): if self.feedback is not None: # self.feedback.pushInfo(text) return - logging.debug(text) + LOGGER.debug(text) def feedback_info(self, text): if self.feedback is not None: self.feedback.pushInfo(text) return - logging.info(text) + LOGGER.info(text) def feedback_warning(self, text): if self.feedback is not None: self.feedback.pushInfo(text) return - logging.warning(text) + LOGGER.warning(text) From c2e5d78d04debb9af973ef14531775de885b075f Mon Sep 17 00:00:00 2001 From: dieuska Date: Tue, 10 Sep 2024 12:33:04 +0200 Subject: [PATCH 02/35] version to 0.2.1 --- brdr/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/brdr/__init__.py b/brdr/__init__.py index d3ec452..3ced358 100644 --- a/brdr/__init__.py +++ b/brdr/__init__.py @@ -1 +1 @@ -__version__ = "0.2.0" +__version__ = "0.2.1" From 7ae9d12d3e560914f7506d6ef6f5fa27b1413400 Mon Sep 17 00:00:00 2001 From: dieuska Date: Tue, 10 Sep 2024 12:39:22 +0200 Subject: [PATCH 03/35] warning_to_debug --- brdr/aligner.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/brdr/aligner.py b/brdr/aligner.py index 78e2e5a..2230981 100644 --- a/brdr/aligner.py +++ b/brdr/aligner.py @@ -920,7 +920,7 @@ def _postprocess_preresult(self, preresult, geom_thematic) -> ProcessResult: 4 * pi * (geom_thematic.area / (geom_thematic.length**2)) > THRESHOLD_CIRCLE_RATIO ): - self.logger.feedback_warning( + self.logger.feedback_debug( "Circle: -->resulting geometry = original geometry" ) return {"result": geom_thematic} From 0cb1449d4218987f3ee96299a7de82be4404759c Mon Sep 17 00:00:00 2001 From: dieuska Date: Thu, 12 Sep 2024 09:21:50 +0200 Subject: [PATCH 04/35] #71 added safe_equals function to catch GEOSException errors --- brdr/aligner.py | 9 ++------- brdr/geometry_utils.py | 46 +++++++++++++++++++++++++++++++++++++++++- examples/example_ao.py | 1 + 3 files changed, 48 insertions(+), 8 deletions(-) diff --git a/brdr/aligner.py b/brdr/aligner.py index 2230981..2d116d4 100644 --- a/brdr/aligner.py +++ 
b/brdr/aligner.py @@ -10,7 +10,6 @@ from shapely import GeometryCollection from shapely import Polygon from shapely import STRtree -from shapely import equals from shapely import get_parts from shapely import make_valid from shapely import remove_repeated_points @@ -25,7 +24,7 @@ from brdr.constants import THRESHOLD_CIRCLE_RATIO from brdr.enums import GRBType from brdr.enums import OpenbaarDomeinStrategy -from brdr.geometry_utils import buffer_neg +from brdr.geometry_utils import buffer_neg, safe_equals from brdr.geometry_utils import buffer_neg_pos from brdr.geometry_utils import buffer_pos from brdr.geometry_utils import calculate_geom_by_intersection_and_reference @@ -521,11 +520,7 @@ def get_formula(self, geometry: BaseGeometry, with_geom=False): if version_date is not None and version_date > last_version_date: last_version_date = version_date - try: - equal = equals(geom_intersection, geom_reference) - except: - equal = False - if equal: + if safe_equals(geom_intersection, geom_reference): full = True area = round(geom_reference.area, 2) perc = 100 diff --git a/brdr/geometry_utils.py b/brdr/geometry_utils.py index 396f43c..6f688e7 100644 --- a/brdr/geometry_utils.py +++ b/brdr/geometry_utils.py @@ -1,7 +1,7 @@ import logging import numpy as np -from shapely import GEOSException +from shapely import GEOSException, equals from shapely import GeometryCollection from shapely import Polygon from shapely import STRtree @@ -191,6 +191,50 @@ def safe_union(geom_a: BaseGeometry, geom_b: BaseGeometry) -> BaseGeometry: return geom +def safe_equals(geom_a, geom_b): + """ + Checks equality between two geometries with error handling. + + This function computes the equality between two Shapely geometry objects + (`geom_a` and `geom_b`). It incorporates error handling to address potential + exceptions that might arise due to topological inconsistencies in the + geometries, such as non-noded intersections between linestrings. + + Args: + geom_a (BaseGeometry): The first Shapely geometry object. + geom_b (BaseGeometry): The second Shapely geometry object. + + Returns: + bool: True if the two geometries are equal, False otherwise. + + Logs: + - If a `GEOSException` occurs: + - A warning message is logged with the WKT representations of both + geometries. + - The function attempts to buffer both geometries by a small value + (0.0000001) and then perform the equality operation. + - If any other exception occurs: + - An error message is logged indicating that False is returned. + """ + # function to solve exceptional error: shapely.errors.GEOSException: + # TopologyException: found non-noded intersection between LINESTRING + # see: https://gis.stackexchange.com/questions/50399 + try: + equal = equals(geom_a, geom_b) + except GEOSException: + logging.debug("equals_error") + try: + logging.warning( + "equals_error for geoms:" + geom_a.wkt + " and " + geom_b.wkt + ) + equal = equals(buffer(geom_a, 0.0000001), buffer(geom_b, 0.0000001)) + except Exception: # noqa + logging.error("equals_error: False returned") + equal = False + + return equal + + def safe_intersection(geom_a: BaseGeometry, geom_b: BaseGeometry) -> BaseGeometry: """ Calculates the intersection of two geometries with error handling.
diff --git a/examples/example_ao.py b/examples/example_ao.py index d63c26c..557ebd9 100644 --- a/examples/example_ao.py +++ b/examples/example_ao.py @@ -16,6 +16,7 @@ # Load thematic data & reference data # dict_theme = get_oe_dict_by_ids([206363], oetype='erfgoedobjecten') aanduidingsobjecten = range(1, 10) + aanduidingsobjecten =[117798,116800,117881] dict_theme = get_oe_dict_by_ids( aanduidingsobjecten, oetype="aanduidingsobjecten" ) # noqa From e12b429cfcce0d8bc9c8564e01d9038ade012416 Mon Sep 17 00:00:00 2001 From: dieuska Date: Thu, 12 Sep 2024 11:02:05 +0200 Subject: [PATCH 05/35] #66 added brdr-version to formula --- brdr/aligner.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/brdr/aligner.py b/brdr/aligner.py index 2d116d4..e407cc1 100644 --- a/brdr/aligner.py +++ b/brdr/aligner.py @@ -17,6 +17,7 @@ from shapely import unary_union from shapely.geometry.base import BaseGeometry +from brdr import __version__ from brdr.constants import BUFFER_MULTIPLICATION_FACTOR from brdr.constants import CORR_DISTANCE from brdr.constants import DEFAULT_CRS @@ -485,6 +486,7 @@ def get_formula(self, geometry: BaseGeometry, with_geom=False): """ dict_formula = { "alignment_date": datetime.now().strftime(date_format), + "brdr_version": str(__version__), "reference_source": self.dict_reference_source, "full": True, "reference_features": {}, From e7aa0ad520a1892feeb0634b07bbf10b3c2e5521 Mon Sep 17 00:00:00 2001 From: dieuska Date: Thu, 12 Sep 2024 16:45:25 +0200 Subject: [PATCH 06/35] #65 Added OnroerendErfgoedLoader --- brdr/oe.py | 161 +++++++++++++++++++++++++++++++++++++++++ brdr/utils.py | 91 +---------------------- tests/test_examples.py | 2 +- tests/test_loader.py | 2 +- tests/test_oe.py | 49 +++++++++++++ tests/test_utils.py | 13 ++-- 6 files changed, 222 insertions(+), 96 deletions(-) create mode 100644 brdr/oe.py create mode 100644 tests/test_oe.py diff --git a/brdr/oe.py b/brdr/oe.py new file mode 100644 index 0000000..a118264 --- /dev/null +++ b/brdr/oe.py @@ -0,0 +1,161 @@ +import logging +from datetime import datetime +from enum import Enum + +import requests +from shapely import box +from shapely.geometry import shape + +from brdr.constants import DOWNLOAD_LIMIT, DEFAULT_CRS +from brdr.loader import GeoJsonLoader +from brdr.logger import LOGGER +from brdr.utils import get_collection_by_partition + +log = logging.getLogger(__name__) +date_format = "%Y-%m-%d" + + +class OEType(str, Enum): + """ + Different types of Onroerend Eefgoed-objects are available: + + * AO: aanduidingsobjecten + * EO: erfgoedobjecten + """ + + AO = "aanduidingsobjecten" + EO = "erfgoedobjecten" + + +def get_oe_dict_by_ids(objectids, oetype=OEType.AO): + """ + Fetches thematic data for a list of objectIDs from the Inventaris Onroerend Erfgoed + API. + + This function retrieves information about designated heritage objects + (erfgoedobjecten or aanduidingsobjecten) from the Flemish Agency for Heritage ( + Inventaris Onroerend Erfgoed) based on a list of their IDs. + + Args: + objectids (list): A list of objectIDs of 'erfgoedobjecten' or + 'aanduidingsobjecten'. + oetype (string): A string: 'aanduidingsobjecten' (default) or 'erfgoedobjecten' + + Returns: + dict: A dictionary where keys are objectIDs (as strings) and values are + GeoJSON geometry objects. If an erfgoedobject/aanduidingsobject is not + found, a corresponding warning message will be logged, but it won't be\ + included in the returned dictionary. + + Raises: + requests.exceptions.RequestException: If there is an error fetching data from + the API. 
+ """ + logging.warning("deprecated method, use OnroerendErfgoedLoader instead") + dict_thematic = {} + if oetype==OEType.AO: + typename = "aanduidingsobjecten" + #id_property = "aanduid_id" + elif oetype==OEType.EO: + typename = "erfgoedobjecten" + #id_property = "erfgoed_id" + else: + logging.warning("Undefined OE-type: " + str(oetype) + ": Empty collection returned") + return {},None + + base_url = "https://inventaris.onroerenderfgoed.be/" + typename + "/" + headers = {"Accept": "application/json"} + for a in objectids: + url = base_url + str(a) + response = requests.get(url, headers=headers).json() + if "id" in response.keys(): + key = str(response["id"]) + geom = shape(response["locatie"]["contour"]) + dict_thematic[key] = geom + else: + logging.warning("object id " + str(a) + " not available in " + oetype) + return dict_thematic + + +def get_collection_oe_objects(oetype=OEType.AO,objectids=None,bbox=None,limit=DOWNLOAD_LIMIT, partition=1000 ,crs=DEFAULT_CRS): + """ + Fetches GeoJSON data for designated heritage objects (aanduidingsobjecten) within + a bounding box. + + This function retrieves information about aanduidingsobjecten from the Flemish + Mercator public WFS service using a bounding box (bbox) as a filter. The bbox should + be provided in the format "xmin,ymin,xmax,ymax" (EPSG:31370 projection). + + Args: + bbox (str): A comma-separated string representing the bounding box in EPSG:31370 + projection (e.g., "100000,500000,200000,600000"). + limit (int, optional): The maximum number of features to retrieve per request. + Defaults to 1000. + + Returns: + dict: A dictionary containing the retrieved GeoJSON feature collection. This + collection might be truncated if the total number of features exceeds + the specified limit. + """ + if oetype==OEType.AO: + typename = "ps:ps_aandobj" + id_property = "aanduid_id" + elif oetype==OEType.EO: + typename = "lu:lu_wet_erfgobj_pub" + id_property = "erfgoed_id" + else: + logging.warning("Undefined OE-type: " + str(oetype) + ": Empty collection returned") + return {},None + + + theme_url = ( + "https://www.mercator.vlaanderen.be/raadpleegdienstenmercatorpubliek/wfs?" 
+ "SERVICE=WFS&REQUEST=GetFeature&VERSION=2.0.0&" + f"TYPENAMES={typename}&" + f"SRSNAME={crs}" + "&outputFormat=application/json" + + ) + if objectids is not None: + filter = f"&CQL_FILTER={id_property} IN (" + ', '.join(str(o) for o in objectids) + ")" + theme_url = theme_url + filter + bbox_polygon = None + if bbox is not None: + bbox_polygon = box(*tuple(o for o in bbox)) + + return get_collection_by_partition( + theme_url, geometry=bbox_polygon, partition=partition, limit=limit, crs=crs + ),id_property + + +class OnroerendErfgoedLoader(GeoJsonLoader): + def __init__(self, objectids=None, oetype=OEType.AO, bbox = None,limit=DOWNLOAD_LIMIT, partition=1000,crs=DEFAULT_CRS): + if (objectids is None and bbox is None) or (objectids is not None and bbox is not None): + raise ValueError("Please provide a ID-filter OR a BBOX-filter, not both") + super().__init__() + self.objectids = objectids + self.oetype = oetype + self.bbox = bbox + self.limit= limit + self.part = partition + self.crs=crs + self.data_dict_source["source"] = "Onroerend Erfgoed" + + def load_data(self): + + #geom_union = buffer_pos(self.aligner.get_thematic_union(), MAX_REFERENCE_BUFFER) + collection,id_property = get_collection_oe_objects( + oetype=self.oetype, + objectids=self.objectids, + bbox=self.bbox, + partition=self.part, + limit = self.limit, + crs = self.crs + ) + self.id_property = id_property + self.input = dict(collection) + self.data_dict_source["version_date"] = datetime.now().strftime(date_format) + LOGGER.debug(f"OnroerendErfgoed-objects downloaded") + return super().load_data() + + diff --git a/brdr/utils.py b/brdr/utils.py index 7b2dbec..3409297 100644 --- a/brdr/utils.py +++ b/brdr/utils.py @@ -197,87 +197,6 @@ def polygonize_reference_data(dict_ref): return dict_ref -def get_oe_dict_by_ids(objectids, oetype="aanduidingsobjecten"): - """ - Fetches thematic data for a list of objectIDs from the Inventaris Onroerend Erfgoed - API. - - This function retrieves information about designated heritage objects - (erfgoedobjecten or aanduidingsobjecten) from the Flemish Agency for Heritage ( - Inventaris Onroerend Erfgoed) based on a list of their IDs. - - Args: - objectids (list): A list of objectIDs of 'erfgoedobjecten' or - 'aanduidingsobjecten'. - oetype (string): A string: 'aanduidingsobjecten' (default) or 'erfgoedobjecten' - - Returns: - dict: A dictionary where keys are objectIDs (as strings) and values are - GeoJSON geometry objects. If an erfgoedobject/aanduidingsobject is not - found, a corresponding warning message will be logged, but it won't be\ - included in the returned dictionary. - - Raises: - requests.exceptions.RequestException: If there is an error fetching data from - the API. - """ - dict_thematic = {} - base_url = "https://inventaris.onroerenderfgoed.be/" + oetype + "/" - headers = {"Accept": "application/json"} - for a in objectids: - url = base_url + str(a) - response = requests.get(url, headers=headers).json() - if "id" in response.keys(): - key = str(response["id"]) - geom = shape(response["locatie"]["contour"]) - dict_thematic[key] = geom - else: - logging.warning("object id " + str(a) + " not available in " + oetype) - return dict_thematic - - -def get_oe_geojson_by_bbox(bbox, limit=1000): - """ - Fetches GeoJSON data for designated heritage objects (aanduidingsobjecten) within - a bounding box. - - This function retrieves information about aanduidingsobjecten from the Flemish - Mercator public WFS service using a bounding box (bbox) as a filter. 
The bbox should - be provided in the format "xmin,ymin,xmax,ymax" (EPSG:31370 projection). - - Args: - bbox (str): A comma-separated string representing the bounding box in EPSG:31370 - projection (e.g., "100000,500000,200000,600000"). - limit (int, optional): The maximum number of features to retrieve per request. - Defaults to 1000. - - Returns: - dict: A dictionary containing the retrieved GeoJSON feature collection. This - collection might be truncated if the total number of features exceeds - the specified limit. - """ - theme_url = ( - "https://www.mercator.vlaanderen.be/raadpleegdienstenmercatorpubliek/wfs?" - "SERVICE=WFS&REQUEST=GetFeature&VERSION=2.0.0&" - "TYPENAMES=ps:ps_aandobj&" - f"COUNT={str(limit)}&" - "SRSNAME=urn:ogc:def:crs:EPSG::31370&" - f"BBOX={bbox}&outputFormat=application/json" - ) - start_index = 0 - collection = {} - while True: - url = theme_url + "&startIndex=" + str(start_index) - feature_collection = requests.get(url).json() - if ( - "features" not in feature_collection - or len(feature_collection["features"]) == 0 - ): - break - start_index = start_index + limit - collection = collection | feature_collection - return collection - def get_breakpoints_zerostreak(x, y): """ @@ -561,18 +480,16 @@ def get_collection_by_partition( collection = {} if geometry is None: collection = get_collection( - _add_bbox_to_url(url=url, crs=crs, bbox=None), limit + add_bbox_to_url(url=url, crs=crs, bbox=None), limit ) elif partition < 1: collection = get_collection( - _add_bbox_to_url(url=url, crs=crs, bbox=get_bbox(geometry)), limit + add_bbox_to_url(url=url, crs=crs, bbox=get_bbox(geometry)), limit ) else: geoms = get_partitions(geometry, partition) for g in geoms: - coll = get_collection( - _add_bbox_to_url(url=url, crs=crs, bbox=get_bbox(g)), limit - ) + coll = get_collection(add_bbox_to_url(url=url, crs=crs, bbox=get_bbox(g)), limit) if collection == {}: collection = dict(coll) elif "features" in collection and "features" in coll: @@ -580,7 +497,7 @@ def get_collection_by_partition( return collection -def _add_bbox_to_url(url, crs=DEFAULT_CRS, bbox=None): +def add_bbox_to_url(url, crs=DEFAULT_CRS, bbox=None): # Load the Base reference data if bbox is not None: url = url + "&bbox-crs=" + crs + "&bbox=" + bbox diff --git a/tests/test_examples.py b/tests/test_examples.py index f16c908..c1f2470 100644 --- a/tests/test_examples.py +++ b/tests/test_examples.py @@ -7,9 +7,9 @@ from brdr.grb import GRBActualLoader from brdr.loader import DictLoader from brdr.loader import GeoJsonLoader +from brdr.oe import get_oe_dict_by_ids from brdr.utils import diffs_from_dict_series from brdr.utils import get_breakpoints_zerostreak -from brdr.utils import get_oe_dict_by_ids from brdr.utils import multipolygons_to_singles diff --git a/tests/test_loader.py b/tests/test_loader.py index 5a04467..5349466 100644 --- a/tests/test_loader.py +++ b/tests/test_loader.py @@ -2,7 +2,7 @@ from brdr.enums import GRBType from brdr.grb import GRBActualLoader from brdr.loader import DictLoader -from brdr.utils import get_oe_dict_by_ids +from brdr.oe import get_oe_dict_by_ids class TestExamples: diff --git a/tests/test_oe.py b/tests/test_oe.py new file mode 100644 index 0000000..eea585c --- /dev/null +++ b/tests/test_oe.py @@ -0,0 +1,49 @@ +import unittest +from datetime import date, timedelta + +import numpy as np +from shapely import Polygon, from_wkt + +from brdr.aligner import Aligner +from brdr.enums import GRBType +from brdr.grb import ( + get_last_version_date, + is_grb_changed, + 
get_geoms_affected_by_grb_change, + evaluate, + GRBActualLoader, + GRBFiscalParcelLoader, +) +from brdr.loader import DictLoader +from brdr.oe import OnroerendErfgoedLoader, OEType +from brdr.utils import ( + get_series_geojson_dict, +) + + +class TestOE(unittest.TestCase): + def test_onroerenderfgoedloader_by_aanduidid(self): + loader = OnroerendErfgoedLoader(objectids = [120288,10275],oetype=OEType.AO) + aligner=Aligner() + aligner.load_thematic_data(loader) + assert len (aligner.dict_thematic.keys())==2 + + def test_onroerenderfgoedloader_by_erfgoedid(self): + loader = OnroerendErfgoedLoader(objectids = [42549],oetype=OEType.EO) + aligner=Aligner() + aligner.load_thematic_data(loader) + assert len (aligner.dict_thematic.keys())==1 + + def test_onroerenderfgoedloader_by_bbox(self): + loader = OnroerendErfgoedLoader(bbox=[172000,172000,174000,174000], oetype=OEType.EO) + aligner = Aligner() + aligner.load_thematic_data(loader) + assert len(aligner.dict_thematic.keys()) >0 + + def test_onroerenderfgoedloader_by_bbox_and_objectid(self): + + with self.assertRaises(Exception) as context: + loader = OnroerendErfgoedLoader(objectids=[42549],bbox=[172000,172000,174000,174000], oetype=OEType.EO) + + with self.assertRaises(Exception) as context: + loader = OnroerendErfgoedLoader(objectids=None,bbox=None, oetype=OEType.EO) diff --git a/tests/test_utils.py b/tests/test_utils.py index f4e7c45..c068270 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -5,13 +5,12 @@ from shapely.geometry import Polygon from brdr.constants import MULTI_SINGLE_ID_SEPARATOR +from brdr.oe import get_oe_dict_by_ids, OEType from brdr.typings import ProcessResult from brdr.utils import diffs_from_dict_series from brdr.utils import filter_dict_by_key from brdr.utils import get_breakpoints_zerostreak from brdr.utils import get_collection -from brdr.utils import get_oe_dict_by_ids -from brdr.utils import get_oe_geojson_by_bbox from brdr.utils import merge_process_results from brdr.utils import multipolygons_to_singles from brdr.utils import polygonize_reference_data @@ -97,7 +96,7 @@ def test_get_oe_dict_by_ids(self): def test_get_oe_dict_by_ids_erfgoedobject(self): eo_id = 206363 - dict_thematic = get_oe_dict_by_ids([eo_id], oetype="erfgoedobjecten") + dict_thematic = get_oe_dict_by_ids([eo_id], oetype=OEType.EO) self.assertFalse(is_empty(dict_thematic[str(eo_id)])) def test_get_oe_dict_by_ids_empty(self): @@ -109,10 +108,10 @@ def test_get_oe_dict_by_ids_not_existing(self): dict_thematic = get_oe_dict_by_ids([aanduid_id]) self.assertEqual(dict_thematic, {}) - def test_get_oe_geojson_by_bbox(self): - bbox = "172000,172000,174000,174000" - collection = get_oe_geojson_by_bbox(bbox) - self.assertEqual(collection["type"], "FeatureCollection") + # def test_get_oe_geojson_by_bbox(self): + # bbox = "172000,172000,174000,174000" + # collection = get_oe_geojson_by_bbox(bbox) + # self.assertEqual(collection["type"], "FeatureCollection") def test_filter_dict_by_key_empty_dict(self): data = {} From b77ca2f44c0239322861a245634c232f365b8f7c Mon Sep 17 00:00:00 2001 From: dieuska Date: Mon, 16 Sep 2024 09:48:01 +0200 Subject: [PATCH 07/35] #63 Added a GRBSpecificDateLoader to grb --- brdr/grb.py | 99 ++++++++++++++++++++++++++- examples/example_grbspecificloader.py | 16 +++++ tests/test_grb.py | 21 +++++- 3 files changed, 134 insertions(+), 2 deletions(-) create mode 100644 examples/example_grbspecificloader.py diff --git a/brdr/grb.py b/brdr/grb.py index 4f6e35d..361f215 100644 --- a/brdr/grb.py +++ b/brdr/grb.py @@ -2,6 +2,7 @@ 
import logging from datetime import date from datetime import datetime +from copy import deepcopy from shapely import intersects from shapely.geometry import shape @@ -18,7 +19,7 @@ from brdr.constants import MAX_REFERENCE_BUFFER from brdr.enums import Evaluation from brdr.enums import GRBType -from brdr.geometry_utils import buffer_pos +from brdr.geometry_utils import buffer_pos, safe_intersection from brdr.geometry_utils import create_donut from brdr.geometry_utils import features_by_geometric_operation from brdr.geometry_utils import get_bbox @@ -264,6 +265,70 @@ def get_collection_grb_fiscal_parcels( return get_collection_by_partition( url, geometry=geometry, partition=partition, limit=limit, crs=crs ) +def get_collection_grb_parcels_by_date( + geometry, + date, + partition=1000, + limit=DOWNLOAD_LIMIT, + crs=DEFAULT_CRS, +): + collection_year_after = get_collection_grb_fiscal_parcels( + year=str(date.year), + geometry=geometry, + partition=partition, + crs=crs, + ) + #Filter on specific date: delete all features > specific_date + #TODO: experimental loader; unclear if we have to use "year-1 & year" OR if we have to use "year & year + 1" + collection_year_after_filtered = deepcopy(collection_year_after) + logging.debug(len (collection_year_after_filtered["features"])) + if "features" in collection_year_after_filtered and len (collection_year_after_filtered["features"])>0: + removed_features =[] + for feature in collection_year_after_filtered["features"]: + versiondate = datetime.strptime( + feature["properties"][GRB_VERSION_DATE][:10], date_format + ).date() + if versiondate > date: + removed_features.append(feature) + collection_year_after_filtered["features"].remove(feature) + logging.debug(len(collection_year_after_filtered["features"])) + #if no features are removed, return the full collection of year_after + if len(removed_features)==0: + return collection_year_after + # if features are removed, search for the features in year before + collection_year_before = get_collection_grb_fiscal_parcels( + year=str(date.year-1), + geometry=geometry, + partition=partition, + crs=crs, + ) + kept_features = [] + if "features" in collection_year_before and len(collection_year_before)>0: + for feature in collection_year_before["features"]: + for rf in removed_features: + geom_feature = shape(feature["geometry"]) + geom_removed_feature= shape(rf["geometry"]) + if intersects(geom_feature, geom_removed_feature): + intersection =safe_intersection(geom_feature, geom_removed_feature) + if intersection.area>1: + if feature not in kept_features: + kept_features.append(feature) + + + #search for intersection and check if it more than x% + #keep these features + + #add them to + + collection_specific_date = deepcopy(collection_year_after_filtered) + filtered_features = collection_year_after_filtered["features"] + specific_date_features = filtered_features + kept_features + logging.debug(len(specific_date_features)) + collection_specific_date["features"]=specific_date_features + + + + return collection_specific_date def evaluate( @@ -473,3 +538,35 @@ def load_data(self): self.input = dict(collection) self.aligner.logger.feedback_info(f"Adpf downloaded for year: {self.year}") return super().load_data() + +class GRBSpecificDateParcelLoader(GeoJsonLoader): + def __init__(self, date, aligner, partition=1000): + logging.warning("experimental loader; use with care!!!") + try: + date = datetime.strptime(date, date_format + ).date() + if date.year>=datetime.now().year: + raise ValueError("The 
GRBSpecificDateParcelLoader can only be used for dates prior to the current year.") + except Exception: + raise ValueError("No valid date, please provide a date in the format: " + date_format) + super().__init__(_input=None, id_property=GRB_PARCEL_ID) + self.aligner = aligner + self.date = date + self.part = partition + self.data_dict_source["source"] = "Adp" + self.data_dict_source["version_date"] = date.strftime(date_format) + + def load_data(self): + if not self.aligner.dict_thematic: + raise ValueError("Thematic data not loaded") + geom_union = buffer_pos(self.aligner.get_thematic_union(), MAX_REFERENCE_BUFFER) + collection= get_collection_grb_parcels_by_date( + date=self.date, + geometry=geom_union, + partition=self.part, + crs=self.aligner.CRS, + ) + self.input = dict(collection) + self.aligner.logger.feedback_info(f"Parcels downloaded for specific date: {self.date.strftime(date_format)}") + return super().load_data() + diff --git a/examples/example_grbspecificloader.py b/examples/example_grbspecificloader.py new file mode 100644 index 0000000..8e7197a --- /dev/null +++ b/examples/example_grbspecificloader.py @@ -0,0 +1,16 @@ +from shapely import from_wkt + +from brdr.aligner import Aligner +from brdr.grb import GRBSpecificDateParcelLoader +from brdr.loader import DictLoader + +aligner = Aligner() +thematic_dict = { + "theme_id_1": from_wkt( + "Polygon ((172283.76869662097305991 174272.85233648214489222, 172276.89871930953813717 174278.68436246179044247, 172274.71383684969623573 174280.57171753142029047, 172274.63047763772192411 174280.64478165470063686, 172272.45265833073062822 174282.52660570573061705, 172269.33533191855531186 174285.22093996312469244, 172265.55258252174826339 174288.49089696351438761, 172258.77032718938426115 174294.22654021997004747, 172258.63259260458289646 174294.342757155187428, 172254.93673790179309435 174288.79932878911495209, 172248.71360730109154247 174279.61860501393675804, 172248.96566232520854101 174279.43056782521307468, 172255.25363882273086347 174274.73737183399498463, 172257.08298882702365518 174273.37133203260600567, 172259.32325354730710387 174271.69890458136796951, 172261.65807284769834951 174269.9690355472266674, 172266.35596220899606124 174266.4871726930141449, 172273.34350050613284111 174261.30863015633076429, 172289.60360219911672175 174249.35944479051977396, 172293.30328181147342548 174246.59864199347794056, 172297.34760522318538278 174253.10583685990422964, 172289.53060952731175348 174259.6846851697191596, 172292.86485871637705714 174265.19099397677928209, 172283.76869662097305991 174272.85233648214489222))" + ) +} +loader = DictLoader(thematic_dict) +aligner.load_thematic_data(loader) +loader = GRBSpecificDateParcelLoader(date="2023-07-03", aligner=aligner) +aligner.load_reference_data(loader) diff --git a/tests/test_grb.py b/tests/test_grb.py index 9c587fd..7cb4aa1 100644 --- a/tests/test_grb.py +++ b/tests/test_grb.py @@ -12,7 +12,7 @@ get_geoms_affected_by_grb_change, evaluate, GRBActualLoader, - GRBFiscalParcelLoader, + GRBFiscalParcelLoader, GRBSpecificDateParcelLoader, ) from brdr.loader import DictLoader from brdr.utils import ( @@ -235,3 +235,22 @@ def test_evaluate(self): ) print(fc["result"]) + + + def test_grbspecificdateparcelloader(self): + aligner = Aligner() + thematic_dict = { + "theme_id_1": from_wkt( + "Polygon ((172283.76869662097305991 174272.85233648214489222, 172276.89871930953813717 174278.68436246179044247, 172274.71383684969623573 174280.57171753142029047, 172274.63047763772192411 174280.64478165470063686, 
172272.45265833073062822 174282.52660570573061705, 172269.33533191855531186 174285.22093996312469244, 172265.55258252174826339 174288.49089696351438761, 172258.77032718938426115 174294.22654021997004747, 172258.63259260458289646 174294.342757155187428, 172254.93673790179309435 174288.79932878911495209, 172248.71360730109154247 174279.61860501393675804, 172248.96566232520854101 174279.43056782521307468, 172255.25363882273086347 174274.73737183399498463, 172257.08298882702365518 174273.37133203260600567, 172259.32325354730710387 174271.69890458136796951, 172261.65807284769834951 174269.9690355472266674, 172266.35596220899606124 174266.4871726930141449, 172273.34350050613284111 174261.30863015633076429, 172289.60360219911672175 174249.35944479051977396, 172293.30328181147342548 174246.59864199347794056, 172297.34760522318538278 174253.10583685990422964, 172289.53060952731175348 174259.6846851697191596, 172292.86485871637705714 174265.19099397677928209, 172283.76869662097305991 174272.85233648214489222))" + ) + } + aligner = Aligner() + loader = DictLoader(thematic_dict) + aligner.load_thematic_data(loader) + loader = GRBSpecificDateParcelLoader(date="2023-01-03", aligner=aligner) + aligner.load_reference_data(loader) + assert len (aligner.dict_reference.keys())==53 + + loader = GRBSpecificDateParcelLoader(date="2023-08-03", aligner=aligner) + aligner.load_reference_data(loader) + assert len (aligner.dict_reference.keys())==52 From 4ce564156b1afa30f9d2a04bde7d29fed761dbd2 Mon Sep 17 00:00:00 2001 From: dieuska Date: Mon, 16 Sep 2024 16:51:44 +0200 Subject: [PATCH 08/35] #64 added update_to_actual_grb-function --- brdr/aligner.py | 13 +--- brdr/constants.py | 2 +- brdr/grb.py | 91 ++++++++++++++++++++++-- examples/example_update_to_actual_grb.py | 29 ++++++++ tests/test_aligner.py | 21 +++--- tests/test_examples.py | 12 ++-- tests/test_grb.py | 22 +++++- 7 files changed, 156 insertions(+), 34 deletions(-) create mode 100644 examples/example_update_to_actual_grb.py diff --git a/brdr/aligner.py b/brdr/aligner.py index e407cc1..e784720 100644 --- a/brdr/aligner.py +++ b/brdr/aligner.py @@ -18,12 +18,11 @@ from shapely.geometry.base import BaseGeometry from brdr import __version__ -from brdr.constants import BUFFER_MULTIPLICATION_FACTOR +from brdr.constants import BUFFER_MULTIPLICATION_FACTOR, LAST_VERSION_DATE from brdr.constants import CORR_DISTANCE from brdr.constants import DEFAULT_CRS from brdr.constants import GRB_VERSION_DATE from brdr.constants import THRESHOLD_CIRCLE_RATIO -from brdr.enums import GRBType from brdr.enums import OpenbaarDomeinStrategy from brdr.geometry_utils import buffer_neg, safe_equals from brdr.geometry_utils import buffer_neg_pos @@ -34,7 +33,6 @@ from brdr.geometry_utils import safe_intersection from brdr.geometry_utils import safe_symmetric_difference from brdr.geometry_utils import safe_union -from brdr.grb import GRBActualLoader from brdr.loader import DictLoader from brdr.loader import GeoJsonFileLoader from brdr.loader import GeoJsonLoader @@ -491,7 +489,7 @@ def get_formula(self, geometry: BaseGeometry, with_geom=False): "full": True, "reference_features": {}, "reference_od": None, - "last_version_date": None, + LAST_VERSION_DATE: None, } full_total = True @@ -553,7 +551,7 @@ def get_formula(self, geometry: BaseGeometry, with_geom=False): } dict_formula["full"] = full_total if last_version_date is not None: - dict_formula["last_version_date"] = last_version_date.strftime(date_format) + dict_formula[LAST_VERSION_DATE] = last_version_date.strftime(date_format) 
geom_od = buffer_pos( buffer_neg( safe_difference(geometry, make_valid(unary_union(intersected))), @@ -1102,8 +1100,3 @@ def load_reference_data_url(self, url, name_reference_id): logging.warning("deprecated method, use load_reference_data instead") loader = GeoJsonUrlLoader(url, name_reference_id) self.load_reference_data(loader) - - def load_reference_data_grb_actual(self, *, grb_type=GRBType.ADP, partition=1000): - logging.warning("deprecated method, use load_reference_data instead") - loader = GRBActualLoader(grb_type=grb_type, partition=partition, aligner=self) - self.load_reference_data(loader) diff --git a/brdr/constants.py b/brdr/constants.py index 57aa20e..1bbf87a 100644 --- a/brdr/constants.py +++ b/brdr/constants.py @@ -38,7 +38,7 @@ # MULTI_SINGLE_ID_SEPARATOR #separator to split multipolygon_ids to single polygons MULTI_SINGLE_ID_SEPARATOR = "*$*" - +LAST_VERSION_DATE = "last_version_date" # GRB_CONSTANTS # URL of the OGC feature API of actual GRB to extract collections GRB_FEATURE_URL = "https://geo.api.vlaanderen.be/GRB/ogc/features/collections" diff --git a/brdr/grb.py b/brdr/grb.py index 361f215..ddd1a94 100644 --- a/brdr/grb.py +++ b/brdr/grb.py @@ -1,14 +1,16 @@ import json import logging +from copy import deepcopy from datetime import date from datetime import datetime -from copy import deepcopy -from shapely import intersects +import numpy as np +from shapely import intersects, Polygon from shapely.geometry import shape from shapely.geometry.base import BaseGeometry -from brdr.constants import DEFAULT_CRS +from brdr.aligner import Aligner +from brdr.constants import DEFAULT_CRS, LAST_VERSION_DATE from brdr.constants import DOWNLOAD_LIMIT from brdr.constants import GRB_BUILDING_ID from brdr.constants import GRB_FEATURE_URL @@ -23,8 +25,9 @@ from brdr.geometry_utils import create_donut from brdr.geometry_utils import features_by_geometric_operation from brdr.geometry_utils import get_bbox -from brdr.loader import GeoJsonLoader -from brdr.utils import dict_series_by_keys +from brdr.loader import GeoJsonLoader, DictLoader +from brdr.logger import Logger +from brdr.utils import dict_series_by_keys, get_series_geojson_dict from brdr.utils import geojson_to_dicts from brdr.utils import get_collection from brdr.utils import get_collection_by_partition @@ -488,6 +491,84 @@ def check_equality( return True, Evaluation.EQUALITY_GEOM_3 return False, Evaluation.NO_PREDICTION_5 +def update_to_actual_grb(featurecollection, id_theme_fieldname, formula_field="formula", max_distance_for_actualisation=2, feedback=None ): + """ + Function to update a thematic featurecollection to the most actual version of GRB. + Important to notice that the featurecollection needs a 'formula' for the base-alignment. 
+ """ + logger = Logger(feedback) + # Load featurecollection into a shapely_dict: + dict_thematic = {} + dict_thematic_formula = {} + + last_version_date = datetime.now().date() + for feature in featurecollection["features"]: + + id_theme = feature["properties"][id_theme_fieldname] + try: + geom = shape(feature["geometry"]) + except Exception: + geom = Polygon() + logger.feedback_debug("id theme: " + id_theme) + logger.feedback_debug ("geometry (wkt): " + geom.wkt) + dict_thematic[id_theme] = geom + try: + dict_thematic_formula[id_theme] = json.loads(feature["properties"][formula_field]) + logger.feedback_debug ("formula: " +str(dict_thematic_formula[id_theme])) + except Exception: + raise Exception ("Formula -attribute-field (json) cannot be loaded") + try: + logger.feedback_debug(str(dict_thematic_formula[id_theme])) + if LAST_VERSION_DATE in dict_thematic_formula[id_theme] and dict_thematic_formula[id_theme][LAST_VERSION_DATE] is not None and dict_thematic_formula[id_theme][LAST_VERSION_DATE] != "": + str_lvd = dict_thematic_formula[id_theme][LAST_VERSION_DATE] + lvd = datetime.strptime(str_lvd, date_format).date() + if lvd < last_version_date: + last_version_date = lvd + except: + raise Exception(f"Problem with {LAST_VERSION_DATE}") + + # if feedback.isCanceled(): + # return {} + datetime_start = last_version_date + datetime_end = datetime.now().date() + #thematic_dict_result = dict(dict_thematic) + base_aligner_result = Aligner(feedback=feedback) + base_aligner_result.load_thematic_data(DictLoader(dict_thematic)) + base_aligner_result.name_thematic_id = id_theme_fieldname + + dict_affected, dict_unchanged = get_geoms_affected_by_grb_change( + base_aligner_result, + grb_type=GRBType.ADP, + date_start=datetime_start, + date_end=datetime_end, + one_by_one=False, + ) + logger.feedback_info("Number of possible affected OE-thematic during timespan: " + str(len(dict_affected))) + if len(dict_affected) == 0: + logger.feedback_info("No change detected in referencelayer during timespan. 
Script is finished") + return {} + logger.feedback_debug(str(datetime_start)) + logger.feedback_debug(str(formula_field)) + + # Initiate a Aligner to reference thematic features to the actual borders + actual_aligner = Aligner(feedback=feedback) + actual_aligner.load_thematic_data(DictLoader(dict_affected)) + actual_aligner.load_reference_data( + GRBActualLoader(grb_type=GRBType.ADP, partition=1000, aligner=actual_aligner)) + + series = np.arange(0, max_distance_for_actualisation * 100, 10, dtype=int) / 100 + dict_series, dict_predicted, diffs_dict = actual_aligner.predictor(series) + dict_evaluated, prop_dictionary = evaluate(actual_aligner, dict_series, dict_predicted, dict_thematic_formula, + threshold_area=5, threshold_percentage=1, + dict_unchanged=dict_unchanged) + + return get_series_geojson_dict( + dict_evaluated, + crs=actual_aligner.CRS, + id_field=actual_aligner.name_thematic_id, + series_prop_dict=prop_dictionary, + ) + class GRBActualLoader(GeoJsonLoader): def __init__(self, grb_type: GRBType, aligner, partition: int = 1000): diff --git a/examples/example_update_to_actual_grb.py b/examples/example_update_to_actual_grb.py new file mode 100644 index 0000000..7606c5b --- /dev/null +++ b/examples/example_update_to_actual_grb.py @@ -0,0 +1,29 @@ +from brdr.aligner import Aligner +from brdr.grb import GRBFiscalParcelLoader +from brdr.grb import update_to_actual_grb +from brdr.loader import GeoJsonFileLoader + +#Create a featurecollection (aligned on 2022), to use for the 'update_to_actual_grb' +base_year = "2022" +base_aligner = Aligner() +name_thematic_id = "theme_identifier" +loader = GeoJsonFileLoader("themelayer.geojson", name_thematic_id) +base_aligner.load_thematic_data(loader) +base_aligner.load_reference_data( + GRBFiscalParcelLoader(year=base_year, aligner=base_aligner) +) +base_process_result = base_aligner.process_dict_thematic(relevant_distance=2) +fcs = base_aligner.get_results_as_geojson(formula=True) +featurecollection_base_result= fcs["result"] +print (featurecollection_base_result) +#Update Featurecollection to actual version +featurecollection = update_to_actual_grb(featurecollection_base_result,base_aligner.name_thematic_id) +#Print results +for feature in featurecollection["result"]["features"]: + print( + feature["properties"][name_thematic_id] + + ": " + + feature["properties"]["evaluation"] + ) +geojson = featurecollection["result"] +print(geojson) diff --git a/tests/test_aligner.py b/tests/test_aligner.py index bb44228..3f93a1a 100644 --- a/tests/test_aligner.py +++ b/tests/test_aligner.py @@ -12,6 +12,7 @@ from brdr.enums import OpenbaarDomeinStrategy from brdr.geometry_utils import buffer_neg_pos from brdr.geometry_utils import grid_bounds +from brdr.grb import GRBActualLoader from brdr.loader import GeoJsonLoader from brdr.typings import FeatureCollection from brdr.typings import ProcessResult @@ -136,8 +137,8 @@ def test_load_reference_data_grb_actual_adp(self): } self.sample_aligner.load_thematic_data_dict(thematic_dict) # LOAD REFERENCE DICTIONARY - self.sample_aligner.load_reference_data_grb_actual( - grb_type=GRBType.ADP, partition=1000 + self.sample_aligner.load_reference_data(GRBActualLoader(aligner=self.sample_aligner, + grb_type=GRBType.ADP, partition=1000) ) self.assertGreater(len(self.sample_aligner.dict_reference), 0) @@ -154,8 +155,8 @@ def test_load_reference_data_grb_actual_gbg(self): } self.sample_aligner.load_thematic_data_dict(thematic_dict) # LOAD REFERENCE DICTIONARY - self.sample_aligner.load_reference_data_grb_actual( - 
grb_type=GRBType.GBG, partition=1000 + self.sample_aligner.load_reference_data(GRBActualLoader(aligner=self.sample_aligner, + grb_type=GRBType.GBG, partition=1000) ) self.assertGreater(len(self.sample_aligner.dict_reference), 0) @@ -172,8 +173,8 @@ def test_load_reference_data_grb_actual_knw(self): } self.sample_aligner.load_thematic_data_dict(thematic_dict) # LOAD REFERENCE DICTIONARY - self.sample_aligner.load_reference_data_grb_actual( - grb_type=GRBType.KNW, partition=1000 + self.sample_aligner.load_reference_data(GRBActualLoader(aligner=self.sample_aligner, + grb_type=GRBType.KNW, partition=1000) ) self.sample_aligner.process_dict_thematic() self.assertGreaterEqual(len(self.sample_aligner.dict_reference), 0) @@ -215,8 +216,8 @@ def test_process_interior_ring(self): } self.sample_aligner.load_thematic_data_dict(thematic_dict) # LOAD REFERENCE DICTIONARY - self.sample_aligner.load_reference_data_grb_actual( - grb_type=GRBType.ADP, partition=1000 + self.sample_aligner.load_reference_data(GRBActualLoader(aligner=self.sample_aligner, + grb_type=GRBType.GBG, partition=1000) ) result_dict = self.sample_aligner.process_dict_thematic() self.assertEqual(len(result_dict), len(thematic_dict)) @@ -226,8 +227,8 @@ def test_process_circle(self): thematic_dict = {"key": geometry} self.sample_aligner.load_thematic_data_dict(thematic_dict) # LOAD REFERENCE DICTIONARY - self.sample_aligner.load_reference_data_grb_actual( - grb_type=GRBType.ADP, partition=1000 + self.sample_aligner.load_reference_data(GRBActualLoader(aligner=self.sample_aligner, + grb_type=GRBType.GBG, partition=1000) ) results_dict = self.sample_aligner.process_dict_thematic() self.assertEqual(geometry, results_dict["key"]["result"]) diff --git a/tests/test_examples.py b/tests/test_examples.py index c1f2470..5661de4 100644 --- a/tests/test_examples.py +++ b/tests/test_examples.py @@ -54,8 +54,9 @@ def test_example_multi_to_single(self): dict_theme = get_oe_dict_by_ids([110082]) dict_theme = multipolygons_to_singles(dict_theme) aligner.load_thematic_data_dict(dict_theme) - aligner.load_reference_data_grb_actual(grb_type=GRBType.GBG, partition=1000) - + aligner.load_reference_data(GRBActualLoader(aligner=aligner, + grb_type=GRBType.GBG, partition=1000) + ) rel_dist = 5 result_dict = aligner.process_dict_thematic(rel_dist, 4) for process_results in result_dict.values(): @@ -227,10 +228,9 @@ def test_example_predictor(self): # Load thematic data & reference data dict_theme = get_oe_dict_by_ids([131635]) aligner.load_thematic_data_dict(dict_theme) - aligner.load_reference_data_grb_actual( - grb_type=GRBType.ADP, partition=1000 - ) # gebruik de actuele adp-percelen adp= administratieve percelen - + aligner.load_reference_data(GRBActualLoader(aligner=aligner, + grb_type=GRBType.GBG, partition=1000) + ) series = np.arange(0, 300, 10, dtype=int) / 100 # predict which relevant distances are interesting to propose as resulting # geometry diff --git a/tests/test_grb.py b/tests/test_grb.py index 7cb4aa1..e62ca3e 100644 --- a/tests/test_grb.py +++ b/tests/test_grb.py @@ -5,14 +5,14 @@ from shapely import Polygon, from_wkt from brdr.aligner import Aligner -from brdr.enums import GRBType +from brdr.enums import GRBType, Evaluation from brdr.grb import ( get_last_version_date, is_grb_changed, get_geoms_affected_by_grb_change, evaluate, GRBActualLoader, - GRBFiscalParcelLoader, GRBSpecificDateParcelLoader, + GRBFiscalParcelLoader, GRBSpecificDateParcelLoader, update_to_actual_grb, ) from brdr.loader import DictLoader from brdr.utils import ( @@ -254,3 
+254,21 @@ def test_grbspecificdateparcelloader(self): loader = GRBSpecificDateParcelLoader(date="2023-08-03", aligner=aligner) aligner.load_reference_data(loader) assert len (aligner.dict_reference.keys())==52 + + def test_grbspecificdateparcelloader(self): + #Create a featurecollection (aligned on 2022), to use for the 'update_to_actual_grb' + name_thematic_id = "theme_identifier" + featurecollection_base_result = {"crs": {"properties": {"name": "EPSG:31370"}, "type": "name"}, "features": [{"geometry": {"coordinates": [[[174165.099014, 179510.530095], [174165.8317, 179512.9879], [174171.989, 179533.6401], [174176.4529, 179548.8062], [174179.309, 179558.51], [174179.380292, 179558.485703], [174181.1589, 179557.8801], [174187.9589, 179555.5901], [174190.259, 179554.81], [174197.229, 179552.4601], [174199.5291, 179551.6901], [174203.588398, 179550.315901], [174204.019, 179550.1701], [174196.945502, 179518.200008], [174196.899, 179517.9901], [174193.6237, 179503.5462], [174193.0752, 179501.1272], [174193.069, 179501.1002], [174192.963218, 179501.135794], [174183.549015, 179504.310095], [174174.279, 179507.4301], [174167.8091, 179509.6149], [174165.099014, 179510.530095]]], "type": "Polygon"}, "properties": {"area": 1390.3280890476424, "formula": "{\"alignment_date\": \"2024-09-16\", \"brdr_version\": \"0.2.1\", \"reference_source\": {\"source\": \"Adpf\", \"version_date\": \"2022-01-01\"}, \"full\": true, \"reference_features\": {\"24126B0049/00X000\": {\"full\": true, \"area\": 502.91, \"percentage\": 100, \"geometry\": null}, \"24126B0049/00Z000\": {\"full\": true, \"area\": 398.32, \"percentage\": 100, \"geometry\": null}, \"24126B0049/00Y000\": {\"full\": true, \"area\": 489.09, \"percentage\": 100, \"geometry\": null}}, \"reference_od\": null, \"last_version_date\": \"2022-07-29\"}", "perimeter": 155.9132823823815, "relevant_distance": 2, "shape_index": 0.11214135973414749, "theme_identifier": "100"}, "type": "Feature"}, {"geometry": {"coordinates": [[[174149.124298, 179571.446101], [174149.4742, 179571.3366], [174140.7496, 179544.3599], [174140.0649, 179544.0909], [174131.8684, 179521.8687], [174127.3538, 179523.3958], [174125.1598, 179524.1334], [174118.177, 179526.5181], [174117.5579, 179526.7295], [174121.3028, 179537.5797], [174134.5641, 179576.001], [174141.4845, 179573.8361], [174149.124298, 179571.446101]]], "type": "Polygon"}, "properties": {"area": 818.9938386019529, "formula": "{\"alignment_date\": \"2024-09-16\", \"brdr_version\": \"0.2.1\", \"reference_source\": {\"source\": \"Adpf\", \"version_date\": \"2022-01-01\"}, \"full\": true, \"reference_features\": {\"24126B0051/00W000\": {\"full\": true, \"area\": 419.99, \"percentage\": 100, \"geometry\": null}, \"24126B0051/00M002\": {\"full\": true, \"area\": 399.01, \"percentage\": 100, \"geometry\": null}}, \"reference_od\": null, \"last_version_date\": \"2022-07-29\"}", "perimeter": 135.6337116105736, "relevant_distance": 2, "shape_index": 0.16561017338311657, "theme_identifier": "200"}, "type": "Feature"}, {"geometry": {"coordinates": [[[174111.549006, 179153.956005], [174111.5042, 179153.9243], [174110.0614, 179154.1094], [174068.867, 179159.3947], [174068.8661, 179159.4262], [174068.8626, 179159.5573], [174073.7483, 179188.9357], [174120.4387, 179180.3235], [174116.1333, 179157.2025], [174111.549006, 179153.956005]]], "type": "Polygon"}, "properties": {"area": 1344.8114559611831, "formula": "{\"alignment_date\": \"2024-09-16\", \"brdr_version\": \"0.2.1\", \"reference_source\": {\"source\": \"Adpf\", \"version_date\": 
\"2022-01-01\"}, \"full\": true, \"reference_features\": {\"24126B0031/00N005\": {\"full\": true, \"area\": 1344.81, \"percentage\": 100, \"geometry\": null}}, \"reference_od\": null, \"last_version_date\": \"2019-07-25\"}", "perimeter": 149.60002606562426, "relevant_distance": 2, "shape_index": 0.11124237929598835, "theme_identifier": "300"}, "type": "Feature"}, {"geometry": {"coordinates": [[[174034.4177, 178984.8249], [174030.7603, 178982.3136], [174030.6565, 178982.4711], [174025.7399, 178989.9312], [174018.094404, 178999.593195], [174017.939403, 178999.788996], [174016.3725, 179001.7693], [174018.7192, 179003.659], [174021.115, 179005.7825], [174019.7443, 179007.5141], [174019.7371, 179007.5233], [174015.7101, 179025.628], [174040.6882, 179032.2831], [174037.3194, 178987.071901], [174037.2994, 178986.8036], [174036.3836, 178986.1748], [174034.4177, 178984.8249]]], "type": "Polygon"}, "properties": {"area": 842.1930629252586, "formula": "{\"alignment_date\": \"2024-09-16\", \"brdr_version\": \"0.2.1\", \"reference_source\": {\"source\": \"Adpf\", \"version_date\": \"2022-01-01\"}, \"full\": true, \"reference_features\": {\"24126B0031/00T007\": {\"full\": true, \"area\": 842.19, \"percentage\": 100, \"geometry\": null}}, \"reference_od\": null, \"last_version_date\": \"2019-07-25\"}", "perimeter": 130.58810547796506, "relevant_distance": 2, "shape_index": 0.1550572086457025, "theme_identifier": "400"}, "type": "Feature"}, {"geometry": {"coordinates": [[[173966.389028, 179298.100271], [173965.849202, 179298.315899], [173964.192, 179298.978], [173958.0291, 179301.4402], [173953.8952, 179302.1971], [173948.0517, 179303.2669], [173947.9791, 179303.2803], [173945.8891, 179303.6902], [173911.239422, 179309.581196], [173910.388103, 179309.7266], [173909.9886, 179309.7948], [173905.785701, 179319.638098], [173905.1785, 179321.060399], [173900.5608, 179331.8751], [173900.9241, 179331.8081], [173940.7763, 179325.5153], [173944.092, 179324.9918], [173949.739089, 179324.100202], [173962.1865, 179322.1395], [173966.131594, 179321.518001], [173966.499, 179321.4602], [173970.4676, 179319.931], [173974.1291, 179318.5202], [173972.379009, 179313.840224], [173968.2391, 179302.7402], [173968.229604, 179302.716411], [173966.389028, 179298.100271]]], "type": "Polygon"}, "properties": {"area": 1379.498322959166, "formula": "{\"alignment_date\": \"2024-09-16\", \"brdr_version\": \"0.2.1\", \"reference_source\": {\"source\": \"Adpf\", \"version_date\": \"2022-01-01\"}, \"full\": true, \"reference_features\": {\"24126B0006/00M002\": {\"full\": true, \"area\": 386.55, \"percentage\": 100, \"geometry\": null}, \"24126B0006/00E002\": {\"full\": true, \"area\": 409.64, \"percentage\": 100, \"geometry\": null}, \"24126B0006/00N002\": {\"full\": true, \"area\": 108.75, \"percentage\": 100, \"geometry\": null}, \"24126B0006/00F002\": {\"full\": true, \"area\": 474.56, \"percentage\": 100, \"geometry\": null}}, \"reference_od\": null, \"last_version_date\": \"2021-07-07\"}", "perimeter": 178.54520703582963, "relevant_distance": 2, "shape_index": 0.12942763616619105, "theme_identifier": "500"}, "type": "Feature"}, {"geometry": {"coordinates": [[[174240.361258, 179443.003306], [174240.4272, 179443.1969], [174234.5671, 179445.0969], [174241.3871, 179463.097], [174241.474, 179463.0721], [174244.1019, 179471.6328], [174249.4882, 179469.7988], [174254.26, 179468.16], [174256.144, 179467.513], [174254.5936, 179463.058], [174252.2125, 179456.2165], [174251.3099, 179453.623], [174249.5697, 179448.6229], [174249.0652, 
179448.8045], [174248.960701, 179448.502502], [174246.296344, 179440.805126], [174240.361258, 179443.003306]]], "type": "Polygon"}, "properties": {"area": 354.31723731849075, "formula": "{\"alignment_date\": \"2024-09-16\", \"brdr_version\": \"0.2.1\", \"reference_source\": {\"source\": \"Adpf\", \"version_date\": \"2022-01-01\"}, \"full\": false, \"reference_features\": {\"24126B0027/00B002\": {\"full\": false, \"area\": 58.59, \"percentage\": 10.55, \"geometry\": null}, \"24126B0027/00R000\": {\"full\": true, \"area\": 161.68, \"percentage\": 100, \"geometry\": null}, \"24126B0027/00K003\": {\"full\": true, \"area\": 134.05, \"percentage\": 100, \"geometry\": null}}, \"reference_od\": null, \"last_version_date\": \"2019-07-25\"}", "perimeter": 82.71815334885017, "relevant_distance": 2, "shape_index": 0.23345788642649634, "theme_identifier": "600"}, "type": "Feature"}], "type": "FeatureCollection"} + + #Update Featurecollection to actual version + featurecollection = update_to_actual_grb(featurecollection_base_result,name_thematic_id) + #Print results + for feature in featurecollection["result"]["features"]: + assert isinstance(feature["properties"]["evaluation"],Evaluation) + # print( + # feature["properties"][name_thematic_id] + # + ": " + # + feature["properties"]["evaluation"] + # ) + # geojson = featurecollection["result"] + # print(geojson) From 7fdc114ff29990397b85a38e73dbd68cd33f0a7d Mon Sep 17 00:00:00 2001 From: dieuska Date: Tue, 17 Sep 2024 08:45:10 +0200 Subject: [PATCH 09/35] dict properties & source to DictLoader --- brdr/loader.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/brdr/loader.py b/brdr/loader.py index 8242333..3b41b70 100644 --- a/brdr/loader.py +++ b/brdr/loader.py @@ -21,13 +21,14 @@ def load_data(self): class DictLoader(Loader): - def __init__(self, data_dict: dict[str:BaseGeometry]): - # TODO: add dict_properties & dict_source? + def __init__(self, data_dict: dict[str:BaseGeometry],data_dict_properties: dict[str:str] = {},data_dict_source: dict[str:str]= { + }): super().__init__() self.data_dict = data_dict + self.data_dict_properties = data_dict_properties + self.data_dict_source = data_dict_source def load_data(self): - # self._prepare_reference_data() return super().load_data() @@ -36,8 +37,7 @@ def __init__( self, *, id_property: str = None, - _input: FeatureCollection = None, - # data_dict_properties=None, TODO? 
+ _input: FeatureCollection = None ): super().__init__() self.id_property = id_property From 6617a1921b0df594966a00d49e7293fb9ee3dd99 Mon Sep 17 00:00:00 2001 From: dieuska Date: Tue, 17 Sep 2024 12:45:08 +0200 Subject: [PATCH 10/35] #59 refactored formula version_date --- brdr/aligner.py | 73 +++++++++-------- brdr/constants.py | 3 + brdr/grb.py | 34 ++++---- brdr/loader.py | 33 ++++++-- brdr/oe.py | 5 +- examples/example_131635.py | 6 +- examples/example_ao.py | 11 +-- examples/example_eo.py | 18 ++--- examples/example_evaluate_ao.py | 14 ++-- examples/example_evaluate_multi_to_single.py | 76 +++--------------- examples/example_multi_to_single.py | 83 ++++++++++---------- examples/example_multipolygon.py | 18 ++--- examples/example_parcel_change_detector.py | 15 +--- 13 files changed, 164 insertions(+), 225 deletions(-) diff --git a/brdr/aligner.py b/brdr/aligner.py index e784720..23af03e 100644 --- a/brdr/aligner.py +++ b/brdr/aligner.py @@ -18,13 +18,12 @@ from shapely.geometry.base import BaseGeometry from brdr import __version__ -from brdr.constants import BUFFER_MULTIPLICATION_FACTOR, LAST_VERSION_DATE +from brdr.constants import BUFFER_MULTIPLICATION_FACTOR, LAST_VERSION_DATE, VERSION_DATE, DATE_FORMAT from brdr.constants import CORR_DISTANCE from brdr.constants import DEFAULT_CRS -from brdr.constants import GRB_VERSION_DATE from brdr.constants import THRESHOLD_CIRCLE_RATIO from brdr.enums import OpenbaarDomeinStrategy -from brdr.geometry_utils import buffer_neg, safe_equals +from brdr.geometry_utils import buffer_neg from brdr.geometry_utils import buffer_neg_pos from brdr.geometry_utils import buffer_pos from brdr.geometry_utils import calculate_geom_by_intersection_and_reference @@ -47,8 +46,6 @@ from brdr.utils import merge_process_results from brdr.utils import write_geojson -date_format = "%Y-%m-%d" - ################### @@ -483,13 +480,12 @@ def get_formula(self, geometry: BaseGeometry, with_geom=False): with_geom is True). 
""" dict_formula = { - "alignment_date": datetime.now().strftime(date_format), + "alignment_date": datetime.now().strftime(DATE_FORMAT), "brdr_version": str(__version__), "reference_source": self.dict_reference_source, "full": True, "reference_features": {}, - "reference_od": None, - LAST_VERSION_DATE: None, + "reference_od": None } full_total = True @@ -501,57 +497,59 @@ def get_formula(self, geometry: BaseGeometry, with_geom=False): intersected = [] for key_ref in ref_intersections: geom = None + version_date = None geom_reference = self.dict_reference[key_ref] geom_intersection = make_valid(safe_intersection(geometry, geom_reference)) if geom_intersection.is_empty or geom_intersection is None: continue intersected.append(geom_intersection) + + perc = round(geom_intersection.area * 100 / geom_reference.area, 2) + if perc < 0.01: + continue # Add a last_version_date if available in properties if ( key_ref in self.dict_reference_properties - and GRB_VERSION_DATE in self.dict_reference_properties[key_ref] + and VERSION_DATE in self.dict_reference_properties[key_ref] ): - version_date = datetime.strptime( - self.dict_reference_properties[key_ref][GRB_VERSION_DATE][:10], - date_format, - ).date() + version_date = self.dict_reference_properties[key_ref][VERSION_DATE] if last_version_date is None and version_date is not None: last_version_date = version_date if version_date is not None and version_date > last_version_date: last_version_date = version_date - if safe_equals(geom_intersection, geom_reference): - full = True - area = round(geom_reference.area, 2) - perc = 100 - if with_geom: - geom = to_geojson(geom_reference) - else: - perc = round(geom_intersection.area * 100 / geom_reference.area, 2) - if perc < 0.01: - continue - elif perc > 99.99: + # if safe_equals(geom_intersection, geom_reference): + # full = True + # area = round(geom_reference.area, 2) + # perc = 100 + # if with_geom: + # geom = geom_reference + if perc > 99.99: full = True area = round(geom_reference.area, 2) perc = 100 if with_geom: - geom = to_geojson(geom_reference) - else: - full = False - full_total = False - area = round(geom_intersection.area, 2) - if with_geom: - geom = to_geojson(geom_intersection) + geom = geom_reference + else: + full = False + full_total = False + area = round(geom_intersection.area, 2) + if with_geom: + geom = geom_intersection dict_formula["reference_features"][key_ref] = { "full": full, "area": area, - "percentage": perc, - "geometry": geom, + "percentage": perc } + if version_date is not None: + dict_formula["reference_features"][key_ref][VERSION_DATE] = version_date.strftime(DATE_FORMAT) + if with_geom: + dict_formula["reference_features"][key_ref]["geometry"] = to_geojson(geom) + dict_formula["full"] = full_total if last_version_date is not None: - dict_formula[LAST_VERSION_DATE] = last_version_date.strftime(date_format) + dict_formula[LAST_VERSION_DATE] = last_version_date.strftime(DATE_FORMAT) geom_od = buffer_pos( buffer_neg( safe_difference(geometry, make_valid(unary_union(intersected))), @@ -563,9 +561,10 @@ def get_formula(self, geometry: BaseGeometry, with_geom=False): area_od = round(geom_od.area, 2) if area_od > 0: dict_formula["reference_od"] = { - "area": area_od, - "geometry": to_geojson(geom_od), + "area": area_od } + if with_geom: + dict_formula["reference_od"]["geometry"] = to_geojson(geom_od) self.logger.feedback_debug(str(dict_formula)) return dict_formula @@ -1050,7 +1049,7 @@ def load_reference_data(self, loader: Loader): ( self.dict_reference, 
self.dict_reference_properties, - self.dict_reference_source, + self.dict_reference_source ) = loader.load_data() self._prepare_reference_data() diff --git a/brdr/constants.py b/brdr/constants.py index 1bbf87a..48c7c94 100644 --- a/brdr/constants.py +++ b/brdr/constants.py @@ -39,6 +39,9 @@ MULTI_SINGLE_ID_SEPARATOR = "*$*" LAST_VERSION_DATE = "last_version_date" +VERSION_DATE = "version_date" + +DATE_FORMAT = "%Y-%m-%d" # GRB_CONSTANTS # URL of the OGC feature API of actual GRB to extract collections GRB_FEATURE_URL = "https://geo.api.vlaanderen.be/GRB/ogc/features/collections" diff --git a/brdr/grb.py b/brdr/grb.py index ddd1a94..a827ceb 100644 --- a/brdr/grb.py +++ b/brdr/grb.py @@ -10,7 +10,7 @@ from shapely.geometry.base import BaseGeometry from brdr.aligner import Aligner -from brdr.constants import DEFAULT_CRS, LAST_VERSION_DATE +from brdr.constants import DEFAULT_CRS, LAST_VERSION_DATE, DATE_FORMAT, VERSION_DATE from brdr.constants import DOWNLOAD_LIMIT from brdr.constants import GRB_BUILDING_ID from brdr.constants import GRB_FEATURE_URL @@ -33,7 +33,8 @@ from brdr.utils import get_collection_by_partition log = logging.getLogger(__name__) -date_format = "%Y-%m-%d" + +datetime_format_TZ = "%Y-%m-%dT%H:%M:%SZ" def is_grb_changed( @@ -191,7 +192,7 @@ def get_last_version_date( for c in collection["features"]: if intersects(geometry, shape(c["geometry"])): versiondate = datetime.strptime( - c["properties"][GRB_VERSION_DATE], date_format + c["properties"][GRB_VERSION_DATE], DATE_FORMAT ).date() update_dates.append(versiondate) @@ -237,11 +238,11 @@ def get_collection_grb_actual( versiondate_filter_end = "" if date_start is not None: versiondate_filter_start = ( - GRB_VERSION_DATE + ">" + date_start.strftime(date_format) + GRB_VERSION_DATE + ">" + date_start.strftime(DATE_FORMAT) ) versiondate_filter = versiondate_filter_start if date_end is not None: - versiondate_filter_end = GRB_VERSION_DATE + "<" + date_end.strftime(date_format) + versiondate_filter_end = GRB_VERSION_DATE + "<" + date_end.strftime(DATE_FORMAT) versiondate_filter = versiondate_filter_end if not (date_start is None and date_end is None): versiondate_filter = versiondate_filter_start + " AND " + versiondate_filter_end @@ -289,7 +290,7 @@ def get_collection_grb_parcels_by_date( removed_features =[] for feature in collection_year_after_filtered["features"]: versiondate = datetime.strptime( - feature["properties"][GRB_VERSION_DATE][:10], date_format + feature["properties"][GRB_VERSION_DATE][:10], DATE_FORMAT ).date() if versiondate > date: removed_features.append(feature) @@ -521,7 +522,7 @@ def update_to_actual_grb(featurecollection, id_theme_fieldname, formula_field="f logger.feedback_debug(str(dict_thematic_formula[id_theme])) if LAST_VERSION_DATE in dict_thematic_formula[id_theme] and dict_thematic_formula[id_theme][LAST_VERSION_DATE] is not None and dict_thematic_formula[id_theme][LAST_VERSION_DATE] != "": str_lvd = dict_thematic_formula[id_theme][LAST_VERSION_DATE] - lvd = datetime.strptime(str_lvd, date_format).date() + lvd = datetime.strptime(str_lvd, DATE_FORMAT).date() if lvd < last_version_date: last_version_date = lvd except: @@ -577,6 +578,7 @@ def __init__(self, grb_type: GRBType, aligner, partition: int = 1000): self.grb_type = grb_type self.part = partition self.data_dict_source["source"] = grb_type.value + self.versiondate_info= {"name": GRB_VERSION_DATE,"format": DATE_FORMAT} def load_data(self): if not self.aligner.dict_thematic: @@ -590,7 +592,7 @@ def load_data(self): ) self.id_property = id_property 
self.input = dict(collection) - self.data_dict_source["version_date"] = datetime.now().strftime(date_format) + self.data_dict_source[VERSION_DATE] = datetime.now().strftime(DATE_FORMAT) self.aligner.logger.feedback_info(f"GRB downloaded: {self.grb_type}") return super().load_data() @@ -602,9 +604,10 @@ def __init__(self, year: str, aligner, partition=1000): self.year = year self.part = partition self.data_dict_source["source"] = "Adpf" - self.data_dict_source["version_date"] = datetime(int(year), 1, 1).strftime( - date_format + self.data_dict_source[VERSION_DATE] = datetime(int(year), 1, 1).strftime( + DATE_FORMAT ) + self.versiondate_info= {"name": GRB_VERSION_DATE,"format": datetime_format_TZ} def load_data(self): if not self.aligner.dict_thematic: @@ -624,18 +627,19 @@ class GRBSpecificDateParcelLoader(GeoJsonLoader): def __init__(self, date, aligner, partition=1000): logging.warning("experimental loader; use with care!!!") try: - date = datetime.strptime(date, date_format - ).date() + date = datetime.strptime(date, DATE_FORMAT + ).date() if date.year>=datetime.now().year: raise ValueError("The GRBSpecificDateParcelLoader can only be used for dates prior to the current year.") except Exception: - raise ValueError("No valid date, please provide a date in the format: " + date_format) + raise ValueError("No valid date, please provide a date in the format: " + DATE_FORMAT) super().__init__(_input=None, id_property=GRB_PARCEL_ID) self.aligner = aligner self.date = date self.part = partition self.data_dict_source["source"] = "Adp" - self.data_dict_source["version_date"] = date.strftime(date_format) + self.data_dict_source[VERSION_DATE] = date.strftime(DATE_FORMAT) + self.versiondate_info= {"name": GRB_VERSION_DATE,"format": datetime_format_TZ} def load_data(self): if not self.aligner.dict_thematic: @@ -648,6 +652,6 @@ def load_data(self): crs=self.aligner.CRS, ) self.input = dict(collection) - self.aligner.logger.feedback_info(f"Parcels downloaded for specific date: {self.date.strftime(date_format)}") + self.aligner.logger.feedback_info(f"Parcels downloaded for specific date: {self.date.strftime(DATE_FORMAT)}") return super().load_data() diff --git a/brdr/loader.py b/brdr/loader.py index 3b41b70..b2ef8e5 100644 --- a/brdr/loader.py +++ b/brdr/loader.py @@ -4,7 +4,10 @@ import requests as requests from shapely import make_valid from shapely.geometry.base import BaseGeometry +from win32ctypes.pywin32.pywintypes import datetime +from brdr.constants import DATE_FORMAT +from brdr.constants import VERSION_DATE from brdr.typings import FeatureCollection from brdr.utils import geojson_to_dicts @@ -14,15 +17,34 @@ def __init__(self): self.data_dict: dict[str, BaseGeometry] = {} self.data_dict_properties: dict[str, dict] = {} self.data_dict_source: dict[str, str] = {} + self.versiondate_info: dict[str, str] = None def load_data(self): self.data_dict = {x: make_valid(self.data_dict[x]) for x in self.data_dict} + if self.versiondate_info is not None: + for key in self.data_dict_properties.keys(): + try: + self.data_dict_properties[key][VERSION_DATE] = datetime.strptime( + self.data_dict_properties[key][self.versiondate_info["name"]], + self.versiondate_info["format"], + ) + except: + #Catch, to try extracting only the date with default -date format if specific format does not work + self.data_dict_properties[key][VERSION_DATE] = datetime.strptime( + self.data_dict_properties[key][self.versiondate_info["name"]][:10], + DATE_FORMAT, + ) + return self.data_dict, self.data_dict_properties, 
self.data_dict_source class DictLoader(Loader): - def __init__(self, data_dict: dict[str:BaseGeometry],data_dict_properties: dict[str:str] = {},data_dict_source: dict[str:str]= { - }): + def __init__( + self, + data_dict: dict[str:BaseGeometry], + data_dict_properties: dict[str:str] = {}, + data_dict_source: dict[str:str] = {}, + ): super().__init__() self.data_dict = data_dict self.data_dict_properties = data_dict_properties @@ -33,12 +55,7 @@ def load_data(self): class GeoJsonLoader(Loader): - def __init__( - self, - *, - id_property: str = None, - _input: FeatureCollection = None - ): + def __init__(self, *, id_property: str = None, _input: FeatureCollection = None): super().__init__() self.id_property = id_property self.input = _input diff --git a/brdr/oe.py b/brdr/oe.py index a118264..3fc2ff6 100644 --- a/brdr/oe.py +++ b/brdr/oe.py @@ -6,13 +6,12 @@ from shapely import box from shapely.geometry import shape -from brdr.constants import DOWNLOAD_LIMIT, DEFAULT_CRS +from brdr.constants import DOWNLOAD_LIMIT, DEFAULT_CRS, DATE_FORMAT, VERSION_DATE from brdr.loader import GeoJsonLoader from brdr.logger import LOGGER from brdr.utils import get_collection_by_partition log = logging.getLogger(__name__) -date_format = "%Y-%m-%d" class OEType(str, Enum): @@ -154,7 +153,7 @@ def load_data(self): ) self.id_property = id_property self.input = dict(collection) - self.data_dict_source["version_date"] = datetime.now().strftime(date_format) + self.data_dict_source[VERSION_DATE] = datetime.now().strftime(DATE_FORMAT) LOGGER.debug(f"OnroerendErfgoed-objects downloaded") return super().load_data() diff --git a/examples/example_131635.py b/examples/example_131635.py index 3e9f354..9ca8433 100644 --- a/examples/example_131635.py +++ b/examples/example_131635.py @@ -1,8 +1,7 @@ from brdr.aligner import Aligner from brdr.enums import GRBType from brdr.grb import GRBActualLoader -from brdr.loader import DictLoader -from brdr.utils import get_oe_dict_by_ids +from brdr.oe import OnroerendErfgoedLoader from examples import print_formula from examples import show_map @@ -12,8 +11,7 @@ # Initiate brdr aligner = Aligner() # Load thematic data & reference data - dict_theme = get_oe_dict_by_ids([131635]) - loader = DictLoader(dict_theme) + loader = OnroerendErfgoedLoader([131635]) aligner.load_thematic_data(loader) loader = GRBActualLoader(grb_type=GRBType.ADP, partition=1000, aligner=aligner) aligner.load_reference_data(loader) diff --git a/examples/example_ao.py b/examples/example_ao.py index 557ebd9..5041fad 100644 --- a/examples/example_ao.py +++ b/examples/example_ao.py @@ -3,8 +3,8 @@ from brdr.aligner import Aligner from brdr.enums import GRBType from brdr.grb import GRBActualLoader -from brdr.loader import DictLoader -from brdr.utils import get_oe_dict_by_ids, dict_series_by_keys +from brdr.oe import OnroerendErfgoedLoader +from brdr.utils import dict_series_by_keys from examples import show_map, plot_series if __name__ == "__main__": @@ -14,13 +14,10 @@ # Initiate brdr aligner = Aligner() # Load thematic data & reference data - # dict_theme = get_oe_dict_by_ids([206363], oetype='erfgoedobjecten') aanduidingsobjecten = range(1, 10) aanduidingsobjecten =[117798,116800,117881] - dict_theme = get_oe_dict_by_ids( - aanduidingsobjecten, oetype="aanduidingsobjecten" - ) # noqa - loader = DictLoader(dict_theme) + + loader = OnroerendErfgoedLoader(aanduidingsobjecten) aligner.load_thematic_data(loader) loader = GRBActualLoader(grb_type=GRBType.ADP, partition=1000, aligner=aligner) aligner.load_reference_data(loader) 
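Taken together, these example migrations all converge on the same loader-based pattern: thematic features come from OnroerendErfgoedLoader instead of get_oe_dict_by_ids, and reference parcels come from GRBActualLoader. A minimal sketch of that pattern, assembled from the calls shown in these diffs (the object id, relevant distance and od_strategy value 4 are illustrative values borrowed from the examples):

from brdr.aligner import Aligner
from brdr.enums import GRBType
from brdr.grb import GRBActualLoader
from brdr.oe import OnroerendErfgoedLoader

# Initiate the aligner
aligner = Aligner()
# Thematic data: OnroerendErfgoed objects fetched by id (117798 is an illustrative id from example_ao.py)
aligner.load_thematic_data(OnroerendErfgoedLoader([117798]))
# Reference data: actual GRB administrative parcels (ADP), downloaded per partition
aligner.load_reference_data(
    GRBActualLoader(grb_type=GRBType.ADP, partition=1000, aligner=aligner)
)
# Align at a single relevant distance (2 m) with od_strategy 4, as in the example scripts
dict_results_by_distance = {2: aligner.process_dict_thematic(2, 4)}
aligner.export_results("output/")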
diff --git a/examples/example_eo.py b/examples/example_eo.py index e08a8e1..fbcadb0 100644 --- a/examples/example_eo.py +++ b/examples/example_eo.py @@ -2,7 +2,9 @@ from brdr.aligner import Aligner from brdr.enums import GRBType -from brdr.utils import get_oe_dict_by_ids, write_geojson, dict_series_by_keys +from brdr.grb import GRBActualLoader +from brdr.oe import OnroerendErfgoedLoader, OEType +from brdr.utils import write_geojson, dict_series_by_keys from examples import show_map, plot_series if __name__ == "__main__": @@ -25,17 +27,9 @@ # 206368, 206786 ] - dict_theme = get_oe_dict_by_ids(erfgoedobjecten, oetype="erfgoedobjecten") - aligner.load_thematic_data_dict(dict_theme) - aligner.load_reference_data_grb_actual(grb_type=GRBType.ADP, partition=1000) - - # RESULTS - # rel_dist = 0.2 - # dict_results_by_distance = {} - # #put resulting tuple in a dictionary - # dict_results_by_distance[rel_dist] = aligner.process_dict_thematic(rel_dist,2) - # aligner.export_results("output/") - # show_map(dict_results_by_distance, aligner.dict_thematic, aligner.dict_reference) + loader = OnroerendErfgoedLoader(objectids=erfgoedobjecten, oetype=OEType.EO) + aligner.load_thematic_data(loader) + aligner.load_reference_data(GRBActualLoader(aligner=aligner,grb_type=GRBType.ADP)) series = np.arange(0, 200, 20, dtype=int) / 100 # predict which relevant distances are interesting to propose as resulting geometry diff --git a/examples/example_evaluate_ao.py b/examples/example_evaluate_ao.py index 9d9f3cf..3b6c88c 100644 --- a/examples/example_evaluate_ao.py +++ b/examples/example_evaluate_ao.py @@ -11,16 +11,14 @@ GRBActualLoader, ) from brdr.loader import DictLoader -from brdr.utils import get_series_geojson_dict, get_oe_dict_by_ids - -# dict_theme = get_oe_dict_by_ids([125610,148305,127615,122316,120153,124699,115489, -# 120288,120387,124762,148143,116141]) -dict_theme = get_oe_dict_by_ids([10047, 10048, 10049, 10050, 10051, 10056]) -dict_theme = get_oe_dict_by_ids([120288]) -print(dict_theme) +from brdr.oe import OnroerendErfgoedLoader +from brdr.utils import get_series_geojson_dict base_aligner = Aligner() -base_aligner.load_thematic_data(DictLoader(dict_theme)) +# = OnroerendErfgoedLoader([125610,148305,127615,122316,120153,124699,115489,120288,120387,124762,148143,116141]) +#loader = OnroerendErfgoedLoader([10047, 10048, 10049, 10050, 10051, 10056]) +loader = OnroerendErfgoedLoader([120288]) +base_aligner.load_thematic_data(loader) base_year = "2022" base_aligner.load_reference_data( GRBFiscalParcelLoader(year=base_year, aligner=base_aligner) diff --git a/examples/example_evaluate_multi_to_single.py b/examples/example_evaluate_multi_to_single.py index 3a68ca8..593da3c 100644 --- a/examples/example_evaluate_multi_to_single.py +++ b/examples/example_evaluate_multi_to_single.py @@ -9,74 +9,18 @@ from brdr.grb import evaluate from brdr.grb import get_geoms_affected_by_grb_change from brdr.loader import DictLoader -from brdr.utils import get_oe_dict_by_ids +from brdr.oe import OnroerendErfgoedLoader from brdr.utils import get_series_geojson_dict from brdr.utils import merge_process_results -from brdr.utils import multipolygons_to_singles -# thematic_dict = { -# 1: from_wkt( -# "MultiPolygon (((173503.67240000001038425 179630.29370000000926666, " -# "173517.47036350998678245 179636.5546227699960582, " -# "173526.74985151999862865 179640.51110276998952031, " -# "173534.37954751998768188 179620.73651076000533067, " -# "173538.27292352000949904 179622.00793476001126692, " -# "173540.02590000000782311 
179616.82500000001164153, " -# "173540.5209000000031665 179615.20139999999082647, " -# "173541.88829999999143183 179610.71599999998579733, " -# "173545.56226753001101315 179598.47103874001186341, " -# "173535.27874752000207081 179595.28806274000089616, " -# "173524.47042751000844873 179591.97017474001040682, " -# "173507.03150000001187436 179623.90200000000186265, " -# "173504.75964350000140257 179629.70476677000988275, " -# "173503.67240000001038425 179630.29370000000926666))," -# "((173606.28779999999096617 179670.28599999999278225, " -# "173610.20720000000437722 179671.40659999998752028, " -# "173614.6045999999914784 179672.66380000000935979, " -# "173631.59880000000703149 179677.52259999999660067, " -# "173631.89120000001275912 179677.60620000000926666, " -# "173633.32480000000214204 179678.01610000000800937, " -# "173633.32519999999203719 179678.01610000000800937, " -# "173636.02556358999572694 179678.24044679998769425, " -# "173637.36501959001179785 179678.00627079998957925, " -# "173638.8345875900122337 179677.26476680001360364, " -# "173639.96585159000824206 179676.19820680000702851, " -# "173640.61596359001123346 179675.11871879998943768, " -# "173641.02953159000026062 179673.73823879999690689, " -# "173641.81839559000218287 179668.88179079000838101, " -# "173643.26172358999610879 179659.99692678998690099, " -# "173644.17860000001383014 179654.35260000001289882, " -# "173644.70509999999194406 179651.11160000000381842, " -# "173646.01050000000395812 179643.0746999999973923, " -# "173646.98720000000321306 179637.06119999999646097, " -# "173647.33480000001145527 179634.92029999999795109, " -# "173648.38750000001164153 179628.81059999999706633, " -# "173648.38829999999143183 179628.80609999998705462, " -# "173648.63229999999748543 179627.39060000001336448, " -# "173648.78820000000996515 179626.48589999999967404, " -# "173637.05239999998593703 179624.23920000001089647, " -# "173628.10649999999441206 179622.52669999998761341, " -# "173626.2447999999858439 179622.17029999999795109, " -# "173623.44330000001355074 179621.6339999999909196, " -# "173612.57180000000516884 179619.55280000000493601, " -# "173609.84570000000530854 179627.77809999999590218, " -# "173606.07050000000162981 179639.75099999998928979, " -# "173602.54409999999916181 179650.93530000001192093, " -# "173600.03229999999166466 179658.901400000002468, " -# "173599.82219999999506399 179659.60740000000805594, " -# "173598.40669999999227002 179664.36720000000786968, " -# "173597.43900000001303852 179667.62049999998998828, " -# "173597.43859999999403954 179667.62040000001434237, " -# "173597.401400000002468 179667.74530000000959262, " -# "173606.28779999999096617 179670.28599999999278225)))" -# ) -# } -thematic_dict = get_oe_dict_by_ids([9946]) +multi_as_single_modus = False + # Align the multipolygon to the fiscal parcels 2022 -# thematic_dict = multipolygons_to_singles(thematic_dict) + base_aligner = Aligner() -base_aligner.multi_as_single_modus = False -base_aligner.load_thematic_data(DictLoader(thematic_dict)) +base_aligner.multi_as_single_modus = multi_as_single_modus +loader = OnroerendErfgoedLoader([9946]) +base_aligner.load_thematic_data(loader) base_year = "2022" base_aligner.load_reference_data( GRBFiscalParcelLoader(year=base_year, aligner=base_aligner) @@ -92,10 +36,9 @@ thematic_dict_result[key] = base_process_result[key]["result"] thematic_dict_formula[key] = base_aligner.get_formula(thematic_dict_result[key]) -thematic_dict_result = multipolygons_to_singles(thematic_dict_result) # Determine all features that are 
possibly changed during timespan base_aligner_result = Aligner() -# base_aligner.multi_as_single_modus=False +base_aligner_result.multi_as_single_modus = multi_as_single_modus base_aligner_result.load_thematic_data(DictLoader(thematic_dict_result)) dict_affected, dict_unchanged = get_geoms_affected_by_grb_change( base_aligner_result, @@ -106,8 +49,9 @@ ) # Align the possibly affected geometry on the actual GRB parcels (evaluation) -# dict_affected = multipolygons_to_singles(dict_affected) + actual_aligner = Aligner() +actual_aligner.multi_as_single_modus = multi_as_single_modus loader = DictLoader(dict_affected) actual_aligner.load_thematic_data(loader) loader = GRBActualLoader(grb_type=GRBType.ADP, partition=1000, aligner=actual_aligner) diff --git a/examples/example_multi_to_single.py b/examples/example_multi_to_single.py index fa4bf14..d12fea3 100644 --- a/examples/example_multi_to_single.py +++ b/examples/example_multi_to_single.py @@ -1,46 +1,47 @@ from brdr.aligner import Aligner from brdr.enums import GRBType -from brdr.utils import get_oe_dict_by_ids -from brdr.utils import multipolygons_to_singles +from brdr.grb import GRBActualLoader +from brdr.oe import OnroerendErfgoedLoader from examples import print_formula from examples import show_map -# example to Change a dictionary form multipolygon to single before executing the -# aligner. Can be used on the thematic dictionary as the reference dictionary - - -if __name__ == "__main__": - # EXAMPLE for a thematic MultiPolygon - dict_theme = get_oe_dict_by_ids([110082]) - - # WITHOUT MULTI_TO_SINGLE - # Initiate brdr - aligner = Aligner() - # Load thematic data & reference data - # Get a specific feature of OE that exists out of a Multipolygon - - aligner.load_thematic_data_dict(dict_theme) - aligner.load_reference_data_grb_actual(grb_type=GRBType.GBG, partition=1000) - - rel_dist = 2 - dict_results_by_distance = {rel_dist: aligner.process_dict_thematic(rel_dist, 4)} - aligner.export_results("output/") - show_map(dict_results_by_distance, aligner.dict_thematic, aligner.dict_reference) - - print_formula(dict_results_by_distance, aligner) - - # WITH MULTI_TO_SINGLE - # Initiate brdr - aligner = Aligner() - # Load thematic data & reference data - # Get a specific feature of OE that exists out of a Multipolygon - dict_theme = multipolygons_to_singles(dict_theme) - aligner.load_thematic_data_dict(dict_theme) - aligner.load_reference_data_grb_actual(grb_type=GRBType.GBG, partition=1000) - - rel_dist = 5 - dict_results_by_distance = {rel_dist: aligner.process_dict_thematic(rel_dist, 4)} - aligner.export_results("output/") - show_map(dict_results_by_distance, aligner.dict_thematic, aligner.dict_reference) - - print_formula(dict_results_by_distance, aligner) +# EXAMPLE of "multi_as_single_modus" + +# Initiate brdr +aligner = Aligner() +# WITHOUT MULTI_TO_SINGLE +aligner.multi_as_single_modus = False +# Load thematic data & reference data +# Get a specific feature of OE that exists out of a Multipolygon +loader = OnroerendErfgoedLoader([110082]) +aligner.load_thematic_data(loader) +aligner.load_reference_data( + GRBActualLoader(aligner=aligner, grb_type=GRBType.GBG, partition=1000) +) + +rel_dist = 20 +dict_results_by_distance = {rel_dist: aligner.process_dict_thematic(rel_dist, 4)} +aligner.export_results("output/") +show_map(dict_results_by_distance, aligner.dict_thematic, aligner.dict_reference) + +print_formula(dict_results_by_distance, aligner) + +# WITH MULTI_TO_SINGLE + +# Initiate brdr +aligner = Aligner() +aligner.multi_as_single_modus = 
True +# Load thematic data & reference data +# Get a specific feature of OE that exists out of a Multipolygon +loader = OnroerendErfgoedLoader([110082]) +aligner.load_thematic_data(loader) +aligner.load_reference_data( + GRBActualLoader(aligner=aligner, grb_type=GRBType.GBG, partition=1000) +) + +rel_dist = 20 +dict_results_by_distance = {rel_dist: aligner.process_dict_thematic(rel_dist, 4)} +aligner.export_results("output/") +show_map(dict_results_by_distance, aligner.dict_thematic, aligner.dict_reference) + +print_formula(dict_results_by_distance, aligner) diff --git a/examples/example_multipolygon.py b/examples/example_multipolygon.py index db56bf6..433a28b 100644 --- a/examples/example_multipolygon.py +++ b/examples/example_multipolygon.py @@ -1,6 +1,8 @@ # Initiate brdr from brdr.aligner import Aligner from brdr.enums import GRBType +from brdr.grb import GRBActualLoader +from brdr.loader import DictLoader from brdr.utils import multipolygons_to_singles from brdr.utils import write_geojson @@ -17,20 +19,12 @@ ) # gebruik de actuele adp-percelen adp= administratieve percelen aligner = Aligner() -aligner.load_thematic_data_dict( - aligner0.dict_thematic, +aligner.load_thematic_data(DictLoader(aligner0.dict_thematic)) + +aligner.load_reference_data( + GRBActualLoader(aligner=aligner, grb_type=GRBType.ADP, partition=1000) ) -aligner.load_reference_data_grb_actual(grb_type=GRBType.ADP, partition=1000) -# Example how to use the Aligner -# rel_dist = 2 -# dict_results_by_distance = {} -# dict_results_by_distance[aligner.relevant_distance] = aligner.process_dict_thematic( -# relevant_distance=rel_dist, -# od_strategy=OpenbaarDomeinStrategy.SNAP_FULL_AREA_ALL_SIDE, -# ) -# aligner.export_results("output/") -# show_map(dict_results_by_distance, aligner.dict_thematic, aligner.dict_reference) dict_series, dict_predicted, diffs = aligner.predictor() fcs = aligner.get_predictions_as_geojson(series_dict=dict_predicted, formula=True) aligner.export_results("output/") diff --git a/examples/example_parcel_change_detector.py b/examples/example_parcel_change_detector.py index 9416f25..66baf10 100644 --- a/examples/example_parcel_change_detector.py +++ b/examples/example_parcel_change_detector.py @@ -11,8 +11,7 @@ from brdr.grb import evaluate from brdr.grb import get_geoms_affected_by_grb_change from brdr.loader import DictLoader -from brdr.loader import GeoJsonLoader -from brdr.utils import get_oe_geojson_by_bbox +from brdr.oe import OnroerendErfgoedLoader # This code shows an example how the aligner can be used inside a flow of # parcel change detection: @@ -28,7 +27,7 @@ crs = "EPSG:31370" limit = 10000 # bbox = "172800,170900,173000,171100" -bbox = "172000,172000,174000,174000" +bbox = [172000,172000,174000,174000] # bbox = "170000,170000,175000,174900" # bbox = "100000,195000,105000,195900" # bbox = "150000,210000,155000,214900" @@ -41,13 +40,6 @@ excluded_area = 10000 # series of relevant distance that is used to check if we can auto-align the geometries # to the actual reference-polygons to get an 'equal' formula -# series = [ -# 0, -# 0.5, -# 1, -# 1.5, -# 2, -# ] series = np.arange(0, 200, 10, dtype=int) / 100 # BASE # ===== @@ -55,8 +47,7 @@ # base_year base_aligner = Aligner() # Load the thematic data to evaluate -loader = GeoJsonLoader(_input=get_oe_geojson_by_bbox(bbox), id_property="aanduid_id") -# loader = DictLoader(get_oe_dict_by_ids(['554','1573','124023','1873','1782','1324'])) +loader = OnroerendErfgoedLoader(bbox=bbox) base_aligner.load_thematic_data(loader) logging.info( From 
8578c2cb4033f59f5b440a90dd543fc819a4efe0 Mon Sep 17 00:00:00 2001 From: dieuska Date: Tue, 17 Sep 2024 13:38:02 +0200 Subject: [PATCH 11/35] added speedtest added speedtest before refactoring of dict_series --- examples/example_speedtest.py | 44 +++++++++++++++++++++++++++++++++++ 1 file changed, 44 insertions(+) create mode 100644 examples/example_speedtest.py diff --git a/examples/example_speedtest.py b/examples/example_speedtest.py new file mode 100644 index 0000000..936ddd0 --- /dev/null +++ b/examples/example_speedtest.py @@ -0,0 +1,44 @@ +import statistics +from datetime import datetime +from brdr.aligner import Aligner +from brdr.loader import GeoJsonFileLoader + +# Initiate brdr +aligner = Aligner(relevant_distance=2) +# Load local thematic data and reference data +# loader = GeoJsonFileLoader( +# "../tests/testdata/theme.geojson", "theme_identifier" +# ) +loader = GeoJsonFileLoader( + "../tests/testdata/themelayer_not_referenced.geojson", "theme_identifier" +) +aligner.load_thematic_data(loader) +loader = GeoJsonFileLoader("../tests/testdata/reference_leuven.geojson", "capakey") +aligner.load_reference_data(loader) + +times=[] +for iter in range(1, 11): + starttime= datetime.now() + + # Example how to use the Aligner + aligner.predictor() + fcs = aligner.get_predictions_as_geojson(formula=True) + endtime=datetime.now() + seconds = (endtime-starttime).total_seconds() + times.append(seconds) + print(seconds) +print ("duration: " + str(times)) + +print ("Min: " + str(min(times))) +print ("Max: " + str(max(times))) +print ("Mean: " + str(statistics.mean(times))) +print ("Median: " + str(statistics.median(times))) +print ("Stdv: " + str(statistics.stdev(times))) + +# #BEFORE REFACTORING dict_series +# duration: [25.652311, 27.894154, 19.641618, 19.929254, 44.754033, 25.218422, 23.167992, 18.649832, 22.899336, 52.108296] +# Min: 18.649832 +# Max: 52.108296 +# Mean: 27.9915248 +# Median: 24.193207 +# Stdv: 11.28891821173264 \ No newline at end of file From 98e4528362ace18f5ee51f7eb8343dc3b8ec6b14 Mon Sep 17 00:00:00 2001 From: dieuska Date: Tue, 17 Sep 2024 14:03:19 +0200 Subject: [PATCH 12/35] commented unused functions functions that seems not be used anymore are commented; afterwards to remove --- brdr/geometry_utils.py | 10 ++- brdr/oe.py | 1 + brdr/utils.py | 138 +++++++++++++---------------------- examples/__init__.py | 39 +++++++++- tests/test_aligner.py | 6 +- tests/test_geometry_utils.py | 8 +- tests/test_utils.py | 63 ++++++++-------- 7 files changed, 131 insertions(+), 134 deletions(-) diff --git a/brdr/geometry_utils.py b/brdr/geometry_utils.py index 6f688e7..de7513c 100644 --- a/brdr/geometry_utils.py +++ b/brdr/geometry_utils.py @@ -376,7 +376,7 @@ def safe_symmetric_difference(geom_a, geom_b): return geom -def grid_bounds(geom: BaseGeometry, delta: float): +def _grid_bounds(geom: BaseGeometry, delta: float): """ Divides a geometric area (specified by `geom`) into a grid of rectangular partitions. @@ -417,7 +417,7 @@ def grid_bounds(geom: BaseGeometry, delta: float): return grid -def get_relevant_polygons_from_geom(geometry: BaseGeometry, buffer_distance: float): +def _get_relevant_polygons_from_geom(geometry: BaseGeometry, buffer_distance: float): """ Get only the relevant parts (polygon) from a geometry. Points, Lines and Polygons smaller than relevant distance are excluded from the @@ -528,7 +528,7 @@ def calculate_geom_by_intersection_and_reference( # in the result. The function below tries to exclude these non-logical parts. 
# see eo_id 206363 with relevant distance=0.2m and SNAP_ALL_SIDE if is_openbaar_domein: - geom = get_relevant_polygons_from_geom(geom, buffer_distance) + geom = _get_relevant_polygons_from_geom(geom, buffer_distance) elif not geom_relevant_intersection.is_empty and geom_relevant_difference.is_empty: geom = geom_reference elif geom_relevant_intersection.is_empty and not geom_relevant_difference.is_empty: @@ -607,7 +607,7 @@ def get_partitions(geom, delta): # partitioning with a quadtree with rectangles? # https://www.fundza.com/algorithmic/quadtree/index.html prepared_geom = prep(geom) - partitions = grid_bounds(geom, delta) + partitions = _grid_bounds(geom, delta) filtered_grid = list(filter(prepared_geom.intersects, partitions)) return filtered_grid @@ -657,9 +657,11 @@ def get_bbox(geometry): def geojson_polygon_to_multipolygon(geojson): """ + #TODO: add an example/test so it is clear this function is used (inside brdrQ) Transforms a geojson: Checks if there are Polygon-features and transforms them into MultiPolygons, so all objects are of type 'MultiPolygon' (or null-geometry). It is important that geometry-type is consitent (f.e. in QGIS) to show and style the geojson-layer """ + if geojson is None or "features" not in geojson or geojson["features"] is None: return geojson for f in geojson["features"]: diff --git a/brdr/oe.py b/brdr/oe.py index 3fc2ff6..04c02c7 100644 --- a/brdr/oe.py +++ b/brdr/oe.py @@ -51,6 +51,7 @@ def get_oe_dict_by_ids(objectids, oetype=OEType.AO): the API. """ logging.warning("deprecated method, use OnroerendErfgoedLoader instead") + # TODO remove function dict_thematic = {} if oetype==OEType.AO: typename = "aanduidingsobjecten" diff --git a/brdr/utils.py b/brdr/utils.py index 3409297..baeedf2 100644 --- a/brdr/utils.py +++ b/brdr/utils.py @@ -43,7 +43,7 @@ def get_series_geojson_dict( if results_type not in features_list_dict: features_list_dict[results_type] = [] - feature = feature_from_geom(geom, properties, geom_attributes) + feature = _feature_from_geom(geom, properties, geom_attributes) features_list_dict[results_type].append(feature) crs_geojson = {"type": "name", "properties": {"name": crs}} @@ -53,7 +53,7 @@ def get_series_geojson_dict( } -def feature_from_geom( +def _feature_from_geom( geom: BaseGeometry, properties: dict = None, geom_attributes=True, @@ -89,7 +89,7 @@ def geojson_from_dict(dictionary, crs, id_field, prop_dict=None, geom_attributes for key, geom in dictionary.items(): properties = dict(prop_dict or {}).get(key, {}) properties[id_field] = key - features.append(feature_from_geom(geom, properties, geom_attributes)) + features.append(_feature_from_geom(geom, properties, geom_attributes)) crs_geojson = {"type": "name", "properties": {"name": crs}} geojson = FeatureCollection(features, crs=crs_geojson) return geojson @@ -222,7 +222,7 @@ def get_breakpoints_zerostreak(x, y): * counter of #relevant_distances where zero-streak holds on * extreme value for zero_streak """ - derivative = numerical_derivative(x, y) + derivative = _numerical_derivative(x, y) # plt.plot(x, y, label="y") # plt.plot( x, derivative, label="derivative") # plt.legend() @@ -282,7 +282,7 @@ def get_breakpoints_zerostreak(x, y): return extremes, zero_streaks -def numerical_derivative(x, y): +def _numerical_derivative(x, y): """ Calculate the numerical derivative of a graph. @@ -303,22 +303,22 @@ def numerical_derivative(x, y): return derivative -def filter_dict_by_key(dictionary, filter_key): - """ - Filters a dictionary to only include keys matching a specific value. 
- - This function creates a new dictionary containing entries from the original - dictionary where the key matches the provided `filter_key`. - - Args: - dictionary (dict): The dictionary to filter. - filter_key (str): The key value to filter by. - - Returns: - dict: A new dictionary containing only entries where the key matches the - `filter_key`. - """ - return {key: dictionary[key] for key in dictionary.keys() if key == filter_key} +# def filter_dict_by_key(dictionary, filter_key): +# """ +# Filters a dictionary to only include keys matching a specific value. +# +# This function creates a new dictionary containing entries from the original +# dictionary where the key matches the provided `filter_key`. +# +# Args: +# dictionary (dict): The dictionary to filter. +# filter_key (str): The key value to filter by. +# +# Returns: +# dict: A new dictionary containing only entries where the key matches the +# `filter_key`. +# """ +# return {key: dictionary[key] for key in dictionary.keys() if key == filter_key} def diffs_from_dict_series( @@ -480,16 +480,16 @@ def get_collection_by_partition( collection = {} if geometry is None: collection = get_collection( - add_bbox_to_url(url=url, crs=crs, bbox=None), limit + _add_bbox_to_url(url=url, crs=crs, bbox=None), limit ) elif partition < 1: collection = get_collection( - add_bbox_to_url(url=url, crs=crs, bbox=get_bbox(geometry)), limit + _add_bbox_to_url(url=url, crs=crs, bbox=get_bbox(geometry)), limit ) else: geoms = get_partitions(geometry, partition) for g in geoms: - coll = get_collection(add_bbox_to_url(url=url, crs=crs, bbox=get_bbox(g)), limit) + coll = get_collection(_add_bbox_to_url(url=url, crs=crs, bbox=get_bbox(g)), limit) if collection == {}: collection = dict(coll) elif "features" in collection and "features" in coll: @@ -497,39 +497,39 @@ def get_collection_by_partition( return collection -def add_bbox_to_url(url, crs=DEFAULT_CRS, bbox=None): +def _add_bbox_to_url(url, crs=DEFAULT_CRS, bbox=None): # Load the Base reference data if bbox is not None: url = url + "&bbox-crs=" + crs + "&bbox=" + bbox return url -def merge_dict_series( - dict_series: dict[float, dict[str, ProcessResult]] -) -> dict[float, dict[str, ProcessResult]]: - """ - Merges dict_series (dict_predicted) with seperated IDs (MULTI_SINGLE_ID_SEPARATOR) - to their original unique ID - """ - dict_series_merged = {} - for dist, item in dict_series.items(): - dict_series_merged[dist] = merge_process_results(item) - return dict_series_merged - - -def merge_dict(dictionary: dict[str, BaseGeometry]) -> dict[str, BaseGeometry]: - """ - Merges dict_series (dict_predicted) with seperated IDs (MULTI_SINGLE_ID_SEPARATOR) - to their original unique ID - """ - out_dictionary = {} - for id_theme, item in dictionary.items(): - id_theme_global = id_theme.split(MULTI_SINGLE_ID_SEPARATOR)[0] - if id_theme_global not in out_dictionary: - out_dictionary[id_theme_global] = [item] - else: - out_dictionary[id_theme_global].append(item) - return {k: make_valid(unary_union(v)) for k, v in out_dictionary.items()} +# def merge_dict_series( +# dict_series: dict[float, dict[str, ProcessResult]] +# ) -> dict[float, dict[str, ProcessResult]]: +# """ +# Merges dict_series (dict_predicted) with seperated IDs (MULTI_SINGLE_ID_SEPARATOR) +# to their original unique ID +# """ +# dict_series_merged = {} +# for dist, item in dict_series.items(): +# dict_series_merged[dist] = merge_process_results(item) +# return dict_series_merged + + +# def merge_dict(dictionary: dict[str, BaseGeometry]) -> dict[str, 
BaseGeometry]: +# """ +# Merges dict_series (dict_predicted) with seperated IDs (MULTI_SINGLE_ID_SEPARATOR) +# to their original unique ID +# """ +# out_dictionary = {} +# for id_theme, item in dictionary.items(): +# id_theme_global = id_theme.split(MULTI_SINGLE_ID_SEPARATOR)[0] +# if id_theme_global not in out_dictionary: +# out_dictionary[id_theme_global] = [item] +# else: +# out_dictionary[id_theme_global].append(item) +# return {k: make_valid(unary_union(v)) for k, v in out_dictionary.items()} def merge_process_results( @@ -564,42 +564,6 @@ def merge_process_results( return grouped_results -def processresult_to_dicts(dict_processresult): - """ - Transforms a dictionary with all ProcessResults to individual dictionaries of the - results - Args: - dict_processresult: - - Returns: - - """ - results = {} - results_diff = {} - results_diff_plus = {} - results_diff_min = {} - results_relevant_intersection = {} - results_relevant_diff = {} - for key in dict_processresult: - processresult = dict_processresult[key] - results[key] = processresult["result"] - results_diff[key] = processresult["result_diff"] - results_diff_plus[key] = processresult["result_diff_plus"] - results_diff_min[key] = processresult["result_diff_min"] - results_relevant_intersection[key] = processresult[ - "result_relevant_intersection" - ] - results_relevant_diff[key] = processresult["result_relevant_diff"] - - return ( - results, - results_diff, - results_diff_plus, - results_diff_min, - results_relevant_intersection, - results_relevant_diff, - ) - def dict_series_by_keys(dict_series): """ diff --git a/examples/__init__.py b/examples/__init__.py index 36ef71f..1c7b4a8 100644 --- a/examples/__init__.py +++ b/examples/__init__.py @@ -5,7 +5,6 @@ import matplotlib.pyplot as plt from brdr.typings import ProcessResult -from brdr.utils import processresult_to_dicts def _make_map(ax, result_dict, thematic_dict, reference_dict): @@ -19,7 +18,7 @@ def _make_map(ax, result_dict, thematic_dict, reference_dict): , so it can be used in matplotlib """ try: - dicts = processresult_to_dicts(result_dict) + dicts = _processresult_to_dicts(result_dict) results = dicts[0] results_diff_pos = dicts[1] results_diff_neg = dicts[2] @@ -150,3 +149,39 @@ def plot_series( plt.legend() plt.show() return + +def _processresult_to_dicts(dict_processresult): + """ + Transforms a dictionary with all ProcessResults to individual dictionaries of the + results + Args: + dict_processresult: + + Returns: + + """ + results = {} + results_diff = {} + results_diff_plus = {} + results_diff_min = {} + results_relevant_intersection = {} + results_relevant_diff = {} + for key in dict_processresult: + processresult = dict_processresult[key] + results[key] = processresult["result"] + results_diff[key] = processresult["result_diff"] + results_diff_plus[key] = processresult["result_diff_plus"] + results_diff_min[key] = processresult["result_diff_min"] + results_relevant_intersection[key] = processresult[ + "result_relevant_intersection" + ] + results_relevant_diff[key] = processresult["result_relevant_diff"] + + return ( + results, + results_diff, + results_diff_plus, + results_diff_min, + results_relevant_intersection, + results_relevant_diff, + ) diff --git a/tests/test_aligner.py b/tests/test_aligner.py index 3f93a1a..adf8986 100644 --- a/tests/test_aligner.py +++ b/tests/test_aligner.py @@ -11,7 +11,7 @@ from brdr.enums import GRBType from brdr.enums import OpenbaarDomeinStrategy from brdr.geometry_utils import buffer_neg_pos -from brdr.geometry_utils import 
grid_bounds +from brdr.geometry_utils import _grid_bounds from brdr.grb import GRBActualLoader from brdr.loader import GeoJsonLoader from brdr.typings import FeatureCollection @@ -32,7 +32,7 @@ def test_buffer_neg_pos(self): def test_grid_bounds_1(self): # Test _grid_bounds function delta = 1.0 - grid_partitions = grid_bounds(self.sample_geom, delta) + grid_partitions = _grid_bounds(self.sample_geom, delta) # Check if the result is a list of Polygon objects self.assertIsInstance(grid_partitions, list) @@ -44,7 +44,7 @@ def test_grid_bounds_1(self): def test_grid_bounds_2(self): # Test _grid_bounds function delta = 2.0 - grid_partitions = grid_bounds(self.sample_geom, delta) + grid_partitions = _grid_bounds(self.sample_geom, delta) # Check if the result is a list of Polygon objects self.assertIsInstance(grid_partitions, list) diff --git a/tests/test_geometry_utils.py b/tests/test_geometry_utils.py index 298baf9..4aef4f3 100644 --- a/tests/test_geometry_utils.py +++ b/tests/test_geometry_utils.py @@ -8,7 +8,7 @@ from brdr.geometry_utils import buffer_neg_pos from brdr.geometry_utils import buffer_pos from brdr.geometry_utils import get_partitions -from brdr.geometry_utils import grid_bounds +from brdr.geometry_utils import _grid_bounds from brdr.geometry_utils import safe_difference from brdr.geometry_utils import safe_intersection from brdr.geometry_utils import safe_symmetric_difference @@ -3952,17 +3952,17 @@ class TestGridBounds(unittest.TestCase): def test_grid_bounds_empty_polygon(self): """Tests grid_bounds with an empty polygon.""" polygon = Polygon() - result = grid_bounds(polygon, 1.0) + result = _grid_bounds(polygon, 1.0) self.assertEqual(result, polygon) def test_grid_bounds_small_grid(self): """Tests grid_bounds with a small area not requiring grid division.""" polygon = Polygon([(0, 0), (1, 0), (1, 1), (0, 1)]) - result = grid_bounds(polygon, 2.0) + result = _grid_bounds(polygon, 2.0) self.assertEqual(len(result), 1) def test_grid_bounds_grid_division(self): """Tests grid_bounds with an area requiring grid""" polygon = Polygon([(0, 0), (0, 5), (5, 5), (5, 0), (0, 0)]) - result = grid_bounds(polygon, 1.0) + result = _grid_bounds(polygon, 1.0) self.assertEqual(len(result), 25) diff --git a/tests/test_utils.py b/tests/test_utils.py index c068270..20a0cfd 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -8,7 +8,7 @@ from brdr.oe import get_oe_dict_by_ids, OEType from brdr.typings import ProcessResult from brdr.utils import diffs_from_dict_series -from brdr.utils import filter_dict_by_key +#from brdr.utils import filter_dict_by_key from brdr.utils import get_breakpoints_zerostreak from brdr.utils import get_collection from brdr.utils import merge_process_results @@ -94,39 +94,34 @@ def test_get_oe_dict_by_ids(self): dict_thematic = get_oe_dict_by_ids([aanduid_id]) self.assertFalse(is_empty(dict_thematic[str(aanduid_id)])) - def test_get_oe_dict_by_ids_erfgoedobject(self): - eo_id = 206363 - dict_thematic = get_oe_dict_by_ids([eo_id], oetype=OEType.EO) - self.assertFalse(is_empty(dict_thematic[str(eo_id)])) - - def test_get_oe_dict_by_ids_empty(self): - dict_thematic = get_oe_dict_by_ids([]) - self.assertEqual(dict_thematic, {}) - - def test_get_oe_dict_by_ids_not_existing(self): - aanduid_id = -1 - dict_thematic = get_oe_dict_by_ids([aanduid_id]) - self.assertEqual(dict_thematic, {}) - - # def test_get_oe_geojson_by_bbox(self): - # bbox = "172000,172000,174000,174000" - # collection = get_oe_geojson_by_bbox(bbox) - # self.assertEqual(collection["type"], 
"FeatureCollection") - - def test_filter_dict_by_key_empty_dict(self): - data = {} - result = filter_dict_by_key(data, "key") - self.assertEqual(result, {}) - - def test_filter_dict_by_key_single_match(self): - data = {"key1": "value1", "key2": "value2"} - result = filter_dict_by_key(data, "key1") - self.assertEqual(result, {"key1": "value1"}) - - def test_filter_dict_by_key_no_match(self): - data = {"key1": "value1", "key2": "value2"} - result = filter_dict_by_key(data, "key3") - self.assertEqual(result, {}) + # def test_get_oe_dict_by_ids_erfgoedobject(self): + # eo_id = 206363 + # dict_thematic = get_oe_dict_by_ids([eo_id], oetype=OEType.EO) + # self.assertFalse(is_empty(dict_thematic[str(eo_id)])) + # + # def test_get_oe_dict_by_ids_empty(self): + # dict_thematic = get_oe_dict_by_ids([]) + # self.assertEqual(dict_thematic, {}) + # + # def test_get_oe_dict_by_ids_not_existing(self): + # aanduid_id = -1 + # dict_thematic = get_oe_dict_by_ids([aanduid_id]) + # self.assertEqual(dict_thematic, {}) + + # def test_filter_dict_by_key_empty_dict(self): + # data = {} + # result = filter_dict_by_key(data, "key") + # self.assertEqual(result, {}) + # + # def test_filter_dict_by_key_single_match(self): + # data = {"key1": "value1", "key2": "value2"} + # result = filter_dict_by_key(data, "key1") + # self.assertEqual(result, {"key1": "value1"}) + # + # def test_filter_dict_by_key_no_match(self): + # data = {"key1": "value1", "key2": "value2"} + # result = filter_dict_by_key(data, "key3") + # self.assertEqual(result, {}) def test_diffs_from_dict_series_complete(self): """Tests diffs_from_dict_series with complete data.""" From 5784ed19c3f5694ed107f2e0f2fc912db179d9cf Mon Sep 17 00:00:00 2001 From: dieuska Date: Tue, 17 Sep 2024 19:40:18 +0200 Subject: [PATCH 13/35] #57 refactoring of the dict_series resulting in multiple changes --- brdr/aligner.py | 107 ++++++++----------- brdr/grb.py | 69 ++++++------ brdr/utils.py | 91 ++++++++-------- examples/__init__.py | 30 +++--- examples/example_131635.py | 7 +- examples/example_aligner.py | 6 +- examples/example_ao.py | 13 +-- examples/example_combined_borders_adp_gbg.py | 7 +- examples/example_eo.py | 7 +- examples/example_evaluate.py | 7 +- examples/example_evaluate_ao.py | 5 +- examples/example_evaluate_multi_to_single.py | 9 +- examples/example_multi_to_single.py | 12 +-- examples/example_multipolygon.py | 2 +- examples/example_parcel_vs_building.py | 9 +- examples/example_readme.py | 9 +- examples/example_refactor_dict_series.py | 28 +++++ examples/example_speedtest.py | 15 ++- examples/examples_predictor.py | 2 - tests/test_aligner.py | 88 +++++++-------- tests/test_examples.py | 27 ++--- tests/test_grb.py | 13 +-- tests/test_integration.py | 24 ++--- tests/test_utils.py | 11 +- 24 files changed, 300 insertions(+), 298 deletions(-) create mode 100644 examples/example_refactor_dict_series.py diff --git a/brdr/aligner.py b/brdr/aligner.py index 23af03e..eff8515 100644 --- a/brdr/aligner.py +++ b/brdr/aligner.py @@ -130,9 +130,9 @@ def __init__( # results # output-dictionaries (when processing dict_thematic) - self.dict_result: dict[str, ProcessResult] = {} + self.dict_result: dict[str, dict[float, ProcessResult]]= {} # dictionary with the 'predicted' results, grouped by relevant distance - self.dict_predicted = dict[float, dict[str, ProcessResult]] + self.dict_predicted : dict[str, dict[float, ProcessResult]] ={} # Coordinate reference system # thematic geometries and reference geometries are assumed to be in the same CRS @@ -152,7 +152,7 @@ def 
buffer_distance(self): def process_geometry( self, input_geometry: BaseGeometry, - relevant_distance=1, + relevant_distance:float=1, od_strategy=OpenbaarDomeinStrategy.SNAP_SINGLE_SIDE, threshold_overlap_percentage=50, ) -> ProcessResult: @@ -257,7 +257,7 @@ def process_dict_thematic( relevant_distance=1, od_strategy=OpenbaarDomeinStrategy.SNAP_SINGLE_SIDE, threshold_overlap_percentage=50, - ) -> dict[str, ProcessResult]: + ) -> dict[str, dict[float, ProcessResult]]: """ Aligns a thematic dictionary of geometries to the reference layer based on specified parameters. - method to align a thematic dictionary to the reference @@ -282,24 +282,9 @@ def process_dict_thematic( - relevant_diff: relevant differences. """ - dict_result = {} - dict_thematic = self.dict_thematic - if self.multi_as_single_modus: - dict_thematic = multipolygons_to_singles(dict_thematic) - for key in dict_thematic: - self.logger.feedback_debug("thematic id to process: " + str(key)) - try: - dict_result[key] = self.process_geometry( - dict_thematic[key], - relevant_distance, - od_strategy, - threshold_overlap_percentage, - ) - except ValueError as e: - self.logger.feedback_warning(str(e)) - if self.multi_as_single_modus: - dict_result = merge_process_results(dict_result) - self.dict_result = dict_result + self.dict_result = self.process_series(relevant_distances=[relevant_distance], + od_strategy=od_strategy, + threshold_overlap_percentage=threshold_overlap_percentage) return self.dict_result def predictor( @@ -378,9 +363,6 @@ def predictor( threshold_overlap_percentage=threshold_overlap_percentage, ) dict_thematic = self.dict_thematic - # if self.multi_as_single_modus: - # dict_series = merge_dict_series(dict_series) - # dict_thematic = merge_dict(self.dict_thematic) diffs_dict = diffs_from_dict_series(dict_series, dict_thematic) @@ -397,12 +379,12 @@ def predictor( ) logging.debug(str(theme_id)) if len(zero_streaks) == 0: - dict_predicted[relevant_distances[0]][theme_id] = dict_series[ + dict_predicted[theme_id][relevant_distances[0]] = dict_series[theme_id][ relevant_distances[0] - ][theme_id] + ] logging.info("No zero-streaks found for: " + str(theme_id)) for zs in zero_streaks: - dict_predicted[zs[0]][theme_id] = dict_series[zs[0]][theme_id] + dict_predicted[theme_id] [zs[0]]= dict_series[theme_id][zs[0]] self.dict_predicted = dict_predicted @@ -417,7 +399,7 @@ def process_series( relevant_distances: Iterable[float], od_strategy=OpenbaarDomeinStrategy.SNAP_SINGLE_SIDE, threshold_overlap_percentage=50, - ) -> dict[float, dict[str, ProcessResult]]: + ) -> dict[str, dict[float, ProcessResult]]: """ Calculates the resulting dictionaries for thematic data based on a series of relevant distances. @@ -431,13 +413,12 @@ def process_series( considering full overlap. Defaults to 50. Returns: - dict: A dictionary containing the resulting dictionaries for a series of - relevant distances: + dict: A dictionary, for every thematic ID a dictionary with the results for all distances { - 'relevant_distance_1': {theme_id_1: (ProcessResult), theme_id_2: + 'theme_id_1': {0: (ProcessResult), 0.1: (ProcessResult), ...}, - 'relevant_distance_2': {theme_id_1: (ProcessResult), theme_id_2: + 'theme_id_2': {0: (ProcessResult), 0.1: (ProcessResult), ...}, ... 
} @@ -446,18 +427,36 @@ def process_series( self.od_strategy = od_strategy self.threshold_overlap_percentage = threshold_overlap_percentage dict_series = {} - for s in relevant_distances: - self.logger.feedback_info( - "Processing series - relevant_distance (m):" - + str(s) - + " with ODStrategy " - + str(self.od_strategy) - ) - dict_series[s] = self.process_dict_thematic(s, od_strategy) + dict_thematic = self.dict_thematic + + if self.multi_as_single_modus: + dict_thematic = multipolygons_to_singles(dict_thematic) + + for key,geometry in dict_thematic.items(): + self.logger.feedback_info(f"thematic id {str(key)} processed with relevant distances (m) [{str(relevant_distances)}]") + dict_series[key] = {} + for relevant_distance in relevant_distances: + try: + processed_result = self.process_geometry( + geometry, + relevant_distance, + od_strategy, + threshold_overlap_percentage, + ) + except ValueError as e: + self.logger.feedback_warning(str(e)) + + dict_series[key][relevant_distance] = processed_result + + if self.multi_as_single_modus: + dict_series = merge_process_results(dict_series) + self.logger.feedback_info( "End of processing series: " + str(relevant_distances) ) - return dict_series + self.dict_result = dict_series + + return self.dict_result def get_formula(self, geometry: BaseGeometry, with_geom=False): """ @@ -568,14 +567,6 @@ def get_formula(self, geometry: BaseGeometry, with_geom=False): self.logger.feedback_debug(str(dict_formula)) return dict_formula - def get_results_as_dict(self): - """ - get a dict of the results - """ - # if self.multi_as_single_modus: - # return merge_process_results(self.dict_result) - return self.dict_result - def get_results_as_geojson(self, formula=False): """ convert the results to geojson feature collections @@ -585,15 +576,11 @@ def get_results_as_geojson(self, formula=False): in the output. Defaults to False. """ results_dict = self.dict_result - # if self.multi_as_single_modus: - # results_dict = merge_process_results(results_dict) - - return self.get_predictions_as_geojson( - formula, - {self.relevant_distance: results_dict}, + return self.get_series_as_geojson( + formula,self.dict_result, ) - def get_predictions_as_geojson(self, formula=False, series_dict=None): + def get_series_as_geojson(self, formula=False, series_dict=None): """ get a dictionary containing of the resulting geometries as geojson, based on the 'predicted' relevant distances. 
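This patch inverts the nesting of the series dictionaries: results are now keyed by thematic id first and by relevant distance second (dict[str, dict[float, ProcessResult]]), which is why process_series, get_series_as_geojson and the helpers in utils.py now iterate theme ids in the outer loop. A minimal sketch of consuming the refactored structure, assuming the process_series signature shown above (file paths are borrowed from example_speedtest.py and the distances are illustrative):

from brdr.aligner import Aligner
from brdr.loader import GeoJsonFileLoader

aligner = Aligner()
aligner.load_thematic_data(
    GeoJsonFileLoader("../tests/testdata/theme.geojson", "theme_identifier")
)
aligner.load_reference_data(
    GeoJsonFileLoader("../tests/testdata/reference_leuven.geojson", "capakey")
)

# One call for a whole series of relevant distances (values are illustrative)
dict_series = aligner.process_series(relevant_distances=[0.5, 1, 1.5, 2])
# Outer key: thematic id; inner key: relevant distance
for theme_id, results_by_distance in dict_series.items():
    for relevant_distance, process_result in results_by_distance.items():
        print(theme_id, relevant_distance, process_result["result"].area)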
@@ -602,12 +589,12 @@ def get_predictions_as_geojson(self, formula=False, series_dict=None): series_dict = series_dict or self.dict_predicted prop_dictionary = defaultdict(dict) - for relevant_distance, results_dict in series_dict.items(): - for theme_id, process_results in results_dict.items(): + for theme_id, results_dict in series_dict.items(): + for relevant_distance, process_results in results_dict.items(): if formula: result = process_results["result"] formula = self.get_formula(result) - prop_dictionary[relevant_distance][theme_id] = { + prop_dictionary[theme_id][relevant_distance] = { "formula": json.dumps(formula) } diff --git a/brdr/grb.py b/brdr/grb.py index a827ceb..8ecf8da 100644 --- a/brdr/grb.py +++ b/brdr/grb.py @@ -27,10 +27,10 @@ from brdr.geometry_utils import get_bbox from brdr.loader import GeoJsonLoader, DictLoader from brdr.logger import Logger -from brdr.utils import dict_series_by_keys, get_series_geojson_dict from brdr.utils import geojson_to_dicts from brdr.utils import get_collection from brdr.utils import get_collection_by_partition +from brdr.utils import get_series_geojson_dict log = logging.getLogger(__name__) @@ -350,28 +350,26 @@ def evaluate( """ if dict_unchanged is None: dict_unchanged = {} - theme_ids = list(dict_series_by_keys(dict_series).keys()) + theme_ids = list(dict_series.keys()) dict_evaluated_result = {} prop_dictionary = {} # Fill the dictionary-structure with empty values - for dist in dict_series.keys(): - dict_evaluated_result[dist] = {} - prop_dictionary[dist] = {} - for theme_id in theme_ids: - prop_dictionary[dist][theme_id] = {} - for theme_id in dict_unchanged.keys(): - prop_dictionary[dist][theme_id] = {} - - dict_predicted_keys = dict_series_by_keys(dict_predicted) - - for theme_id, dist_dict in dict_predicted_keys.items(): + for theme_id in theme_ids: + dict_evaluated_result[theme_id] = {} + prop_dictionary[theme_id] = {} + for dist in dict_series[theme_id].keys(): + prop_dictionary[theme_id][dist] = {} + for theme_id in dict_unchanged.keys(): + prop_dictionary[theme_id] = {} + + for theme_id, dict_results in dict_predicted.items(): equality = False - for dist in sorted(dist_dict.keys()): + for dist in sorted(dict_results.keys()): if equality: break - geomresult = dist_dict[dist][theme_id]["result"] + geomresult = dict_results[dist]["result"] actual_formula = actual_aligner.get_formula(geomresult) - prop_dictionary[dist][theme_id]["formula"] = json.dumps(actual_formula) + prop_dictionary[theme_id][dist]["formula"] = json.dumps(actual_formula) base_formula = None if theme_id in thematic_dict_formula: base_formula = thematic_dict_formula[theme_id] @@ -382,39 +380,40 @@ def evaluate( threshold_percentage, ) if equality: - dict_evaluated_result[dist][theme_id] = dict_predicted[dist][theme_id] - prop_dictionary[dist][theme_id]["evaluation"] = prop + dict_evaluated_result[theme_id][dist] = dict_predicted[theme_id][dist] + prop_dictionary[theme_id][dist]["evaluation"] = prop break - evaluated_theme_ids = list(dict_series_by_keys(dict_evaluated_result).keys()) + evaluated_theme_ids = [theme_id for theme_id, value in dict_evaluated_result.items() if value != {}] + # fill where no equality is found/ The biggest predicted distance is returned as # proposal for theme_id in theme_ids: if theme_id not in evaluated_theme_ids: - if len(dict_predicted_keys[theme_id].keys()) == 0: - result = dict_series[0][theme_id] - dict_evaluated_result[0][theme_id] = result - prop_dictionary[0][theme_id]["formula"] = json.dumps( + if 
len(dict_predicted[theme_id].keys()) == 0: + result = dict_series[theme_id][0] + dict_evaluated_result[theme_id][0] = result + prop_dictionary[theme_id][0]["formula"] = json.dumps( actual_aligner.get_formula(result["result"]) ) - prop_dictionary[0][theme_id]["evaluation"] = Evaluation.NO_PREDICTION_5 + prop_dictionary[theme_id][0]["evaluation"] = Evaluation.NO_PREDICTION_5 continue # Add all predicted features so they can be manually checked - for dist in dict_predicted_keys[theme_id].keys(): - predicted_resultset = dict_predicted[dist][theme_id] - dict_evaluated_result[dist][theme_id] = predicted_resultset - prop_dictionary[dist][theme_id]["formula"] = json.dumps( + for dist in dict_predicted[theme_id].keys(): + predicted_resultset = dict_predicted[theme_id][dist] + dict_evaluated_result[theme_id][dist] = predicted_resultset + prop_dictionary[theme_id][dist]["formula"] = json.dumps( actual_aligner.get_formula(predicted_resultset["result"]) ) - prop_dictionary[dist][theme_id]["evaluation"] = Evaluation.TO_CHECK_4 + prop_dictionary[theme_id][dist]["evaluation"] = Evaluation.TO_CHECK_4 for theme_id, geom in dict_unchanged.items(): - result = {"result": geom} - dict_evaluated_result[0][theme_id] = result - prop_dictionary[0][theme_id]["evaluation"] = Evaluation.NO_CHANGE_6 - prop_dictionary[0][theme_id]["formula"] = json.dumps( - actual_aligner.get_formula(result["result"]) - ) + prop_dictionary[theme_id] = {0: + {"result": geom, + "evaluation": Evaluation.NO_CHANGE_6, + "formula": json.dumps(actual_aligner.get_formula(geom)) + } + } return dict_evaluated_result, prop_dictionary diff --git a/brdr/utils.py b/brdr/utils.py index baeedf2..d03a007 100644 --- a/brdr/utils.py +++ b/brdr/utils.py @@ -21,10 +21,10 @@ def get_series_geojson_dict( - series_dict: dict[float, dict[str, ProcessResult]], + series_dict: dict[str, dict[float, ProcessResult]], crs: str, id_field: str, - series_prop_dict: dict[float, dict[str, any]] = None, + series_prop_dict: dict[str, dict[float, any]] = None, geom_attributes=True, ): """ @@ -32,10 +32,10 @@ def get_series_geojson_dict( """ features_list_dict = {} - for relative_distance, results_dict in series_dict.items(): - prop_dict = dict(series_prop_dict or {}).get(relative_distance, {}) - for theme_id, process_result in results_dict.items(): - properties = prop_dict.get(theme_id, {}) + for theme_id, results_dict in series_dict.items(): + prop_dict = dict(series_prop_dict or {}).get(theme_id, {}) + for relative_distance, process_result in results_dict.items(): + properties = prop_dict.get(relative_distance, {}) properties[id_field] = theme_id properties["relevant_distance"] = relative_distance @@ -322,7 +322,7 @@ def _numerical_derivative(x, y): def diffs_from_dict_series( - dict_series: dict[float, dict[str, ProcessResult]], + dict_series: dict[str, dict[float, ProcessResult]], dict_thematic: dict[str, BaseGeometry], diff_metric: DiffMetric = DiffMetric.CHANGES_AREA, ): @@ -375,16 +375,14 @@ def diffs_from_dict_series( KeyError: If a thematic element key is missing from the results in `dict_series`. 
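    Example (illustrative; the toy values mirror tests/test_utils.py and rely on
    the default CHANGES_AREA metric):

        dict_series = {
            "theme_id1": {
                10: {
                    "result": Polygon([(0, 0), (8, 0), (8, 8), (0, 8)]),
                    "result_diff": Polygon([(2, 2), (6, 2), (6, 6), (2, 6)]),
                }
            }
        }
        dict_thematic = {"theme_id1": Polygon([(0, 0), (10, 0), (10, 10), (0, 10)])}
        diffs_from_dict_series(dict_series, dict_thematic)
        # -> {"theme_id1": {10: 16.0}}, i.e. the area of result_diff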
""" - thematic_ids = dict_thematic.keys() - - diffs = {thematic_id: {} for thematic_id in thematic_ids} - + diffs = {} # all the relevant distances used to calculate the series - for rel_dist, results_dict in dict_series.items(): + for thematic_id, results_dict in dict_series.items(): + diffs[thematic_id] = {} - for thematic_id in thematic_ids: - result = results_dict.get(thematic_id, {}).get("result") - result_diff = results_dict.get(thematic_id, {}).get("result_diff") + for rel_dist in results_dict: + result = results_dict.get(rel_dist, {}).get("result") + result_diff = results_dict.get(rel_dist, {}).get("result_diff") # result_diff_plus = results_dict.get(thematic_id, {})\ # .get("result_diff_plus") # result_diff_min = results_dict.get(thematic_id, {})\ @@ -533,9 +531,10 @@ def _add_bbox_to_url(url, crs=DEFAULT_CRS, bbox=None): def merge_process_results( - result_dict: dict[str, ProcessResult] -) -> dict[str, ProcessResult]: + result_dict: dict[str, dict[float, ProcessResult]] +) -> dict[str, dict[float, ProcessResult]]: """ + #TODO: function can be optimised. At the moment it is unioned element by element. Possible to collect the elements and union at the end Merges geometries in a dictionary from multiple themes into a single theme. Args: result_dict (dict): A dictionary where keys are theme IDs and values are @@ -547,39 +546,39 @@ def merge_process_results( """ grouped_results: dict[str, ProcessResult] = {} - for id_theme, process_result in result_dict.items(): + for id_theme, dict_results in result_dict.items(): id_theme_global = id_theme.split(MULTI_SINGLE_ID_SEPARATOR)[0] if id_theme_global not in grouped_results: - grouped_results[id_theme_global] = process_result + grouped_results[id_theme_global] = dict_results else: - for key in process_result: - geom: BaseGeometry = process_result[key] # noqa - if geom.is_empty or geom is None: - continue - existing: BaseGeometry = grouped_results[id_theme_global][key] # noqa - grouped_results[id_theme_global][key] = unary_union( # noqa - [existing, geom] - ) - + for rel_dist,process_result in dict_results.items(): + for key in process_result: + geom: BaseGeometry = process_result[key] # noqa + if geom.is_empty or geom is None: + continue + existing: BaseGeometry = grouped_results[id_theme_global][rel_dist][key] # noqa + grouped_results[id_theme_global][rel_dist][key] = unary_union( # noqa + [existing, geom] + ) return grouped_results -def dict_series_by_keys(dict_series): - """ - Transforms a dict_series into a dictionary with theme_id as keys, and a dictionary - with all predicted distances and their resulting geometry as a value. - Args: - dict_series: a dictionary result of the 'series/predictor' - - Returns: dictionary with theme_id as keys, and a dictionary with all serial - distances and their resulting geometry as a value. - - """ - dict_series_keys = {} - for dist, res in dict_series.items(): - for key in res.keys(): - if key not in dict_series_keys.keys(): - dict_series_keys[key] = {} - dict_series_keys[key][dist] = {key: res[key]} - return dict_series_keys +# def dict_series_by_keys(dict_series): +# """ +# Transforms a dict_series into a dictionary with theme_id as keys, and a dictionary +# with all predicted distances and their resulting geometry as a value. +# Args: +# dict_series: a dictionary result of the 'series/predictor' +# +# Returns: dictionary with theme_id as keys, and a dictionary with all serial +# distances and their resulting geometry as a value. 
+# +# """ +# dict_series_keys = {} +# for dist, res in dict_series.items(): +# for key in res.keys(): +# if key not in dict_series_keys.keys(): +# dict_series_keys[key] = {} +# dict_series_keys[key][dist] = {key: res[key]} +# return dict_series_keys diff --git a/examples/__init__.py b/examples/__init__.py index 1c7b4a8..ef813f6 100644 --- a/examples/__init__.py +++ b/examples/__init__.py @@ -7,7 +7,7 @@ from brdr.typings import ProcessResult -def _make_map(ax, result_dict, thematic_dict, reference_dict): +def _make_map(ax, processresult, thematic_dict, reference_dict): """ Fills an ax with a map: * reference_dict @@ -18,7 +18,7 @@ def _make_map(ax, result_dict, thematic_dict, reference_dict): , so it can be used in matplotlib """ try: - dicts = _processresult_to_dicts(result_dict) + dicts = _processresult_to_dicts(processresult) results = dicts[0] results_diff_pos = dicts[1] results_diff_neg = dicts[2] @@ -89,13 +89,19 @@ def _make_map(ax, result_dict, thematic_dict, reference_dict): def show_map( - dict_results_by_distance: dict[float, dict[str, ProcessResult]], + dict_results: dict[str, dict[float, ProcessResult]], dict_thematic, dict_reference, ): """ Show results on a map """ + dict_results_by_distance = {} + for theme_id, dist_result in dict_results.items(): + for rel_dist, processresults in dist_result.items(): + dict_results_by_distance[rel_dist]={} + dict_results_by_distance[rel_dist][theme_id] = processresults + len_series = len(dict_results_by_distance.keys()) i = 0 # Plot data in subplots @@ -116,18 +122,18 @@ def show_map( plt.show() -def print_formula(dict_results_by_distance, aligner): - for rel_dist in dict_results_by_distance: - for key in dict_results_by_distance[rel_dist]: +def print_formula(dict_results, aligner): + for theme_id, dist_results in dict_results.items(): + for rel_dist,processresults in dist_results.items(): print( "--------Formula for ID " - + str(key) + + str(theme_id) + " with relevant distance " + str(rel_dist) + "--------------" ) print( - aligner.get_formula(dict_results_by_distance[rel_dist][key]["result"]) + aligner.get_formula(processresults["result"]) ) return @@ -150,12 +156,12 @@ def plot_series( plt.show() return -def _processresult_to_dicts(dict_processresult): +def _processresult_to_dicts(processresult): """ Transforms a dictionary with all ProcessResults to individual dictionaries of the results Args: - dict_processresult: + processresult: Returns: @@ -166,8 +172,8 @@ def _processresult_to_dicts(dict_processresult): results_diff_min = {} results_relevant_intersection = {} results_relevant_diff = {} - for key in dict_processresult: - processresult = dict_processresult[key] + for key in processresult: + processresult = processresult[key] results[key] = processresult["result"] results_diff[key] = processresult["result_diff"] results_diff_plus[key] = processresult["result_diff_plus"] diff --git a/examples/example_131635.py b/examples/example_131635.py index 9ca8433..cb87bba 100644 --- a/examples/example_131635.py +++ b/examples/example_131635.py @@ -6,6 +6,7 @@ from examples import show_map if __name__ == "__main__": + #TODO # EXAMPLE for a thematic Polygon (aanduid_id 131635) # Initiate brdr @@ -18,9 +19,9 @@ # RESULTS rel_dist = 2 - dict_results_by_distance = {rel_dist: aligner.process_dict_thematic(rel_dist, 4)} + dict_results = aligner.process_dict_thematic(rel_dist, 4) # put resulting tuple in a dictionary aligner.export_results("output/", formula=True) - show_map(dict_results_by_distance, aligner.dict_thematic, aligner.dict_reference) - 
print_formula(dict_results_by_distance, aligner) + show_map(dict_results, aligner.dict_thematic, aligner.dict_reference) + print_formula(dict_results, aligner) diff --git a/examples/example_aligner.py b/examples/example_aligner.py index 8095f83..053c922 100644 --- a/examples/example_aligner.py +++ b/examples/example_aligner.py @@ -50,11 +50,9 @@ # Example how to use the Aligner rel_dist = 6 - dict_results_by_distance = { - aligner.relevant_distance: aligner.process_dict_thematic( + dict_results= aligner.process_dict_thematic( relevant_distance=rel_dist, od_strategy=OpenbaarDomeinStrategy.SNAP_FULL_AREA_ALL_SIDE, ) - } aligner.export_results("output/") - show_map(dict_results_by_distance, aligner.dict_thematic, aligner.dict_reference) + show_map(dict_results, aligner.dict_thematic, aligner.dict_reference) diff --git a/examples/example_ao.py b/examples/example_ao.py index 5041fad..5d60bc0 100644 --- a/examples/example_ao.py +++ b/examples/example_ao.py @@ -4,7 +4,6 @@ from brdr.enums import GRBType from brdr.grb import GRBActualLoader from brdr.oe import OnroerendErfgoedLoader -from brdr.utils import dict_series_by_keys from examples import show_map, plot_series if __name__ == "__main__": @@ -14,7 +13,6 @@ # Initiate brdr aligner = Aligner() # Load thematic data & reference data - aanduidingsobjecten = range(1, 10) aanduidingsobjecten =[117798,116800,117881] loader = OnroerendErfgoedLoader(aanduidingsobjecten) @@ -22,25 +20,16 @@ loader = GRBActualLoader(grb_type=GRBType.ADP, partition=1000, aligner=aligner) aligner.load_reference_data(loader) - # RESULTS - # rel_dist = 0.2 - # dict_results_by_distance = {} - # #put resulting tuple in a dictionary - # dict_results_by_distance[rel_dist] = aligner.process_dict_thematic(rel_dist,2) - # aligner.export_results("output/") - # show_map(dict_results_by_distance, aligner.dict_thematic, aligner.dict_reference) - series = np.arange(0, 500, 20, dtype=int) / 100 # predict which relevant distances are interesting to propose as resulting geometry dict_series, dict_predicted, diffs = aligner.predictor( relevant_distances=series, od_strategy=2, threshold_overlap_percentage=50 ) - dict_predicted = dict_series_by_keys(dict_predicted) for key in dict_predicted.keys(): diff = {key: diffs[key]} plot_series(series, diff) show_map( - dict_predicted[key], + dict_predicted, {key: aligner.dict_thematic[key]}, aligner.dict_reference, ) diff --git a/examples/example_combined_borders_adp_gbg.py b/examples/example_combined_borders_adp_gbg.py index 27f0f64..f9549f8 100644 --- a/examples/example_combined_borders_adp_gbg.py +++ b/examples/example_combined_borders_adp_gbg.py @@ -57,8 +57,7 @@ aligner.load_reference_data_dict(dict_ref) rel_dist = 2 - dict_results_by_distance = {} - dict_results_by_distance[rel_dist] = aligner.process_dict_thematic(rel_dist, 4) + dict_results = aligner.process_dict_thematic(rel_dist, 4) aligner.export_results("output/") - show_map(dict_results_by_distance, aligner.dict_thematic, aligner.dict_reference) - print_formula(dict_results_by_distance, aligner) + show_map(dict_results, aligner.dict_thematic, aligner.dict_reference) + print_formula(dict_results, aligner) diff --git a/examples/example_eo.py b/examples/example_eo.py index fbcadb0..3e38d3d 100644 --- a/examples/example_eo.py +++ b/examples/example_eo.py @@ -4,7 +4,7 @@ from brdr.enums import GRBType from brdr.grb import GRBActualLoader from brdr.oe import OnroerendErfgoedLoader, OEType -from brdr.utils import write_geojson, dict_series_by_keys +from brdr.utils import write_geojson from 
examples import show_map, plot_series if __name__ == "__main__": @@ -36,16 +36,15 @@ dict_series, dict_predicted, diffs = aligner.predictor( relevant_distances=series, od_strategy=2, threshold_overlap_percentage=50 ) - fcs = aligner.get_predictions_as_geojson(series_dict=dict_predicted) + fcs = aligner.get_series_as_geojson(series_dict=dict_predicted) write_geojson("output/predicted.geojson", fcs["result"]) write_geojson("output/predicted_diff.geojson", fcs["result_diff"]) - dict_predicted = dict_series_by_keys(dict_predicted) for key in dict_predicted.keys(): diff = {key: diffs[key]} plot_series(series, diff) show_map( - dict_predicted[key], + dict_predicted, {key: aligner.dict_thematic[key]}, aligner.dict_reference, ) diff --git a/examples/example_evaluate.py b/examples/example_evaluate.py index 46aa494..b5d5925 100644 --- a/examples/example_evaluate.py +++ b/examples/example_evaluate.py @@ -99,11 +99,12 @@ def fid_to_geojson(geojson): base_aligner.load_reference_data( GRBFiscalParcelLoader(year=base_year, aligner=base_aligner) ) -base_process_result = base_aligner.process_dict_thematic(relevant_distance=2) +relevant_distance=2 +base_process_result = base_aligner.process_dict_thematic(relevant_distance=relevant_distance) thematic_dict_formula = {} thematic_dict_result = {} for key in base_process_result: - thematic_dict_result[key] = base_process_result[key]["result"] + thematic_dict_result[key] = base_process_result[key][relevant_distance]["result"] thematic_dict_formula[key] = base_aligner.get_formula(thematic_dict_result[key]) print(key + ": " + thematic_dict_result[key].wkt) print(key + ": " + str(thematic_dict_formula[key])) @@ -149,7 +150,7 @@ def fid_to_geojson(geojson): series_prop_dict=prop_dictionary, ) print(fc["result"]) -fcs = actual_aligner.get_predictions_as_geojson(formula=True) +fcs = actual_aligner.get_series_as_geojson(formula=True) print(fcs["result"]) for feature in fc["result"]["features"]: diff --git a/examples/example_evaluate_ao.py b/examples/example_evaluate_ao.py index 3b6c88c..01b6cf0 100644 --- a/examples/example_evaluate_ao.py +++ b/examples/example_evaluate_ao.py @@ -23,11 +23,12 @@ base_aligner.load_reference_data( GRBFiscalParcelLoader(year=base_year, aligner=base_aligner) ) -base_process_result = base_aligner.process_dict_thematic(relevant_distance=3) +relevant_distance=3 +base_process_result = base_aligner.process_dict_thematic(relevant_distance=relevant_distance) thematic_dict_formula = {} thematic_dict_result = {} for key in base_process_result: - thematic_dict_result[key] = base_process_result[key]["result"] + thematic_dict_result[key] = base_process_result[key][relevant_distance]["result"] thematic_dict_formula[key] = base_aligner.get_formula(thematic_dict_result[key]) base_aligner_result = Aligner() base_aligner_result.load_thematic_data(DictLoader(thematic_dict_result)) diff --git a/examples/example_evaluate_multi_to_single.py b/examples/example_evaluate_multi_to_single.py index 593da3c..3f0efc8 100644 --- a/examples/example_evaluate_multi_to_single.py +++ b/examples/example_evaluate_multi_to_single.py @@ -11,7 +11,7 @@ from brdr.loader import DictLoader from brdr.oe import OnroerendErfgoedLoader from brdr.utils import get_series_geojson_dict -from brdr.utils import merge_process_results +#from brdr.utils import merge_process_results multi_as_single_modus = False @@ -25,15 +25,16 @@ base_aligner.load_reference_data( GRBFiscalParcelLoader(year=base_year, aligner=base_aligner) ) -base_process_result = 
base_aligner.process_dict_thematic(relevant_distance=2) -base_process_result = merge_process_results(base_process_result) +relevant_distance=2 +base_process_result = base_aligner.process_dict_thematic(relevant_distance=relevant_distance) +#base_process_result = merge_process_results(base_process_result) thematic_dict_formula = {} thematic_dict_result = {} # Create a dictionary with resulting geometries (aligned on Adpf2022) and a dictionary # with the corresponding formula for key in base_process_result: - thematic_dict_result[key] = base_process_result[key]["result"] + thematic_dict_result[key] = base_process_result[key][relevant_distance]["result"] thematic_dict_formula[key] = base_aligner.get_formula(thematic_dict_result[key]) # Determine all features that are possibly changed during timespan diff --git a/examples/example_multi_to_single.py b/examples/example_multi_to_single.py index d12fea3..cb67efb 100644 --- a/examples/example_multi_to_single.py +++ b/examples/example_multi_to_single.py @@ -20,11 +20,11 @@ ) rel_dist = 20 -dict_results_by_distance = {rel_dist: aligner.process_dict_thematic(rel_dist, 4)} +dict_results = aligner.process_dict_thematic(rel_dist, 4) aligner.export_results("output/") -show_map(dict_results_by_distance, aligner.dict_thematic, aligner.dict_reference) +show_map(dict_results, aligner.dict_thematic, aligner.dict_reference) -print_formula(dict_results_by_distance, aligner) +print_formula(dict_results, aligner) # WITH MULTI_TO_SINGLE @@ -40,8 +40,8 @@ ) rel_dist = 20 -dict_results_by_distance = {rel_dist: aligner.process_dict_thematic(rel_dist, 4)} +dict_results = aligner.process_dict_thematic(rel_dist, 4) aligner.export_results("output/") -show_map(dict_results_by_distance, aligner.dict_thematic, aligner.dict_reference) +show_map(dict_results, aligner.dict_thematic, aligner.dict_reference) -print_formula(dict_results_by_distance, aligner) +print_formula(dict_results, aligner) diff --git a/examples/example_multipolygon.py b/examples/example_multipolygon.py index 433a28b..75522ad 100644 --- a/examples/example_multipolygon.py +++ b/examples/example_multipolygon.py @@ -26,7 +26,7 @@ ) dict_series, dict_predicted, diffs = aligner.predictor() -fcs = aligner.get_predictions_as_geojson(series_dict=dict_predicted, formula=True) +fcs = aligner.get_series_as_geojson(series_dict=dict_predicted, formula=True) aligner.export_results("output/") write_geojson("output/predicted.geojson", fcs["result"]) write_geojson("output/predicted_diff.geojson", fcs["result_diff"]) diff --git a/examples/example_parcel_vs_building.py b/examples/example_parcel_vs_building.py index 37e98c8..7632478 100644 --- a/examples/example_parcel_vs_building.py +++ b/examples/example_parcel_vs_building.py @@ -2,6 +2,7 @@ from brdr.aligner import Aligner from brdr.enums import GRBType +from brdr.grb import GRBActualLoader from brdr.utils import diffs_from_dict_series from examples import plot_series @@ -14,8 +15,8 @@ aligner_x.load_thematic_data_file( "../tests/testdata/test_parcel_vs_building.geojson", "theme_id" ) - aligner_x.load_reference_data_grb_actual( - grb_type=GRBType.ADP, partition=1000 + aligner_x.load_reference_data(GRBActualLoader + (grb_type=GRBType.ADP, partition=1000,aligner=aligner_x) ) # gebruik de actuele adp-percelen adp= administratieve percelen aligner_y = Aligner() @@ -23,8 +24,8 @@ aligner_y.load_thematic_data_file( "../tests/testdata/test_parcel_vs_building.geojson", "theme_id" ) - aligner_y.load_reference_data_grb_actual( - grb_type=GRBType.GBG, partition=1000 + 
aligner_y.load_reference_data(GRBActualLoader + (grb_type=GRBType.GBG, partition=1000,aligner=aligner_y) ) # gebruik de actuele adp-percelen adp= administratieve percelen # Example how to use a series (for histogram) diff --git a/examples/example_readme.py b/examples/example_readme.py index d5a0194..cf1d19f 100644 --- a/examples/example_readme.py +++ b/examples/example_readme.py @@ -19,8 +19,9 @@ loader = DictLoader(reference_dict) aligner.load_reference_data(loader) # EXECUTE THE ALIGNMENT -process_result = aligner.process_dict_thematic(relevant_distance=1) +relevant_distance=1 +process_result = aligner.process_dict_thematic(relevant_distance=relevant_distance) # PRINT RESULTS IN WKT -print("result: " + process_result["theme_id_1"]["result"].wkt) -print("added area: " + process_result["theme_id_1"]["result_diff_plus"].wkt) -print("removed area: " + process_result["theme_id_1"]["result_diff_min"].wkt) +print("result: " + process_result["theme_id_1"][relevant_distance]["result"].wkt) +print("added area: " + process_result["theme_id_1"][relevant_distance]["result_diff_plus"].wkt) +print("removed area: " + process_result["theme_id_1"][relevant_distance]["result_diff_min"].wkt) diff --git a/examples/example_refactor_dict_series.py b/examples/example_refactor_dict_series.py new file mode 100644 index 0000000..c40182e --- /dev/null +++ b/examples/example_refactor_dict_series.py @@ -0,0 +1,28 @@ + +from brdr.aligner import Aligner +from brdr.enums import GRBType +from brdr.grb import GRBActualLoader +from brdr.oe import OnroerendErfgoedLoader + + +# EXAMPLE to test the algorithm for erfgoedobject with relevant distance 0.2m and +# od_strategy SNAP_ALL_SIDE + +# Initiate brdr +aligner = Aligner() +# Load thematic data & reference data +aanduidingsobjecten =[117798,116800,117881] + +loader = OnroerendErfgoedLoader(aanduidingsobjecten) +aligner.load_thematic_data(loader) +loader = GRBActualLoader(grb_type=GRBType.ADP, partition=1000, aligner=aligner) +aligner.load_reference_data(loader) + +test = aligner.process_dict_thematic() +test = aligner.process_series([1,2,3]) +test = aligner.predictor() +fcs = aligner.get_series_as_geojson(formula=True) +print (test) +print (fcs) +print (fcs["result"]) + diff --git a/examples/example_speedtest.py b/examples/example_speedtest.py index 936ddd0..cdaf536 100644 --- a/examples/example_speedtest.py +++ b/examples/example_speedtest.py @@ -5,6 +5,7 @@ # Initiate brdr aligner = Aligner(relevant_distance=2) +aligner.multi_as_single_modus=True # Load local thematic data and reference data # loader = GeoJsonFileLoader( # "../tests/testdata/theme.geojson", "theme_identifier" @@ -17,12 +18,12 @@ aligner.load_reference_data(loader) times=[] -for iter in range(1, 11): +for iter in range(1, 3): starttime= datetime.now() # Example how to use the Aligner aligner.predictor() - fcs = aligner.get_predictions_as_geojson(formula=True) + fcs = aligner.get_series_as_geojson(formula=True) endtime=datetime.now() seconds = (endtime-starttime).total_seconds() times.append(seconds) @@ -41,4 +42,12 @@ # Max: 52.108296 # Mean: 27.9915248 # Median: 24.193207 -# Stdv: 11.28891821173264 \ No newline at end of file +# Stdv: 11.28891821173264 + +# #AFTER refactoring +# duration: [21.313991, 16.558168, 16.590126, 18.111118, 16.872433, 17.928071, 18.32295, 17.87116, 19.516652, 16.729241] +# Min: 16.558168 +# Max: 21.313991 +# Mean: 17.981391 +# Median: 17.8996155 +# Stdv: 1.504459449440969 \ No newline at end of file diff --git a/examples/examples_predictor.py b/examples/examples_predictor.py 
index 362237f..4ee046f 100644 --- a/examples/examples_predictor.py +++ b/examples/examples_predictor.py @@ -4,7 +4,6 @@ from brdr.enums import GRBType from brdr.grb import GRBActualLoader from brdr.loader import GeoJsonFileLoader -from brdr.utils import dict_series_by_keys from examples import show_map # Press the green button in the gutter to run the script. @@ -29,7 +28,6 @@ dict_series, dict_predicted, diffs = aligner.predictor( relevant_distances=series, od_strategy=4, threshold_overlap_percentage=50 ) - dict_predicted = dict_series_by_keys(dict_predicted) for key in dict_predicted: show_map( dict_predicted[key], diff --git a/tests/test_aligner.py b/tests/test_aligner.py index adf8986..dd3cd64 100644 --- a/tests/test_aligner.py +++ b/tests/test_aligner.py @@ -1,4 +1,3 @@ -import os import unittest import numpy as np @@ -10,12 +9,11 @@ from brdr.aligner import Aligner from brdr.enums import GRBType from brdr.enums import OpenbaarDomeinStrategy -from brdr.geometry_utils import buffer_neg_pos from brdr.geometry_utils import _grid_bounds +from brdr.geometry_utils import buffer_neg_pos from brdr.grb import GRBActualLoader from brdr.loader import GeoJsonLoader from brdr.typings import FeatureCollection -from brdr.typings import ProcessResult class TestAligner(unittest.TestCase): @@ -51,22 +49,23 @@ def test_grid_bounds_2(self): for partition in grid_partitions: self.assertIsInstance(partition, Polygon) - def test_export_results(self): - aligner = Aligner() - aligner.load_thematic_data_dict( - {"theme_id_1": from_wkt("POLYGON ((0 0, 0 9, 5 10, 10 0, 0 0))")} - ) - aligner.load_reference_data_dict( - {"ref_id_1": from_wkt("POLYGON ((0 1, 0 10,8 10,10 1,0 1))")} - ) - aligner.process_dict_thematic() - path = "./tmp/" - aligner.export_results(path=path) - filenames = [f"{k}.geojson" for k in ProcessResult.__annotations__] - for file_name in os.listdir(path): - os.remove(path + file_name) - assert file_name in filenames - os.rmdir(path) + # def test_export_results(self): + # #TODO + # aligner = Aligner() + # aligner.load_thematic_data_dict( + # {"theme_id_1": from_wkt("POLYGON ((0 0, 0 9, 5 10, 10 0, 0 0))")} + # ) + # aligner.load_reference_data_dict( + # {"ref_id_1": from_wkt("POLYGON ((0 1, 0 10,8 10,10 1,0 1))")} + # ) + # aligner.process_dict_thematic() + # path = "./tmp/" + # aligner.export_results(path=path) + # filenames = [f"{k}.geojson" for k in ProcessResult.__annotations__] + # for file_name in os.listdir(path): + # os.remove(path + file_name) + # assert file_name in filenames + # os.rmdir(path) def test_get_formula_full_intersection(self): # Test when intersection equals reference geometry @@ -122,7 +121,7 @@ def test_predictor(self): dict_series, dict_predicted, dict_diffs = self.sample_aligner.predictor( relevant_distances=series, od_strategy=4, threshold_overlap_percentage=50 ) - self.assertEqual(len(dict_predicted[0]), len(thematic_dict)) + self.assertEqual(len(dict_predicted), len(thematic_dict)) def test_load_reference_data_grb_actual_adp(self): thematic_dict = { @@ -190,13 +189,14 @@ def test_all_od_strategies(self): self.sample_aligner.load_thematic_data_dict(thematic_dict) # LOAD REFERENCE DICTIONARY self.sample_aligner.load_reference_data_dict(reference_dict) + relevant_distance = 1 for od_strategy in OpenbaarDomeinStrategy: process_result = self.sample_aligner.process_dict_thematic( - relevant_distance=1, + relevant_distance=relevant_distance, od_strategy=od_strategy, threshold_overlap_percentage=50, ) - self.assertEqual(len(process_result["theme_id_1"]), 6) + 
self.assertEqual(len(process_result["theme_id_1"][relevant_distance]), 6) def test_process_interior_ring(self): thematic_dict = { @@ -223,6 +223,7 @@ def test_process_interior_ring(self): self.assertEqual(len(result_dict), len(thematic_dict)) def test_process_circle(self): + #TODO geometry = Point(0, 0).buffer(3) thematic_dict = {"key": geometry} self.sample_aligner.load_thematic_data_dict(thematic_dict) @@ -230,8 +231,9 @@ def test_process_circle(self): self.sample_aligner.load_reference_data(GRBActualLoader(aligner=self.sample_aligner, grb_type=GRBType.GBG, partition=1000) ) - results_dict = self.sample_aligner.process_dict_thematic() - self.assertEqual(geometry, results_dict["key"]["result"]) + relevant_distance=1 + results_dict = self.sample_aligner.process_dict_thematic(relevant_distance=relevant_distance) + self.assertEqual(geometry, results_dict["key"][relevant_distance]["result"]) def test__prepare_thematic_data(self): aligner = Aligner() @@ -270,22 +272,23 @@ def test__prepare_thematic_data(self): } thematic_loader = GeoJsonLoader(_input=geojson, id_property="theme_identifier") aligner.dict_thematic, properties, source = thematic_loader.load_data() + #TODO assert aligner.dict_thematic == {"4": shape(geojson["features"][0]["geometry"])} self.assertGreater(len(aligner.dict_thematic), 0) - def test_get_results_as_dict(self): - self.sample_aligner.load_thematic_data_dict( - { - "theme_id_1": from_wkt("POLYGON ((0 0, 0 9, 5 10, 10 0, 0 0))"), - "theme_id_2": from_wkt("POLYGON ((0 0, 0 9, 5 10, 10 0, 0 0))"), - } - ) - self.sample_aligner.load_reference_data_dict( - {"ref_id_1": from_wkt("POLYGON ((0 1, 0 10,8 10,10 1,0 1))")} - ) - self.sample_aligner.process_dict_thematic() - result = self.sample_aligner.get_results_as_dict() - assert len(result) == 2 + # def test_get_results_as_dict(self): + # self.sample_aligner.load_thematic_data_dict( + # { + # "theme_id_1": from_wkt("POLYGON ((0 0, 0 9, 5 10, 10 0, 0 0))"), + # "theme_id_2": from_wkt("POLYGON ((0 0, 0 9, 5 10, 10 0, 0 0))"), + # } + # ) + # self.sample_aligner.load_reference_data_dict( + # {"ref_id_1": from_wkt("POLYGON ((0 1, 0 10,8 10,10 1,0 1))")} + # ) + # self.sample_aligner.process_dict_thematic() + # result = self.sample_aligner.get_results_as_dict() + # assert len(result) == 2 def test_get_reference_as_geojson(self): self.sample_aligner.load_thematic_data_dict( @@ -301,11 +304,12 @@ def test_fully_aligned_input(self): aligned_shape = from_wkt("POLYGON ((0 0, 0 9, 5 10, 10 0, 0 0))") self.sample_aligner.load_thematic_data_dict({"theme_id_1": aligned_shape}) self.sample_aligner.load_reference_data_dict({"ref_id_1": aligned_shape}) - result = self.sample_aligner.process_dict_thematic() - assert result["theme_id_1"].get("result") == aligned_shape - assert result["theme_id_1"].get("result_diff") == Polygon() - assert result["theme_id_1"].get("result_diff_min") == Polygon() - assert result["theme_id_1"].get("result_diff_plus") == Polygon() + relevant_distance = 1 + result = self.sample_aligner.process_dict_thematic(relevant_distance=relevant_distance) + assert result["theme_id_1"][relevant_distance].get("result") == aligned_shape + assert result["theme_id_1"][relevant_distance].get("result_diff") == Polygon() + assert result["theme_id_1"][relevant_distance].get("result_diff_min") == Polygon() + assert result["theme_id_1"][relevant_distance].get("result_diff_plus") == Polygon() def test_fully_aligned_geojson_output(self): aligned_shape = from_wkt( diff --git a/tests/test_examples.py b/tests/test_examples.py index 
5661de4..d4a1152 100644 --- a/tests/test_examples.py +++ b/tests/test_examples.py @@ -7,10 +7,9 @@ from brdr.grb import GRBActualLoader from brdr.loader import DictLoader from brdr.loader import GeoJsonLoader -from brdr.oe import get_oe_dict_by_ids +from brdr.oe import get_oe_dict_by_ids, OnroerendErfgoedLoader from brdr.utils import diffs_from_dict_series from brdr.utils import get_breakpoints_zerostreak -from brdr.utils import multipolygons_to_singles class TestExamples(unittest.TestCase): @@ -27,9 +26,10 @@ def test_example_131635(self): aligner.process_dict_thematic(rel_dist, 4) def test_example_combined_borders_adp_gbg(self): + aligner = Aligner() - dict_theme = get_oe_dict_by_ids([131635]) - aligner.load_thematic_data_dict(dict_theme) + loader = OnroerendErfgoedLoader([131635]) + aligner.load_thematic_data(loader) adp_loader = GRBActualLoader( grb_type=GRBType.ADP, partition=1000, aligner=aligner ) @@ -45,22 +45,7 @@ def test_example_combined_borders_adp_gbg(self): rel_dist = 2 result_dict = aligner.process_dict_thematic(rel_dist, 4) for process_results in result_dict.values(): - aligner.get_formula(process_results["result"]) - - def test_example_multi_to_single(self): - aligner = Aligner() - # Load thematic data & reference data - # Get a specific feature of OE that exists out of a Multipolygon - dict_theme = get_oe_dict_by_ids([110082]) - dict_theme = multipolygons_to_singles(dict_theme) - aligner.load_thematic_data_dict(dict_theme) - aligner.load_reference_data(GRBActualLoader(aligner=aligner, - grb_type=GRBType.GBG, partition=1000) - ) - rel_dist = 5 - result_dict = aligner.process_dict_thematic(rel_dist, 4) - for process_results in result_dict.values(): - aligner.get_formula(process_results["result"]) + aligner.get_formula(process_results[rel_dist]["result"]) def test_example_multipolygon(self): aligner0 = Aligner() @@ -189,7 +174,7 @@ def test_example_multipolygon(self): _, dict_predicted, _ = aligner.predictor() self.assertGreater(len(dict_predicted), 0) - fcs = aligner.get_predictions_as_geojson(formula=True) + fcs = aligner.get_series_as_geojson(formula=True) self.assertEqual(len(fcs), 6) def test_example_wanted_changes(self): diff --git a/tests/test_grb.py b/tests/test_grb.py index e62ca3e..8e30a53 100644 --- a/tests/test_grb.py +++ b/tests/test_grb.py @@ -175,6 +175,7 @@ def test_get_geoms_affected_by_grb_change_bulk(self): assert len(dict_affected.keys()) > 0 def test_evaluate(self): + #TODO thematic_dict = { "theme_id_1": from_wkt( "MultiPolygon (((174180.20077791667426936 171966.14649116666987538, " @@ -190,11 +191,12 @@ def test_evaluate(self): base_aligner.load_reference_data( GRBFiscalParcelLoader(aligner=base_aligner, year="2022", partition=1000) ) - base_process_result = base_aligner.process_dict_thematic(relevant_distance=1) + relevant_distance=1 + base_process_result = base_aligner.process_dict_thematic(relevant_distance=relevant_distance) thematic_dict_formula = {} thematic_dict_result = {} for key in base_process_result: - thematic_dict_result[key] = base_process_result[key]["result"] + thematic_dict_result[key] = base_process_result[key][relevant_distance]["result"] thematic_dict_formula[key] = base_aligner.get_formula( thematic_dict_result[key] ) @@ -265,10 +267,3 @@ def test_grbspecificdateparcelloader(self): #Print results for feature in featurecollection["result"]["features"]: assert isinstance(feature["properties"]["evaluation"],Evaluation) - # print( - # feature["properties"][name_thematic_id] - # + ": " - # + feature["properties"]["evaluation"] - # ) - 
# geojson = featurecollection["result"] - # print(geojson) diff --git a/tests/test_integration.py b/tests/test_integration.py index e8af774..4209298 100644 --- a/tests/test_integration.py +++ b/tests/test_integration.py @@ -46,7 +46,7 @@ def test_webservice_brdr(self): geometry = shape(contour) - aligner.load_thematic_data(DictLoader({"input": geometry})) + aligner.load_thematic_data(DictLoader({"input_id": geometry})) aligner.load_reference_data( GRBActualLoader( grb_type=referentielaag_type, partition=1000, aligner=aligner @@ -60,21 +60,21 @@ def test_webservice_brdr(self): dict_series, aligner.dict_thematic, DiffMetric.CHANGES_AREA ) - dict_diffs = dict_diffs["input"] - dict_series = { - rel_dist: { - "result": json.loads(to_geojson(dict_results["input"]["result"])), + dict_diffs = dict_diffs["input_id"] + serial_dict ={} + dict_results=dict_series["input_id"] + for rel_dist,process_results in dict_results.items(): + serial_dict[rel_dist]={ + "result": json.loads(to_geojson(dict_results[rel_dist]["result"])), "result_diff_min": json.loads( - to_geojson(dict_results["input"]["result_diff_min"]) - ), - "result_diff_plus": json.loads( - to_geojson(dict_results["input"]["result_diff_plus"]) + to_geojson(dict_results[rel_dist]["result_diff_min"]) ), + "result_diff_plus": json.loads( + to_geojson(dict_results[rel_dist]["result_diff_plus"]) + ), } - for rel_dist, dict_results in dict_series.items() - } return { - "series": dict_series, + "series": serial_dict, "diffs": dict_diffs, } diff --git a/tests/test_utils.py b/tests/test_utils.py index 20a0cfd..6d106f7 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -131,17 +131,18 @@ def test_diffs_from_dict_series_complete(self): "theme_id2": Polygon([(5, 5), (15, 5), (15, 15), (5, 15)]), } dict_series = { - 10: { - "theme_id1": { + + "theme_id1": {10:{ "result": Polygon([(0, 0), (8, 0), (8, 8), (0, 8)]), "result_diff": Polygon([(2, 2), (6, 2), (6, 6), (2, 6)]), + } }, - "theme_id2": { + "theme_id2": {10:{ "result": Polygon([(7, 7), (13, 7), (13, 13), (7, 13)]), "result_diff": Polygon([(9, 9), (11, 9), (11, 11), (9, 11)]), + } }, } - } expected_diffs = {"theme_id1": {10: 16.0}, "theme_id2": {10: 4.0}} assert expected_diffs == diffs_from_dict_series( @@ -169,6 +170,6 @@ def test_merge_process_results(self): process_result_1["result"] = Polygon([(0, 0), (10, 0), (10, 10), (0, 10)]) process_result_2 = ProcessResult() process_result_2["result"] = Polygon([(0, 0), (8, 0), (8, 8), (0, 8)]) - testdict = {key_1: process_result_1, key_2: process_result_2} + testdict = {key_1: {0:process_result_1}, key_2: {0:process_result_2}} merged_testdict = merge_process_results(testdict) assert len(merged_testdict.keys()) == 1 From 249a6a19af2c5cc6bb9d9910a6ce507a757a04a6 Mon Sep 17 00:00:00 2001 From: dieuska Date: Wed, 18 Sep 2024 08:40:04 +0200 Subject: [PATCH 14/35] fixed example --- examples/example_grbspecificloader.py | 2 +- examples/example_parcel_change_detector.py | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/examples/example_grbspecificloader.py b/examples/example_grbspecificloader.py index 8e7197a..93d2c20 100644 --- a/examples/example_grbspecificloader.py +++ b/examples/example_grbspecificloader.py @@ -12,5 +12,5 @@ } loader = DictLoader(thematic_dict) aligner.load_thematic_data(loader) -loader = GRBSpecificDateParcelLoader(date="2023-07-03", aligner=aligner) +loader = GRBSpecificDateParcelLoader(date="2023-05-03", aligner=aligner) aligner.load_reference_data(loader) diff --git 
a/examples/example_parcel_change_detector.py b/examples/example_parcel_change_detector.py index 66baf10..64a54e9 100644 --- a/examples/example_parcel_change_detector.py +++ b/examples/example_parcel_change_detector.py @@ -81,7 +81,7 @@ i = 0 for key in base_process_result: i = i + 1 - thematic_dict_result[key] = base_process_result[key]["result"] + thematic_dict_result[key] = base_process_result[key][base_correction]["result"] thematic_dict_formula[key] = base_aligner.get_formula(thematic_dict_result[key]) if i > 500: break @@ -130,8 +130,8 @@ counter_difference = 0 for theme_id in dict_affected: for dist in series: - if "evaluation" in prop_dictionary[dist][theme_id].keys(): - ev = prop_dictionary[dist][theme_id]["evaluation"] + if "evaluation" in prop_dictionary[theme_id][dist].keys(): + ev = prop_dictionary[theme_id][dist]["evaluation"] if ev.startswith("equal") and dist == 0: counter_equality = counter_equality + 1 elif ev.startswith("equal") and dist > 0: @@ -140,7 +140,7 @@ counter_difference = counter_difference + 1 break -logging.info( +print( "Features: " + str(len(dict_affected)) + "//Equality: " From 69f4a5739490b7166d58ff7a96ac3ebb6f42f775 Mon Sep 17 00:00:00 2001 From: dieuska Date: Wed, 18 Sep 2024 10:21:01 +0200 Subject: [PATCH 15/35] #86 #81 #77 cleanedup unused code and deprecated functions and refactored aligner --- brdr/aligner.py | 267 +++++++++++++------ brdr/geometry_utils.py | 152 +---------- brdr/utils.py | 72 ----- examples/example_combined_borders_adp_gbg.py | 4 +- examples/example_multipolygon.py | 12 +- examples/example_parcel_vs_building.py | 9 +- examples/examples_aligner.py | 29 +- examples/stats_snapping_distance_creation.py | 5 +- tests/test_aligner.py | 96 +++---- tests/test_examples.py | 4 +- tests/test_loader.py | 8 +- 11 files changed, 267 insertions(+), 391 deletions(-) diff --git a/brdr/aligner.py b/brdr/aligner.py index eff8515..27cf07f 100644 --- a/brdr/aligner.py +++ b/brdr/aligner.py @@ -18,7 +18,8 @@ from shapely.geometry.base import BaseGeometry from brdr import __version__ -from brdr.constants import BUFFER_MULTIPLICATION_FACTOR, LAST_VERSION_DATE, VERSION_DATE, DATE_FORMAT +from brdr.constants import BUFFER_MULTIPLICATION_FACTOR, LAST_VERSION_DATE, VERSION_DATE, DATE_FORMAT, \ + THRESHOLD_EXCLUSION_PERCENTAGE, THRESHOLD_EXCLUSION_AREA from brdr.constants import CORR_DISTANCE from brdr.constants import DEFAULT_CRS from brdr.constants import THRESHOLD_CIRCLE_RATIO @@ -26,16 +27,11 @@ from brdr.geometry_utils import buffer_neg from brdr.geometry_utils import buffer_neg_pos from brdr.geometry_utils import buffer_pos -from brdr.geometry_utils import calculate_geom_by_intersection_and_reference from brdr.geometry_utils import fill_and_remove_gaps from brdr.geometry_utils import safe_difference from brdr.geometry_utils import safe_intersection from brdr.geometry_utils import safe_symmetric_difference from brdr.geometry_utils import safe_union -from brdr.loader import DictLoader -from brdr.loader import GeoJsonFileLoader -from brdr.loader import GeoJsonLoader -from brdr.loader import GeoJsonUrlLoader from brdr.loader import Loader from brdr.logger import Logger from brdr.typings import ProcessResult @@ -149,6 +145,19 @@ def __init__( def buffer_distance(self): return self.relevant_distance / 2 + def load_reference_data(self, loader: Loader): + ( + self.dict_reference, + self.dict_reference_properties, + self.dict_reference_source + ) = loader.load_data() + self._prepare_reference_data() + + def load_thematic_data(self, loader: Loader): + 
self.dict_thematic, self.dict_thematic_properties, self.dict_thematic_source = ( + loader.load_data() + ) + def process_geometry( self, input_geometry: BaseGeometry, @@ -214,7 +223,7 @@ def process_geometry( geom, relevant_intersection, relevant_diff, - ) = calculate_geom_by_intersection_and_reference( + ) = _calculate_geom_by_intersection_and_reference( geom_intersection, geom_reference, False, @@ -222,11 +231,11 @@ def process_geometry( self.threshold_overlap_percentage, ) self.logger.feedback_debug("intersection calculated") - preresult = self.add_multi_polygons_from_geom_to_array(geom, preresult) - relevant_intersection_array = self.add_multi_polygons_from_geom_to_array( + preresult = self._add_multi_polygons_from_geom_to_array(geom, preresult) + relevant_intersection_array = self._add_multi_polygons_from_geom_to_array( relevant_intersection, relevant_intersection_array ) - relevant_diff_array = self.add_multi_polygons_from_geom_to_array( + relevant_diff_array = self._add_multi_polygons_from_geom_to_array( relevant_diff, relevant_diff_array ) # UNION INTERMEDIATE LAYERS @@ -575,18 +584,29 @@ def get_results_as_geojson(self, formula=False): formula (bool, optional): Whether to include formula-related information in the output. Defaults to False. """ - results_dict = self.dict_result return self.get_series_as_geojson( formula,self.dict_result, ) + def get_predictions_as_geojson(self, formula=False): + """ + convert the predictions to geojson feature collections + + Args: + formula (bool, optional): Whether to include formula-related information + in the output. Defaults to False. + """ + return self.get_series_as_geojson( + formula,self.dict_predicted, + ) + def get_series_as_geojson(self, formula=False, series_dict=None): """ - get a dictionary containing of the resulting geometries as geojson, based on the - 'predicted' relevant distances. + get a geojson of a dictionary containing the resulting geometries for all + 'serial' relevant distances. If no dict_series is given, the dict_result returned. Optional: The descriptive formula is added as an attribute to the result""" - series_dict = series_dict or self.dict_predicted + series_dict = series_dict or self.dict_result prop_dictionary = defaultdict(dict) for theme_id, results_dict in series_dict.items(): @@ -612,6 +632,17 @@ def get_reference_as_geojson(self): return geojson_from_dict( self.dict_reference, self.CRS, self.name_reference_id, geom_attributes=False ) + def export_predictions(self, path, formula=True): + """ + Exports 'predicted' results as GeoJSON files. + + This function exports 6 GeoJSON files containing the 'predicted' results to the + specified `path`. + """ + fcs = self.get_predictions_as_geojson(formula) + for name, fc in fcs.items(): + write_geojson(os.path.join(path, name + "_predictions.geojson"), fc) + def export_results(self, path, formula=True): """ @@ -643,6 +674,14 @@ def export_results(self, path, formula=True): for name, fc in fcs.items(): write_geojson(os.path.join(path, name + ".geojson"), fc) + def get_thematic_union(self): + if self.thematic_union is None: + self.thematic_union = make_valid( + unary_union(list(self.dict_thematic.values())) + ) + return self.thematic_union + + def _prepare_reference_data(self): """ Prepares reference data for spatial queries and analysis. 
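Illustrative sketch (not part of the patch) of the renamed and newly added GeoJSON helpers; the relevant distances and output paths are arbitrary, and the toy geometries are again borrowed from the unit tests.

from shapely import from_wkt

from brdr.aligner import Aligner
from brdr.loader import DictLoader
from brdr.utils import write_geojson

aligner = Aligner()
aligner.load_thematic_data(
    DictLoader({"theme_id_1": from_wkt("POLYGON ((0 0, 0 9, 5 10, 10 0, 0 0))")})
)
aligner.load_reference_data(
    DictLoader({"ref_id_1": from_wkt("POLYGON ((0 1, 0 10,8 10,10 1,0 1))")})
)

# predictor() processes a series of relevant distances and keeps the serial results
# on the aligner (dict_result) as well as the 'predicted' subset (dict_predicted)
dict_series, dict_predicted, diffs = aligner.predictor()

fcs = aligner.get_series_as_geojson(formula=True)                 # all processed distances
write_geojson("output/series.geojson", fcs["result"])

fcs_predicted = aligner.get_predictions_as_geojson(formula=True)  # predicted distances only
write_geojson("output/predicted.geojson", fcs_predicted["result"])

aligner.export_predictions("output/", formula=True)               # writes *_predictions.geojson files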
@@ -778,7 +817,7 @@ def _calculate_intersection_between_geometry_and_od(self, geometry): pass # ADD THEMATIC_OD - preresult = self.add_multi_polygons_from_geom_to_array(geom_thematic_od, []) + preresult = self._add_multi_polygons_from_geom_to_array(geom_thematic_od, []) return ( geometry, preresult, @@ -828,27 +867,21 @@ def _od_snap_all_side(self, geometry): geom_thematic_od, geom_relevant_intersection, geom_relevant_diff, - ) = calculate_geom_by_intersection_and_reference( + ) = _calculate_geom_by_intersection_and_reference( geom_intersection, geom_reference, True, self.relevant_distance / 2, self.threshold_overlap_percentage, ) - relevant_intersection_array = self.add_multi_polygons_from_geom_to_array( + relevant_intersection_array = self._add_multi_polygons_from_geom_to_array( geom_relevant_intersection, [] ) - relevant_difference_array = self.add_multi_polygons_from_geom_to_array( + relevant_difference_array = self._add_multi_polygons_from_geom_to_array( geom_relevant_diff, [] ) return geom_thematic_od, relevant_difference_array, relevant_intersection_array - # def _snap_geom_to_reference(self, geom_input, geom_reference, relevant_distance): - # """ - # This feature does not work correctly with Shapely. This avoids polygons collapse - # if everything is taken together, which we do in some cases effectively want. - # """ - # return snap(geom_input, geom_reference, relevant_distance) def _get_reference_union(self): if self.reference_union is None: @@ -857,12 +890,6 @@ def _get_reference_union(self): ) return self.reference_union - def get_thematic_union(self): - if self.thematic_union is None: - self.thematic_union = make_valid( - unary_union(list(self.dict_thematic.values())) - ) - return self.thematic_union def _postprocess_preresult(self, preresult, geom_thematic) -> ProcessResult: """ @@ -1005,7 +1032,7 @@ def _postprocess_preresult(self, preresult, geom_thematic) -> ProcessResult: } @staticmethod - def add_multi_polygons_from_geom_to_array(geom: BaseGeometry, array): + def _add_multi_polygons_from_geom_to_array(geom: BaseGeometry, array): """ Append valid polygons and multipolygons extracted from a given geometry to an existing array. @@ -1032,57 +1059,133 @@ def add_multi_polygons_from_geom_to_array(geom: BaseGeometry, array): array.append(g) return array - def load_reference_data(self, loader: Loader): - ( - self.dict_reference, - self.dict_reference_properties, - self.dict_reference_source - ) = loader.load_data() - self._prepare_reference_data() +@staticmethod +def _calculate_geom_by_intersection_and_reference( + geom_intersection: BaseGeometry, + geom_reference: BaseGeometry, + is_openbaar_domein, + buffer_distance, + threshold_overlap_percentage, + threshold_exclusion_percentage=THRESHOLD_EXCLUSION_PERCENTAGE, + threshold_exclusion_area=THRESHOLD_EXCLUSION_AREA, +): + """ + Calculates the geometry based on intersection and reference geometries. + + Args: + geom_intersection (BaseGeometry): The intersection geometry. + geom_reference (BaseGeometry): The reference geometry. + is_openbaar_domein (bool): A flag indicating whether it's a public domain + (area not covered with reference polygon). + threshold_exclusion_percentage (int): The threshold exclusion percentage. + threshold_exclusion_area (int): The threshold exclusion area. + buffer_distance (float): The buffer distance. + threshold_overlap_percentage (int): The threshold overlap percentage. 
+ + Returns: + tuple: A tuple containing the resulting geometries: + + * geom: BaseGeometry or None: The resulting geometry or None if conditions + are not met. + * geom_relevant_intersection: BaseGeometry or None: The relevant + intersection. + * geom_relevant_difference: BaseGeometry or None: The relevant difference. + + Notes: + - If the reference geometry area is 0, the overlap is set to 100%. + - If the overlap is less than relevant_OVERLAP_PERCENTAGE or the + intersection area is less than relevant_OVERLAP_AREA, None is returned. + - Otherwise, the relevant intersection and difference geometries are + calculated. + - If both relevant intersection and difference are non-empty, the final + geometry is obtained by applying safe intersection and buffering. + - If only relevant intersection is non-empty, the result is the reference + geometry. + - If only relevant difference is non-empty, the result is None. + """ - def load_thematic_data(self, loader: Loader): - self.dict_thematic, self.dict_thematic_properties, self.dict_thematic_source = ( - loader.load_data() + if geom_reference.area == 0: + overlap = 100 + + else: + overlap = geom_intersection.area * 100 / geom_reference.area + + if ( + overlap < threshold_exclusion_percentage + or geom_intersection.area < threshold_exclusion_area + ): + return Polygon(), Polygon(), Polygon() + + geom_difference = safe_difference(geom_reference, geom_intersection) + geom_relevant_intersection = buffer_neg(geom_intersection, buffer_distance) + geom_relevant_difference = buffer_neg(geom_difference, buffer_distance) + if ( + not geom_relevant_intersection.is_empty + and not geom_relevant_difference.is_empty + ): + # relevant intersection and relevant difference + geom_x = safe_intersection( + geom_reference, + safe_difference( + geom_reference, + safe_intersection( + geom_difference, + buffer_neg_pos(geom_difference, buffer_distance), + ), + ), + ) + geom = safe_intersection( + geom_x, + buffer_pos( + buffer_neg_pos(geom_x, buffer_distance), + buffer_distance, + ), ) + # when calculating for OD, we create a 'virtual parcel'. When calculating this + # virtual parcel, it is buffered to take outer boundaries into account. + # This results in a side effect that there are extra non-logical parts included + # in the result. The function below tries to exclude these non-logical parts. 
+ # see eo_id 206363 with relevant distance=0.2m and SNAP_ALL_SIDE + if is_openbaar_domein: + geom = _get_relevant_polygons_from_geom(geom, buffer_distance) + elif not geom_relevant_intersection.is_empty and geom_relevant_difference.is_empty: + geom = geom_reference + elif geom_relevant_intersection.is_empty and not geom_relevant_difference.is_empty: + geom = geom_relevant_intersection # (=empty geometry) + else: + if is_openbaar_domein: + geom = geom_relevant_intersection # (=empty geometry) + # geom = snap_geom_to_reference (geom_intersection, geom_reference, + # relevant_distance) + elif threshold_overlap_percentage < 0: + # if we take a value of -1, the original border will be used + geom = geom_intersection + elif overlap > threshold_overlap_percentage: + geom = geom_reference + else: + geom = geom_relevant_intersection # (=empty geometry) + return geom, geom_relevant_intersection, geom_relevant_difference - # Deprecated loader methods - - def load_thematic_data_geojson(self, thematic_input, name_thematic_id): - logging.warning("deprecated method, use load_thematic_data instead") - loader = GeoJsonLoader(thematic_input, name_thematic_id) - self.load_thematic_data(loader) - - def load_thematic_data_file(self, path_to_file, name_thematic_id): - logging.warning("deprecated method, use load_thematic_data instead") - loader = GeoJsonFileLoader(path_to_file, name_thematic_id) - self.load_thematic_data(loader) - - def load_thematic_data_dict(self, dict_theme): - logging.warning("deprecated method, use load_thematic_data instead") - loader = DictLoader(dict_theme) - self.load_thematic_data(loader) - - def load_thematic_data_url(self, url, name_thematic_id): - logging.warning("deprecated method, use load_thematic_data instead") - loader = GeoJsonUrlLoader(url, name_thematic_id) - self.load_thematic_data(loader) - - def load_reference_data_dict(self, dict_ref): - logging.warning("deprecated method, use load_reference_data instead") - loader = DictLoader(dict_ref) - self.load_reference_data(loader) - - def load_reference_data_geojson(self, reference_input, name_reference_id): - logging.warning("deprecated method, use load_reference_data instead") - loader = GeoJsonLoader(_input=reference_input, id_property=name_reference_id) - self.load_reference_data(loader) - - def load_reference_data_file(self, path_to_file, name_reference_id): - logging.warning("deprecated method, use load_reference_data instead") - loader = GeoJsonFileLoader(path_to_file, name_reference_id) - self.load_reference_data(loader) - - def load_reference_data_url(self, url, name_reference_id): - logging.warning("deprecated method, use load_reference_data instead") - loader = GeoJsonUrlLoader(url, name_reference_id) - self.load_reference_data(loader) +@staticmethod +def _get_relevant_polygons_from_geom(geometry: BaseGeometry, buffer_distance: float): + """ + Get only the relevant parts (polygon) from a geometry. + Points, Lines and Polygons smaller than relevant distance are excluded from the + result + """ + if not geometry or geometry.is_empty: + # If the input geometry is empty or None, do nothing. + return geometry + else: + geometry = make_valid(unary_union(geometry)) + # Create a GeometryCollection from the input geometry. + geometry_collection = GeometryCollection(geometry) + array = [] + for g in geometry_collection.geoms: + # Ensure each sub-geometry is valid. 
+ g = make_valid(g) + if str(g.geom_type) in ["Polygon", "MultiPolygon"]: + relevant_geom = buffer_neg(g, buffer_distance) + if relevant_geom is not None and not relevant_geom.is_empty: + array.append(g) + return make_valid(unary_union(array)) diff --git a/brdr/geometry_utils.py b/brdr/geometry_utils.py index de7513c..4c0aa0a 100644 --- a/brdr/geometry_utils.py +++ b/brdr/geometry_utils.py @@ -2,7 +2,6 @@ import numpy as np from shapely import GEOSException, equals -from shapely import GeometryCollection from shapely import Polygon from shapely import STRtree from shapely import buffer @@ -14,19 +13,15 @@ from shapely import get_parts from shapely import intersection from shapely import is_empty -from shapely import make_valid from shapely import polygons from shapely import symmetric_difference from shapely import to_wkt -from shapely import unary_union from shapely import union from shapely.geometry.base import BaseGeometry from shapely.prepared import prep from brdr.constants import MITRE_LIMIT from brdr.constants import QUAD_SEGMENTS -from brdr.constants import THRESHOLD_EXCLUSION_AREA -from brdr.constants import THRESHOLD_EXCLUSION_PERCENTAGE def buffer_neg_pos(geometry, buffer_value): @@ -417,137 +412,6 @@ def _grid_bounds(geom: BaseGeometry, delta: float): return grid -def _get_relevant_polygons_from_geom(geometry: BaseGeometry, buffer_distance: float): - """ - Get only the relevant parts (polygon) from a geometry. - Points, Lines and Polygons smaller than relevant distance are excluded from the - result - """ - if not geometry or geometry.is_empty: - # If the input geometry is empty or None, do nothing. - return geometry - else: - geometry = make_valid(unary_union(geometry)) - # Create a GeometryCollection from the input geometry. - geometry_collection = GeometryCollection(geometry) - array = [] - for g in geometry_collection.geoms: - # Ensure each sub-geometry is valid. - g = make_valid(g) - if str(g.geom_type) in ["Polygon", "MultiPolygon"]: - relevant_geom = buffer_neg(g, buffer_distance) - if relevant_geom is not None and not relevant_geom.is_empty: - array.append(g) - return make_valid(unary_union(array)) - - -def calculate_geom_by_intersection_and_reference( - geom_intersection: BaseGeometry, - geom_reference: BaseGeometry, - is_openbaar_domein, - buffer_distance, - threshold_overlap_percentage, - threshold_exclusion_percentage=THRESHOLD_EXCLUSION_PERCENTAGE, - threshold_exclusion_area=THRESHOLD_EXCLUSION_AREA, -): - """ - Calculates the geometry based on intersection and reference geometries. - - Args: - geom_intersection (BaseGeometry): The intersection geometry. - geom_reference (BaseGeometry): The reference geometry. - is_openbaar_domein (bool): A flag indicating whether it's a public domain - (area not covered with reference polygon). - threshold_exclusion_percentage (int): The threshold exclusion percentage. - threshold_exclusion_area (int): The threshold exclusion area. - buffer_distance (float): The buffer distance. - threshold_overlap_percentage (int): The threshold overlap percentage. - - Returns: - tuple: A tuple containing the resulting geometries: - - * geom: BaseGeometry or None: The resulting geometry or None if conditions - are not met. - * geom_relevant_intersection: BaseGeometry or None: The relevant - intersection. - * geom_relevant_difference: BaseGeometry or None: The relevant difference. - - Notes: - - If the reference geometry area is 0, the overlap is set to 100%. 
- - If the overlap is less than relevant_OVERLAP_PERCENTAGE or the - intersection area is less than relevant_OVERLAP_AREA, None is returned. - - Otherwise, the relevant intersection and difference geometries are - calculated. - - If both relevant intersection and difference are non-empty, the final - geometry is obtained by applying safe intersection and buffering. - - If only relevant intersection is non-empty, the result is the reference - geometry. - - If only relevant difference is non-empty, the result is None. - """ - - if geom_reference.area == 0: - overlap = 100 - - else: - overlap = geom_intersection.area * 100 / geom_reference.area - - if ( - overlap < threshold_exclusion_percentage - or geom_intersection.area < threshold_exclusion_area - ): - return Polygon(), Polygon(), Polygon() - - geom_difference = safe_difference(geom_reference, geom_intersection) - geom_relevant_intersection = buffer_neg(geom_intersection, buffer_distance) - geom_relevant_difference = buffer_neg(geom_difference, buffer_distance) - if ( - not geom_relevant_intersection.is_empty - and not geom_relevant_difference.is_empty - ): - # relevant intersection and relevant difference - geom_x = safe_intersection( - geom_reference, - safe_difference( - geom_reference, - safe_intersection( - geom_difference, - buffer_neg_pos(geom_difference, buffer_distance), - ), - ), - ) - geom = safe_intersection( - geom_x, - buffer_pos( - buffer_neg_pos(geom_x, buffer_distance), - buffer_distance, - ), - ) - # when calculating for OD, we create a 'virtual parcel'. When calculating this - # virtual parcel, it is buffered to take outer boundaries into account. - # This results in a side effect that there are extra non-logical parts included - # in the result. The function below tries to exclude these non-logical parts. 
- # see eo_id 206363 with relevant distance=0.2m and SNAP_ALL_SIDE - if is_openbaar_domein: - geom = _get_relevant_polygons_from_geom(geom, buffer_distance) - elif not geom_relevant_intersection.is_empty and geom_relevant_difference.is_empty: - geom = geom_reference - elif geom_relevant_intersection.is_empty and not geom_relevant_difference.is_empty: - geom = geom_relevant_intersection # (=empty geometry) - else: - if is_openbaar_domein: - geom = geom_relevant_intersection # (=empty geometry) - # geom = snap_geom_to_reference (geom_intersection, geom_reference, - # relevant_distance) - elif threshold_overlap_percentage < 0: - # if we take a value of -1, the original border will be used - geom = geom_intersection - elif overlap > threshold_overlap_percentage: - geom = geom_reference - else: - geom = geom_relevant_intersection # (=empty geometry) - return geom, geom_relevant_intersection, geom_relevant_difference - - def geom_from_wkt(wkt_string): """ Converts a WellKnownText (WKT) into a shapely-geometry @@ -612,10 +476,10 @@ def get_partitions(geom, delta): return filtered_grid -def fill_and_remove_gaps(geom_thematic_preresult, buffer_value): - geom_thematic_cleaned_holes = geom_thematic_preresult +def fill_and_remove_gaps(input_geometry, buffer_value): + cleaned_geometry = input_geometry ix_part = 1 - for part in get_parts(geom_thematic_preresult): + for part in get_parts(input_geometry): exterior_ring = get_exterior_ring(part) exterior_polygon = polygons([exterior_ring])[0] empty_buffered_exterior_polygon = buffer_neg( @@ -626,8 +490,8 @@ def fill_and_remove_gaps(geom_thematic_preresult, buffer_value): and empty_buffered_exterior_polygon and not exterior_polygon.is_empty ): - geom_thematic_cleaned_holes = safe_difference( - geom_thematic_cleaned_holes, exterior_polygon + cleaned_geometry = safe_difference( + cleaned_geometry, exterior_polygon ) num_interior_rings = get_num_interior_rings(part) if num_interior_rings > 0: @@ -640,12 +504,12 @@ def fill_and_remove_gaps(geom_thematic_preresult, buffer_value): interior_polygon, buffer_value ).is_empty if empty_buffered_interior_ring: - geom_thematic_cleaned_holes = safe_union( - geom_thematic_cleaned_holes, interior_polygon + cleaned_geometry = safe_union( + cleaned_geometry, interior_polygon ) ix = ix + 1 ix_part = ix_part + 1 - return geom_thematic_cleaned_holes + return cleaned_geometry def get_bbox(geometry): diff --git a/brdr/utils.py b/brdr/utils.py index d03a007..68ee0d1 100644 --- a/brdr/utils.py +++ b/brdr/utils.py @@ -302,25 +302,6 @@ def _numerical_derivative(x, y): return derivative - -# def filter_dict_by_key(dictionary, filter_key): -# """ -# Filters a dictionary to only include keys matching a specific value. -# -# This function creates a new dictionary containing entries from the original -# dictionary where the key matches the provided `filter_key`. -# -# Args: -# dictionary (dict): The dictionary to filter. -# filter_key (str): The key value to filter by. -# -# Returns: -# dict: A new dictionary containing only entries where the key matches the -# `filter_key`. 
-# """ -# return {key: dictionary[key] for key in dictionary.keys() if key == filter_key} - - def diffs_from_dict_series( dict_series: dict[str, dict[float, ProcessResult]], dict_thematic: dict[str, BaseGeometry], @@ -383,10 +364,6 @@ def diffs_from_dict_series( for rel_dist in results_dict: result = results_dict.get(rel_dist, {}).get("result") result_diff = results_dict.get(rel_dist, {}).get("result_diff") - # result_diff_plus = results_dict.get(thematic_id, {})\ - # .get("result_diff_plus") - # result_diff_min = results_dict.get(thematic_id, {})\ - # .get("result_diff_min") diff = 0 if ( @@ -502,34 +479,6 @@ def _add_bbox_to_url(url, crs=DEFAULT_CRS, bbox=None): return url -# def merge_dict_series( -# dict_series: dict[float, dict[str, ProcessResult]] -# ) -> dict[float, dict[str, ProcessResult]]: -# """ -# Merges dict_series (dict_predicted) with seperated IDs (MULTI_SINGLE_ID_SEPARATOR) -# to their original unique ID -# """ -# dict_series_merged = {} -# for dist, item in dict_series.items(): -# dict_series_merged[dist] = merge_process_results(item) -# return dict_series_merged - - -# def merge_dict(dictionary: dict[str, BaseGeometry]) -> dict[str, BaseGeometry]: -# """ -# Merges dict_series (dict_predicted) with seperated IDs (MULTI_SINGLE_ID_SEPARATOR) -# to their original unique ID -# """ -# out_dictionary = {} -# for id_theme, item in dictionary.items(): -# id_theme_global = id_theme.split(MULTI_SINGLE_ID_SEPARATOR)[0] -# if id_theme_global not in out_dictionary: -# out_dictionary[id_theme_global] = [item] -# else: -# out_dictionary[id_theme_global].append(item) -# return {k: make_valid(unary_union(v)) for k, v in out_dictionary.items()} - - def merge_process_results( result_dict: dict[str, dict[float, ProcessResult]] ) -> dict[str, dict[float, ProcessResult]]: @@ -561,24 +510,3 @@ def merge_process_results( [existing, geom] ) return grouped_results - - - -# def dict_series_by_keys(dict_series): -# """ -# Transforms a dict_series into a dictionary with theme_id as keys, and a dictionary -# with all predicted distances and their resulting geometry as a value. -# Args: -# dict_series: a dictionary result of the 'series/predictor' -# -# Returns: dictionary with theme_id as keys, and a dictionary with all serial -# distances and their resulting geometry as a value. 
-# -# """ -# dict_series_keys = {} -# for dist, res in dict_series.items(): -# for key in res.keys(): -# if key not in dict_series_keys.keys(): -# dict_series_keys[key] = {} -# dict_series_keys[key][dist] = {key: res[key]} -# return dict_series_keys diff --git a/examples/example_combined_borders_adp_gbg.py b/examples/example_combined_borders_adp_gbg.py index f9549f8..adc5239 100644 --- a/examples/example_combined_borders_adp_gbg.py +++ b/examples/example_combined_borders_adp_gbg.py @@ -1,7 +1,7 @@ from brdr.aligner import Aligner from brdr.enums import GRBType from brdr.grb import get_collection_grb_actual, GRBActualLoader -from brdr.loader import GeoJsonFileLoader +from brdr.loader import GeoJsonFileLoader, DictLoader from brdr.utils import polygonize_reference_data, geojson_to_dicts from examples import show_map, print_formula @@ -54,7 +54,7 @@ dict_adp_gbg.update(dict_gbg) # combine 2 dictionaries # make a polygonized version of the reference data with non-overlapping polygons dict_ref = polygonize_reference_data(dict_adp_gbg) - aligner.load_reference_data_dict(dict_ref) + aligner.load_reference_data(DictLoader(dict_ref)) rel_dist = 2 dict_results = aligner.process_dict_thematic(rel_dist, 4) diff --git a/examples/example_multipolygon.py b/examples/example_multipolygon.py index 75522ad..946e4f3 100644 --- a/examples/example_multipolygon.py +++ b/examples/example_multipolygon.py @@ -2,7 +2,7 @@ from brdr.aligner import Aligner from brdr.enums import GRBType from brdr.grb import GRBActualLoader -from brdr.loader import DictLoader +from brdr.loader import DictLoader, GeoJsonFileLoader from brdr.utils import multipolygons_to_singles from brdr.utils import write_geojson @@ -10,12 +10,14 @@ # Load thematic data -aligner0.load_thematic_data_file( +aligner0.load_thematic_data(GeoJsonFileLoader( "../tests/testdata/multipolygon.geojson", "theme_identifier" -) +)) aligner0.dict_thematic = multipolygons_to_singles(aligner0.dict_thematic) -aligner0.load_thematic_data_dict( - aligner0.dict_thematic, +aligner0.load_thematic_data( + DictLoader( + aligner0.dict_thematic, + ) ) # gebruik de actuele adp-percelen adp= administratieve percelen aligner = Aligner() diff --git a/examples/example_parcel_vs_building.py b/examples/example_parcel_vs_building.py index 7632478..24dd775 100644 --- a/examples/example_parcel_vs_building.py +++ b/examples/example_parcel_vs_building.py @@ -3,6 +3,7 @@ from brdr.aligner import Aligner from brdr.enums import GRBType from brdr.grb import GRBActualLoader +from brdr.loader import GeoJsonFileLoader from brdr.utils import diffs_from_dict_series from examples import plot_series @@ -12,18 +13,18 @@ # Initiate brdr aligner_x = Aligner() # Load thematic data & reference data (parcels) - aligner_x.load_thematic_data_file( + aligner_x.load_thematic_data(GeoJsonFileLoader( "../tests/testdata/test_parcel_vs_building.geojson", "theme_id" - ) + )) aligner_x.load_reference_data(GRBActualLoader (grb_type=GRBType.ADP, partition=1000,aligner=aligner_x) ) # gebruik de actuele adp-percelen adp= administratieve percelen aligner_y = Aligner() # Load thematic data & reference data (buildings) - aligner_y.load_thematic_data_file( + aligner_y.load_thematic_data(GeoJsonFileLoader( "../tests/testdata/test_parcel_vs_building.geojson", "theme_id" - ) + )) aligner_y.load_reference_data(GRBActualLoader (grb_type=GRBType.GBG, partition=1000,aligner=aligner_y) ) # gebruik de actuele adp-percelen adp= administratieve percelen diff --git a/examples/examples_aligner.py b/examples/examples_aligner.py index 
b4f28a5..bcef75c 100644 --- a/examples/examples_aligner.py +++ b/examples/examples_aligner.py @@ -1,5 +1,7 @@ from brdr.aligner import Aligner from brdr.enums import OpenbaarDomeinStrategy, GRBType +from brdr.grb import GRBActualLoader +from brdr.loader import GeoJsonFileLoader from brdr.utils import diffs_from_dict_series from examples import plot_series from examples import show_map @@ -8,40 +10,29 @@ # Initiate brdr aligner = Aligner() # Load thematic data - aligner.load_thematic_data_file( + aligner.load_thematic_data(GeoJsonFileLoader( "../tests/testdata/themelayer_referenced.geojson", "id_theme" - ) + )) # Use GRB adp-parcels as reference polygons adp= administratieve percelen - aligner.load_reference_data_grb_actual(grb_type=GRBType.ADP, partition=1000) - # alternative reference poly - # # Use GRB-gbg (buildings), gbg= gebouw aan de grond - # x.load_reference_data_grb_actual('gbg') - - # Use local data - # x.load_reference_data_file( - # "../tests/testdata/reference_leuven.geojson", 'capakey' - # ) + aligner.load_reference_data(GRBActualLoader(grb_type=GRBType.ADP, partition=1000,aligner=aligner)) # Example how to use the Aligner rel_dist = 10 - dict_results_by_distance = { - rel_dist: aligner.process_dict_thematic( + dict_results = aligner.process_dict_thematic( relevant_distance=rel_dist, od_strategy=OpenbaarDomeinStrategy.SNAP_FULL_AREA_SINGLE_SIDE, ) - } aligner.export_results("output/") - show_map(dict_results_by_distance, aligner.dict_thematic, aligner.dict_reference) + show_map(dict_results, aligner.dict_thematic, aligner.dict_reference) rel_dist = 6 - dict_results_by_distance = { - rel_dist: aligner.process_dict_thematic( + dict_results = aligner.process_dict_thematic( relevant_distance=rel_dist, od_strategy=OpenbaarDomeinStrategy.SNAP_ALL_SIDE ) - } + aligner.export_results("output/") - show_map(dict_results_by_distance, aligner.dict_thematic, aligner.dict_reference) + show_map(dict_results, aligner.dict_thematic, aligner.dict_reference) # for key in r: # x.get_formula(r[key]) diff --git a/examples/stats_snapping_distance_creation.py b/examples/stats_snapping_distance_creation.py index e94c02f..ae07146 100644 --- a/examples/stats_snapping_distance_creation.py +++ b/examples/stats_snapping_distance_creation.py @@ -5,6 +5,7 @@ import numpy as np from brdr.aligner import Aligner +from brdr.loader import GeoJsonFileLoader # Code to create stats.csv @@ -16,8 +17,8 @@ print(array_relevant_distance) x = Aligner() -x.load_thematic_data_file("../tests/testdata/theme_leuven.geojson", "aanduid_id") -x.load_reference_data_file("../tests/testdata/reference_leuven.geojson", "capakey") +x.load_thematic_data(GeoJsonFileLoader("../tests/testdata/theme_leuven.geojson", "aanduid_id")) +x.load_reference_data(GeoJsonFileLoader("../tests/testdata/reference_leuven.geojson", "capakey")) with open("../tests/output/stats" + time + ".csv", "w", newline="") as csvfile: writer = csv.writer( csvfile, delimiter=";" diff --git a/tests/test_aligner.py b/tests/test_aligner.py index dd3cd64..9ae1af9 100644 --- a/tests/test_aligner.py +++ b/tests/test_aligner.py @@ -1,3 +1,4 @@ +import os import unittest import numpy as np @@ -12,8 +13,8 @@ from brdr.geometry_utils import _grid_bounds from brdr.geometry_utils import buffer_neg_pos from brdr.grb import GRBActualLoader -from brdr.loader import GeoJsonLoader -from brdr.typings import FeatureCollection +from brdr.loader import GeoJsonLoader, DictLoader +from brdr.typings import FeatureCollection, ProcessResult class TestAligner(unittest.TestCase): @@ -49,29 
+50,28 @@ def test_grid_bounds_2(self): for partition in grid_partitions: self.assertIsInstance(partition, Polygon) - # def test_export_results(self): - # #TODO - # aligner = Aligner() - # aligner.load_thematic_data_dict( - # {"theme_id_1": from_wkt("POLYGON ((0 0, 0 9, 5 10, 10 0, 0 0))")} - # ) - # aligner.load_reference_data_dict( - # {"ref_id_1": from_wkt("POLYGON ((0 1, 0 10,8 10,10 1,0 1))")} - # ) - # aligner.process_dict_thematic() - # path = "./tmp/" - # aligner.export_results(path=path) - # filenames = [f"{k}.geojson" for k in ProcessResult.__annotations__] - # for file_name in os.listdir(path): - # os.remove(path + file_name) - # assert file_name in filenames - # os.rmdir(path) + def test_export_results(self): + aligner = Aligner() + aligner.load_thematic_data(DictLoader + ({"theme_id_1": from_wkt("POLYGON ((0 0, 0 9, 5 10, 10 0, 0 0))")} + )) + aligner.load_reference_data(DictLoader( + {"ref_id_1": from_wkt("POLYGON ((0 1, 0 10,8 10,10 1,0 1))")} + )) + aligner.process_dict_thematic() + path = "./tmp/" + aligner.export_results(path=path) + filenames = [f"{k}.geojson" for k in ProcessResult.__annotations__] + for file_name in os.listdir(path): + os.remove(path + file_name) + assert file_name in filenames + os.rmdir(path) def test_get_formula_full_intersection(self): # Test when intersection equals reference geometry key = "a" ref_dict = {key: self.sample_geom} - self.sample_aligner.load_reference_data_dict(ref_dict) + self.sample_aligner.load_reference_data(DictLoader(ref_dict)) res = self.sample_aligner.get_formula(self.sample_geom, with_geom=True) self.assertTrue(res["full"]) result = res["reference_features"][key] @@ -82,7 +82,7 @@ def test_get_formula_partial_intersection(self): # Test when intersection is partial key = "a" ref_dict = {key: self.sample_geom.buffer(0.5)} - self.sample_aligner.load_reference_data_dict(ref_dict) + self.sample_aligner.load_reference_data(DictLoader(ref_dict)) res = self.sample_aligner.get_formula(self.sample_geom, with_geom=True) self.assertFalse(res["full"]) result = res["reference_features"][key] @@ -94,7 +94,7 @@ def test_process_geometry(self): # Test if processed geometry is equal to reference geometry key_ref = "a" ref_dict = {key_ref: self.sample_geom} - self.sample_aligner.load_reference_data_dict(ref_dict) + self.sample_aligner.load_reference_data(DictLoader(ref_dict)) process_result = self.sample_aligner.process_geometry( self.sample_geom.buffer(0.5) ) @@ -111,9 +111,9 @@ def test_predictor(self): # ADD A REFERENCE POLYGON TO REFERENCE DICTIONARY reference_dict = {"ref_id": from_wkt("POLYGON ((0 1, 0 10,8 10,10 1,0 1))")} # LOAD THEMATIC DICTIONARY - self.sample_aligner.load_thematic_data_dict(thematic_dict) + self.sample_aligner.load_thematic_data(DictLoader(thematic_dict)) # LOAD REFERENCE DICTIONARY - self.sample_aligner.load_reference_data_dict(reference_dict) + self.sample_aligner.load_reference_data(DictLoader(reference_dict)) series = np.arange(0, 300, 10, dtype=int) / 100 # predict which relevant distances are interesting to propose as resulting # geometry @@ -134,7 +134,7 @@ def test_load_reference_data_grb_actual_adp(self): "174184.09476602054201066 171899.68933439542888664)))" ) } - self.sample_aligner.load_thematic_data_dict(thematic_dict) + self.sample_aligner.load_thematic_data(DictLoader(thematic_dict)) # LOAD REFERENCE DICTIONARY self.sample_aligner.load_reference_data(GRBActualLoader(aligner=self.sample_aligner, grb_type=GRBType.ADP, partition=1000) @@ -152,7 +152,7 @@ def test_load_reference_data_grb_actual_gbg(self): 
"174184.09476602054201066 171899.68933439542888664)))" ) } - self.sample_aligner.load_thematic_data_dict(thematic_dict) + self.sample_aligner.load_thematic_data(DictLoader(thematic_dict)) # LOAD REFERENCE DICTIONARY self.sample_aligner.load_reference_data(GRBActualLoader(aligner=self.sample_aligner, grb_type=GRBType.GBG, partition=1000) @@ -170,7 +170,7 @@ def test_load_reference_data_grb_actual_knw(self): "174184.09476602054201066 171899.68933439542888664)))" ) } - self.sample_aligner.load_thematic_data_dict(thematic_dict) + self.sample_aligner.load_thematic_data(DictLoader(thematic_dict)) # LOAD REFERENCE DICTIONARY self.sample_aligner.load_reference_data(GRBActualLoader(aligner=self.sample_aligner, grb_type=GRBType.KNW, partition=1000) @@ -186,9 +186,9 @@ def test_all_od_strategies(self): # ADD A REFERENCE POLYGON TO REFERENCE DICTIONARY reference_dict = {"ref_id_1": from_wkt("POLYGON ((0 1, 0 10,8 10,10 1,0 1))")} # LOAD THEMATIC DICTIONARY - self.sample_aligner.load_thematic_data_dict(thematic_dict) + self.sample_aligner.load_thematic_data(DictLoader(thematic_dict)) # LOAD REFERENCE DICTIONARY - self.sample_aligner.load_reference_data_dict(reference_dict) + self.sample_aligner.load_reference_data(DictLoader(reference_dict)) relevant_distance = 1 for od_strategy in OpenbaarDomeinStrategy: process_result = self.sample_aligner.process_dict_thematic( @@ -214,7 +214,7 @@ def test_process_interior_ring(self): "174400.07184735251939856 170963.78864862219779752)))" ) } - self.sample_aligner.load_thematic_data_dict(thematic_dict) + self.sample_aligner.load_thematic_data(DictLoader(thematic_dict)) # LOAD REFERENCE DICTIONARY self.sample_aligner.load_reference_data(GRBActualLoader(aligner=self.sample_aligner, grb_type=GRBType.GBG, partition=1000) @@ -226,7 +226,7 @@ def test_process_circle(self): #TODO geometry = Point(0, 0).buffer(3) thematic_dict = {"key": geometry} - self.sample_aligner.load_thematic_data_dict(thematic_dict) + self.sample_aligner.load_thematic_data(DictLoader(thematic_dict)) # LOAD REFERENCE DICTIONARY self.sample_aligner.load_reference_data(GRBActualLoader(aligner=self.sample_aligner, grb_type=GRBType.GBG, partition=1000) @@ -272,38 +272,24 @@ def test__prepare_thematic_data(self): } thematic_loader = GeoJsonLoader(_input=geojson, id_property="theme_identifier") aligner.dict_thematic, properties, source = thematic_loader.load_data() - #TODO assert aligner.dict_thematic == {"4": shape(geojson["features"][0]["geometry"])} self.assertGreater(len(aligner.dict_thematic), 0) - # def test_get_results_as_dict(self): - # self.sample_aligner.load_thematic_data_dict( - # { - # "theme_id_1": from_wkt("POLYGON ((0 0, 0 9, 5 10, 10 0, 0 0))"), - # "theme_id_2": from_wkt("POLYGON ((0 0, 0 9, 5 10, 10 0, 0 0))"), - # } - # ) - # self.sample_aligner.load_reference_data_dict( - # {"ref_id_1": from_wkt("POLYGON ((0 1, 0 10,8 10,10 1,0 1))")} - # ) - # self.sample_aligner.process_dict_thematic() - # result = self.sample_aligner.get_results_as_dict() - # assert len(result) == 2 - def test_get_reference_as_geojson(self): - self.sample_aligner.load_thematic_data_dict( - {"theme_id_1": from_wkt("POLYGON ((0 0, 0 9, 5 10, 10 0, 0 0))")} - ) - self.sample_aligner.load_reference_data_dict( - {"ref_id_1": from_wkt("POLYGON ((0 1, 0 10,8 10,10 1,0 1))")} + self.sample_aligner.load_thematic_data(DictLoader + ({"theme_id_1": from_wkt("POLYGON ((0 0, 0 9, 5 10, 10 0, 0 0))")} + )) + self.sample_aligner.load_reference_data(DictLoader( + {"ref_id_1": from_wkt("POLYGON ((0 1, 0 10,8 10,10 1,0 1))")}) ) 
self.sample_aligner.process_dict_thematic() self.sample_aligner.get_reference_as_geojson() def test_fully_aligned_input(self): aligned_shape = from_wkt("POLYGON ((0 0, 0 9, 5 10, 10 0, 0 0))") - self.sample_aligner.load_thematic_data_dict({"theme_id_1": aligned_shape}) - self.sample_aligner.load_reference_data_dict({"ref_id_1": aligned_shape}) + loader = DictLoader({"theme_id_1": aligned_shape}) + self.sample_aligner.load_thematic_data(DictLoader({"theme_id_1": aligned_shape})) + self.sample_aligner.load_reference_data(DictLoader({"ref_id_1": aligned_shape})) relevant_distance = 1 result = self.sample_aligner.process_dict_thematic(relevant_distance=relevant_distance) assert result["theme_id_1"][relevant_distance].get("result") == aligned_shape @@ -330,8 +316,8 @@ def test_fully_aligned_geojson_output(self): "173463.11530961000244133 174423.83310307000647299)))" ) - self.sample_aligner.load_thematic_data_dict({"theme_id_1": aligned_shape}) - self.sample_aligner.load_reference_data_dict({"ref_id_1": aligned_shape}) + self.sample_aligner.load_thematic_data(DictLoader({"theme_id_1": aligned_shape})) + self.sample_aligner.load_reference_data(DictLoader({"ref_id_1": aligned_shape})) self.sample_aligner.process_dict_thematic() fcs = self.sample_aligner.get_results_as_geojson(formula=True) assert fcs["result"]["features"][0]["properties"]["area"] > 0 diff --git a/tests/test_examples.py b/tests/test_examples.py index d4a1152..7889b67 100644 --- a/tests/test_examples.py +++ b/tests/test_examples.py @@ -40,7 +40,7 @@ def test_example_combined_borders_adp_gbg(self): dict_ref2, dict_ref_properties_gbg, source_gbg = gbg_loader.load_data() dict_ref.update(dict_ref2) # combine 2 dictionaries # make a polygonized version of the reference data with non-overlapping polygons - aligner.load_reference_data_dict(dict_ref) + aligner.load_reference_data(DictLoader(dict_ref)) rel_dist = 2 result_dict = aligner.process_dict_thematic(rel_dist, 4) @@ -212,7 +212,7 @@ def test_example_predictor(self): aligner = Aligner() # Load thematic data & reference data dict_theme = get_oe_dict_by_ids([131635]) - aligner.load_thematic_data_dict(dict_theme) + aligner.load_thematic_data(DictLoader(dict_theme)) aligner.load_reference_data(GRBActualLoader(aligner=aligner, grb_type=GRBType.GBG, partition=1000) ) diff --git a/tests/test_loader.py b/tests/test_loader.py index 5349466..f654dc2 100644 --- a/tests/test_loader.py +++ b/tests/test_loader.py @@ -1,7 +1,7 @@ from brdr.aligner import Aligner from brdr.enums import GRBType from brdr.grb import GRBActualLoader -from brdr.loader import DictLoader +from brdr.loader import DictLoader, GeoJsonUrlLoader from brdr.oe import get_oe_dict_by_ids @@ -26,7 +26,7 @@ def test_load_data(self): assert aligner.dict_reference is not None -def test_load_thematic_data_url(requests_mock, haspengouw_geojson): +def test_load_thematic_data_by_url(requests_mock, haspengouw_geojson): requests_mock.add( requests_mock.GET, "https://mock.com/haspengouw.geojson", @@ -34,7 +34,7 @@ def test_load_thematic_data_url(requests_mock, haspengouw_geojson): status=200, ) aligner = Aligner() - aligner.load_thematic_data_url("https://mock.com/haspengouw.geojson", "Id") + aligner.load_thematic_data(GeoJsonUrlLoader("https://mock.com/haspengouw.geojson", "Id")) assert aligner.dict_thematic is not None @@ -47,6 +47,6 @@ def test_load_reference_data_url(requests_mock, haspengouw_geojson): ) aligner = Aligner() - aligner.load_reference_data_url("https://mock.com/haspengouw.geojson", "Id") + 
aligner.load_reference_data(GeoJsonUrlLoader("https://mock.com/haspengouw.geojson", "Id")) assert aligner.dict_reference is not None From 9bf8045f73ba5a5e51b521e5dac45ab009f72ccb Mon Sep 17 00:00:00 2001 From: dieuska Date: Wed, 18 Sep 2024 10:37:46 +0200 Subject: [PATCH 16/35] #82 cleaned up the example for 'show_map' --- examples/{examples_aligner.py => example_aligners.py} | 7 +++---- examples/example_ao.py | 2 +- examples/example_eo.py | 2 +- examples/example_local_data.py | 7 +++---- examples/{examples_predictor.py => example_predictor.py} | 2 +- 5 files changed, 9 insertions(+), 11 deletions(-) rename examples/{examples_aligner.py => example_aligners.py} (92%) rename examples/{examples_predictor.py => example_predictor.py} (97%) diff --git a/examples/examples_aligner.py b/examples/example_aligners.py similarity index 92% rename from examples/examples_aligner.py rename to examples/example_aligners.py index bcef75c..d9f8f98 100644 --- a/examples/examples_aligner.py +++ b/examples/example_aligners.py @@ -46,12 +46,11 @@ # border will be used for cases where relevant zones cannot be used for # determination) rel_dist = 6 - dict_results_by_distance = { - rel_dist: aligner.process_dict_thematic( + dict_results = aligner.process_dict_thematic( relevant_distance=rel_dist, od_strategy=OpenbaarDomeinStrategy.SNAP_FULL_AREA_ALL_SIDE, threshold_overlap_percentage=-1, ) - } + aligner.export_results("output/") - show_map(dict_results_by_distance, aligner.dict_thematic, aligner.dict_reference) + show_map(dict_results, aligner.dict_thematic, aligner.dict_reference) diff --git a/examples/example_ao.py b/examples/example_ao.py index 5d60bc0..23e2c6e 100644 --- a/examples/example_ao.py +++ b/examples/example_ao.py @@ -29,7 +29,7 @@ diff = {key: diffs[key]} plot_series(series, diff) show_map( - dict_predicted, + {key: dict_predicted[key]}, {key: aligner.dict_thematic[key]}, aligner.dict_reference, ) diff --git a/examples/example_eo.py b/examples/example_eo.py index 3e38d3d..6ba5c23 100644 --- a/examples/example_eo.py +++ b/examples/example_eo.py @@ -44,7 +44,7 @@ diff = {key: diffs[key]} plot_series(series, diff) show_map( - dict_predicted, + {key: dict_predicted[key]}, {key: aligner.dict_thematic[key]}, aligner.dict_reference, ) diff --git a/examples/example_local_data.py b/examples/example_local_data.py index 5fb3d51..834249f 100644 --- a/examples/example_local_data.py +++ b/examples/example_local_data.py @@ -14,11 +14,10 @@ aligner.load_reference_data(loader) # Example how to use the Aligner rel_dist = 1 - dict_results_by_distance = { - aligner.relevant_distance: aligner.process_dict_thematic( + dict_results = aligner.process_dict_thematic( relevant_distance=rel_dist, od_strategy=OpenbaarDomeinStrategy.SNAP_FULL_AREA_ALL_SIDE, ) - } + aligner.export_results("output/") - # show_map(dict_results_by_distance, aligner.dict_thematic, aligner.dict_reference) + #show_map(dict_results, aligner.dict_thematic, aligner.dict_reference) diff --git a/examples/examples_predictor.py b/examples/example_predictor.py similarity index 97% rename from examples/examples_predictor.py rename to examples/example_predictor.py index 4ee046f..78c60db 100644 --- a/examples/examples_predictor.py +++ b/examples/example_predictor.py @@ -30,7 +30,7 @@ ) for key in dict_predicted: show_map( - dict_predicted[key], + {key:dict_predicted[key]}, {key: aligner.dict_thematic[key]}, aligner.dict_reference, ) From 610ffa2d32b341413963c85034cf1a693af6dd75 Mon Sep 17 00:00:00 2001 From: dieuska Date: Wed, 18 Sep 2024 11:07:47 +0200 Subject: 
[PATCH 17/35] #69 --- brdr/aligner.py | 4 +++- examples/example_predictor.py | 14 ++++++++------ 2 files changed, 11 insertions(+), 7 deletions(-) diff --git a/brdr/aligner.py b/brdr/aligner.py index 27cf07f..c16d83f 100644 --- a/brdr/aligner.py +++ b/brdr/aligner.py @@ -610,12 +610,14 @@ def get_series_as_geojson(self, formula=False, series_dict=None): prop_dictionary = defaultdict(dict) for theme_id, results_dict in series_dict.items(): + nr_calculations = len(results_dict) for relevant_distance, process_results in results_dict.items(): if formula: result = process_results["result"] formula = self.get_formula(result) prop_dictionary[theme_id][relevant_distance] = { - "formula": json.dumps(formula) + "formula": json.dumps(formula), + "nr_calculations": nr_calculations } return get_series_geojson_dict( diff --git a/examples/example_predictor.py b/examples/example_predictor.py index 78c60db..8f16050 100644 --- a/examples/example_predictor.py +++ b/examples/example_predictor.py @@ -28,9 +28,11 @@ dict_series, dict_predicted, diffs = aligner.predictor( relevant_distances=series, od_strategy=4, threshold_overlap_percentage=50 ) - for key in dict_predicted: - show_map( - {key:dict_predicted[key]}, - {key: aligner.dict_thematic[key]}, - aligner.dict_reference, - ) + fcs = aligner.get_predictions_as_geojson(formula=True) + print(fcs["result"]) + # for key in dict_predicted: + # show_map( + # {key:dict_predicted[key]}, + # {key: aligner.dict_thematic[key]}, + # aligner.dict_reference, + # ) From 3537ca5dc7b7f949d3d4b281e2018c530caee7b9 Mon Sep 17 00:00:00 2001 From: dieuska Date: Wed, 18 Sep 2024 11:11:44 +0200 Subject: [PATCH 18/35] #69 small extra improvement --- brdr/aligner.py | 8 ++++---- examples/example_predictor.py | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/brdr/aligner.py b/brdr/aligner.py index c16d83f..6e60ab3 100644 --- a/brdr/aligner.py +++ b/brdr/aligner.py @@ -612,13 +612,13 @@ def get_series_as_geojson(self, formula=False, series_dict=None): for theme_id, results_dict in series_dict.items(): nr_calculations = len(results_dict) for relevant_distance, process_results in results_dict.items(): + prop_dictionary[theme_id][relevant_distance] = { + "nr_calculations": nr_calculations + } if formula: result = process_results["result"] formula = self.get_formula(result) - prop_dictionary[theme_id][relevant_distance] = { - "formula": json.dumps(formula), - "nr_calculations": nr_calculations - } + prop_dictionary[theme_id][relevant_distance]["formula"] =json.dumps(formula) return get_series_geojson_dict( series_dict, diff --git a/examples/example_predictor.py b/examples/example_predictor.py index 8f16050..662ac59 100644 --- a/examples/example_predictor.py +++ b/examples/example_predictor.py @@ -28,7 +28,7 @@ dict_series, dict_predicted, diffs = aligner.predictor( relevant_distances=series, od_strategy=4, threshold_overlap_percentage=50 ) - fcs = aligner.get_predictions_as_geojson(formula=True) + fcs = aligner.get_predictions_as_geojson(formula=False) print(fcs["result"]) # for key in dict_predicted: # show_map( From 3b18fcaef1bc15b0727ddffce58aa6887e930803 Mon Sep 17 00:00:00 2001 From: dieuska Date: Wed, 18 Sep 2024 11:54:36 +0200 Subject: [PATCH 19/35] #70 predictor will only give unique predictions --- brdr/aligner.py | 34 +++++++++++++++-- .../example_predictor_double_prediction.py | 37 +++++++++++++++++++ tests/test_aligner.py | 22 +++++++++++ 3 files changed, 89 insertions(+), 4 deletions(-) create mode 100644 examples/example_predictor_double_prediction.py diff 
--git a/brdr/aligner.py b/brdr/aligner.py index 6e60ab3..1ffb0ec 100644 --- a/brdr/aligner.py +++ b/brdr/aligner.py @@ -24,7 +24,7 @@ from brdr.constants import DEFAULT_CRS from brdr.constants import THRESHOLD_CIRCLE_RATIO from brdr.enums import OpenbaarDomeinStrategy -from brdr.geometry_utils import buffer_neg +from brdr.geometry_utils import buffer_neg, safe_equals from brdr.geometry_utils import buffer_neg_pos from brdr.geometry_utils import buffer_pos from brdr.geometry_utils import fill_and_remove_gaps @@ -33,7 +33,7 @@ from brdr.geometry_utils import safe_symmetric_difference from brdr.geometry_utils import safe_union from brdr.loader import Loader -from brdr.logger import Logger +from brdr.logger import Logger, LOGGER from brdr.typings import ProcessResult from brdr.utils import diffs_from_dict_series, multipolygons_to_singles from brdr.utils import geojson_from_dict @@ -395,14 +395,40 @@ def predictor( for zs in zero_streaks: dict_predicted[theme_id] [zs[0]]= dict_series[theme_id][zs[0]] - self.dict_predicted = dict_predicted + #Check if the predicted reldists are unique (and remove duplicated predictions + dict_predicted_unique = defaultdict(dict) + for theme_id,dist_results in dict_predicted.items(): + dict_predicted_unique[theme_id] = {} + predicted_geoms_for_theme_id = [] + for rel_dist, processresults in dist_results.items(): + predicted_geom = processresults["result"] + if not self._equal_geom_in_array(predicted_geom,predicted_geoms_for_theme_id): + dict_predicted_unique[theme_id][rel_dist] = processresults + predicted_geoms_for_theme_id.append(processresults["result"]) + else: + self.logger.feedback_info(f"Duplicate prediction found for key {theme_id} at distance {rel_dist}: Prediction excluded") + + self.dict_predicted = dict_predicted_unique return ( dict_series, - dict_predicted, + self.dict_predicted, diffs_dict, ) + @staticmethod + def _equal_geom_in_array(geom,geom_array): + """ + Check if a predicted geometry is equal to other predicted geometries in a list. + Equality is defined as there is the symmetrical difference is smaller than the CORRECTION DISTANCE + Returns True if one of the elements is equal, otherwise False + """ + for g in geom_array: + #if safe_equals(geom,g): + if buffer_neg(safe_symmetric_difference(geom, g),CORR_DISTANCE).is_empty: + return True + return False + def process_series( self, relevant_distances: Iterable[float], diff --git a/examples/example_predictor_double_prediction.py b/examples/example_predictor_double_prediction.py new file mode 100644 index 0000000..a6496f3 --- /dev/null +++ b/examples/example_predictor_double_prediction.py @@ -0,0 +1,37 @@ +import numpy as np +from shapely import from_wkt + +from brdr.aligner import Aligner +from brdr.enums import GRBType +from brdr.grb import GRBActualLoader +from brdr.loader import GeoJsonFileLoader, DictLoader +from examples import show_map + +# Press the green button in the gutter to run the script. 
+if __name__ == "__main__": + """ + example to use the predictor-function to automatically predict which resulting + geometries are interesting to look at (based on detection of breakpoints and + relevant distances of 'no-change') + """ + # Initiate an Aligner + aligner = Aligner() + # Load thematic data & reference data + loader = DictLoader({"id1": from_wkt("MultiPolygon Z (((138430.4033999964594841 194082.86080000177025795 0, 138422.19659999758005142 194080.36510000005364418 0, 138419.01550000160932541 194079.34930000081658363 0, 138412.59849999845027924 194077.14139999821782112 0, 138403.65579999983310699 194074.06430000066757202 0, 138402.19910000264644623 194077.67480000108480453 0, 138401.83420000225305557 194078.57939999923110008 0, 138400.89329999685287476 194080.91140000149607658 0, 138400.31650000065565109 194080.67880000174045563 0, 138399.27300000190734863 194083.37680000066757202 0, 138405.93310000002384186 194085.95410000160336494 0, 138413.51049999892711639 194088.80620000138878822 0, 138427.25680000334978104 194094.29969999939203262 0, 138430.4033999964594841 194082.86080000177025795 0)))")}) + aligner.load_thematic_data(loader) + loader = GRBActualLoader(grb_type=GRBType.ADP, partition=1000, aligner=aligner) + aligner.load_reference_data(loader) + + series = np.arange(0, 800, 10, dtype=int) / 100 + # predict which relevant distances are interesting to propose as resulting geometry + dict_series, dict_predicted, diffs = aligner.predictor( + relevant_distances=series, od_strategy=4, threshold_overlap_percentage=50 + ) + fcs = aligner.get_predictions_as_geojson(formula=False) + print(fcs["result"]) + # for key in dict_predicted: + # show_map( + # {key:dict_predicted[key]}, + # {key: aligner.dict_thematic[key]}, + # aligner.dict_reference, + # ) diff --git a/tests/test_aligner.py b/tests/test_aligner.py index 9ae1af9..a640ea7 100644 --- a/tests/test_aligner.py +++ b/tests/test_aligner.py @@ -123,6 +123,28 @@ def test_predictor(self): ) self.assertEqual(len(dict_predicted), len(thematic_dict)) + + def test_predictor_double_prediction(self): + """ + Test if a double prediction is filtered out of the prediction results. 
+ This testdata has 2 resulting predictions that are the same (at 0.0 and 6.0), and 6.0 will be removed from dict_predicted + """ + # Initiate an Aligner + aligner = Aligner() + # Load thematic data & reference data + loader = DictLoader({"id1": from_wkt( + "MultiPolygon Z (((138430.4033999964594841 194082.86080000177025795 0, 138422.19659999758005142 194080.36510000005364418 0, 138419.01550000160932541 194079.34930000081658363 0, 138412.59849999845027924 194077.14139999821782112 0, 138403.65579999983310699 194074.06430000066757202 0, 138402.19910000264644623 194077.67480000108480453 0, 138401.83420000225305557 194078.57939999923110008 0, 138400.89329999685287476 194080.91140000149607658 0, 138400.31650000065565109 194080.67880000174045563 0, 138399.27300000190734863 194083.37680000066757202 0, 138405.93310000002384186 194085.95410000160336494 0, 138413.51049999892711639 194088.80620000138878822 0, 138427.25680000334978104 194094.29969999939203262 0, 138430.4033999964594841 194082.86080000177025795 0)))")}) + aligner.load_thematic_data(loader) + loader = GRBActualLoader(grb_type=GRBType.ADP, partition=1000, aligner=aligner) + aligner.load_reference_data(loader) + + series = np.arange(0, 800, 10, dtype=int) / 100 + # predict which relevant distances are interesting to propose as resulting geometry + dict_series, dict_predicted, diffs = aligner.predictor( + relevant_distances=series, od_strategy=4, threshold_overlap_percentage=50 + ) + self.assertEqual(len(dict_predicted["id1"]), 3) + def test_load_reference_data_grb_actual_adp(self): thematic_dict = { "theme_id_1": from_wkt( From 5c0b0aa2e7df2e3bfd8df26f3330c6c877cf8a96 Mon Sep 17 00:00:00 2001 From: dieuska Date: Wed, 18 Sep 2024 13:27:33 +0200 Subject: [PATCH 20/35] #80 and reformat --- README.md | 77 ++++++++++----- brdr/__init__.py | 2 +- brdr/aligner.py | 32 ++++--- brdr/loader.py | 6 +- brdr/logger.py | 1 + examples/example_131635.py | 2 +- examples/example_aligner.py | 8 +- examples/example_aligners.py | 28 +++--- examples/example_evaluate_multi_to_single.py | 1 + examples/example_local_data.py | 8 +- examples/example_multipolygon.py | 6 +- examples/example_predictor.py | 1 - .../example_predictor_double_prediction.py | 3 +- examples/example_readme.py | 12 ++- examples/example_refactor_dict_series.py | 12 +-- examples/example_speedtest.py | 1 + examples/example_update_to_actual_grb.py | 14 +-- tests/test_aligner.py | 94 ++++++++++++------- tests/test_geometry_utils.py | 2 +- tests/test_loader.py | 8 +- tests/test_oe.py | 17 ---- tests/test_utils.py | 4 +- 22 files changed, 201 insertions(+), 138 deletions(-) diff --git a/README.md b/README.md index bbff24f..48b6693 100644 --- a/README.md +++ b/README.md @@ -43,16 +43,45 @@ The figure below shows: ### Functionalities -`brdr` provides a variety of side-functionalities to assist in aligning boundaries, including: - -* Loading thematic data ((Multi-)Polygons): as a dict, geojson or Web Feature Service (WFS-url) -* Loading reference data ((Multi-)Polygons): as a dict, geojson or Web Feature Service (WFS-url) -* (Flanders-specific) Download reference data from GRB-Flanders -* Align thematic boundaries to reference boundaries with a specific relevant distance (process_dict_thematic) -* Align thematic boundaries to reference boundaries with a series of specified relevant distances (process_series) -* Make use of a 'predictor'-function that aligns thematic boundaries to reference boundaries for 'predicted' interesting - relevant distances (predictor) -* Calculating a descriptive formulation 
of a thematic boundary based on a reference layer
+`brdr` provides a variety of functionalities in the Aligner-class to assist in aligning boundaries, including:
+
+* Loaders:
+    * aligner.load_thematic_data(): Loading thematic data ((Multi-)Polygons) as a dictionary (DictLoader) or geojson (GeoJsonFileLoader, GeoJsonUrlLoader)
+    * aligner.load_reference_data(): Loading reference data ((Multi-)Polygons) as a dictionary (DictLoader) or geojson (GeoJsonFileLoader, GeoJsonUrlLoader)
+
+* Processors:
+    * aligner.process_dict_thematic(): Align thematic boundaries to reference boundaries with a specific relevant distance
+    * aligner.process_series(): Align thematic boundaries to reference boundaries with a series of specified relevant distances
+    * aligner.predictor(): Align thematic boundaries to reference boundaries at automatically 'predicted' relevant distances
+    * aligner.get_formula(): Calculating a descriptive formula of a thematic boundary based on a reference layer
+
+* Exporters:
+    * aligner.get_results_as_geojson(): Returns a collection of geojson-dictionaries with the results (resulting geometry, differences, ...)
+    * aligner.get_predictions_as_geojson(): Returns a collection of geojson-dictionaries with the predictions (resulting geometry, differences, ...)
+    * aligner.get_series_as_geojson(): Returns a collection of geojson-dictionaries for a given resulting series (resulting geometry, differences, ...)
+    * aligner.get_reference_as_geojson(): Returns a geojson-featurecollection with all the reference geometries
+    * aligner.export_predictions(): Exports the predicted geojson-files to a specified path
+    * aligner.export_results(): Exports the resulting geojson-files to a specified path
+
+Besides the generic functionalities, a range of Flanders-specific functionalities is provided:
+
+* Loaders:
+    * GRBActualLoader: Loading actual GRB data (parcels, buildings)
+    * GRBFiscalParcelLoader: Loading fiscal GRB-parcels of a specific year
+* Processors:
+    * grb.get_geoms_affected_by_grb_change(): Get thematic geometries that are possibly affected by GRB-changes during a specific timespan
+    * grb.update_to_actual_grb(): Aligns the boundaries of thematic features to the actual GRB-boundaries
+    * grb.evaluate(): Evaluates a thematic dictionary on equality with a newer version and adds an 'evaluation'-property
 
 ### Possible application fields
 
@@ -64,7 +93,7 @@ The figure below shows:
   resulting geometries
 * ...
 * Data-Analysis: Investigate the pattern in deviation and change between thematic and reference boundaries
-* Update-detection: Investigate the descriptive formulation before and after alignment to check for (automatic)
+* Update-detection: Investigate the descriptive formula before and after alignment to check for (automatic)
   alignment of geodata
 * ...
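To make the Loader → Processor → Exporter flow listed above concrete, here is a minimal, self-contained sketch that only uses calls shown elsewhere in this patch series; the WKT polygons, identifiers and relevant distance are toy illustration values, not data from the repository:

```python
from shapely import from_wkt

from brdr.aligner import Aligner
from brdr.loader import DictLoader

# Load one thematic polygon and one reference polygon from in-memory dictionaries.
aligner = Aligner()
aligner.load_thematic_data(
    DictLoader({"theme_id_1": from_wkt("POLYGON ((0 0, 0 9, 5 10, 10 0, 0 0))")})
)
aligner.load_reference_data(
    DictLoader({"ref_id_1": from_wkt("POLYGON ((0 1, 0 10, 8 10, 10 1, 0 1))")})
)

# Align with a single relevant distance; results are keyed by theme id and distance.
relevant_distance = 1
process_result = aligner.process_dict_thematic(relevant_distance=relevant_distance)
print(process_result["theme_id_1"][relevant_distance]["result"].wkt)

# Exporters return geojson feature collections keyed by result type.
fcs = aligner.get_results_as_geojson(formula=True)
print(fcs["result"])
```

The same flow works with the Flanders-specific loaders (for example GRBActualLoader) when the GRB web services are reachable.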
@@ -108,17 +137,18 @@ reference_dict = {"ref_id_1": geom_from_wkt("POLYGON ((0 1, 0 10,8 10,10 1,0 1)) loader = DictLoader(reference_dict) aligner.load_reference_data(loader) # EXECUTE THE ALIGNMENT -process_result = aligner.process_dict_thematic(relevant_distance=1) +relevant_distance = 1 +process_result = aligner.process_dict_thematic(relevant_distance=relevant_distance) # PRINT RESULTS IN WKT -print("result: " + process_result["theme_id_1"]["result"].wkt) -print("added area: " + process_result["theme_id_1"]["result_diff_plus"].wkt) -print("removed area: " + process_result["theme_id_1"]["result_diff_min"].wkt) -# SHOW RESULTING GEOMETRY AND CHANGES -# from examples import show_map -# show_map( -# {aligner.relevant_distance:(result, result_diff, result_diff_plus, result_diff_min, relevant_intersection, relevant_diff)}, -# thematic_dict, -# reference_dict) +print("result: " + process_result["theme_id_1"][relevant_distance]["result"].wkt) +print( + "added area: " + + process_result["theme_id_1"][relevant_distance]["result_diff_plus"].wkt +) +print( + "removed area: " + + process_result["theme_id_1"][relevant_distance]["result_diff_min"].wkt +) ``` The resulting figure shows: @@ -205,7 +235,10 @@ pip-compile $PIP_COMPILE_ARGS -o requirements-dev.txt --all-extras ### tests ```python -python -m pytest --cov=brdr tests/ --cov-report term-missing +python - m +pytest - -cov = brdr +tests / --cov - report +term - missing ``` ## Motivation & citation diff --git a/brdr/__init__.py b/brdr/__init__.py index 77648b6..3ced358 100644 --- a/brdr/__init__.py +++ b/brdr/__init__.py @@ -1 +1 @@ -__version__ = "0.2.1" \ No newline at end of file +__version__ = "0.2.1" diff --git a/brdr/aligner.py b/brdr/aligner.py index 1ffb0ec..4078bcb 100644 --- a/brdr/aligner.py +++ b/brdr/aligner.py @@ -24,7 +24,7 @@ from brdr.constants import DEFAULT_CRS from brdr.constants import THRESHOLD_CIRCLE_RATIO from brdr.enums import OpenbaarDomeinStrategy -from brdr.geometry_utils import buffer_neg, safe_equals +from brdr.geometry_utils import buffer_neg from brdr.geometry_utils import buffer_neg_pos from brdr.geometry_utils import buffer_pos from brdr.geometry_utils import fill_and_remove_gaps @@ -33,7 +33,7 @@ from brdr.geometry_utils import safe_symmetric_difference from brdr.geometry_utils import safe_union from brdr.loader import Loader -from brdr.logger import Logger, LOGGER +from brdr.logger import Logger from brdr.typings import ProcessResult from brdr.utils import diffs_from_dict_series, multipolygons_to_singles from brdr.utils import geojson_from_dict @@ -402,7 +402,7 @@ def predictor( predicted_geoms_for_theme_id = [] for rel_dist, processresults in dist_results.items(): predicted_geom = processresults["result"] - if not self._equal_geom_in_array(predicted_geom,predicted_geoms_for_theme_id): + if not _equal_geom_in_array(predicted_geom,predicted_geoms_for_theme_id): dict_predicted_unique[theme_id][rel_dist] = processresults predicted_geoms_for_theme_id.append(processresults["result"]) else: @@ -416,18 +416,7 @@ def predictor( diffs_dict, ) - @staticmethod - def _equal_geom_in_array(geom,geom_array): - """ - Check if a predicted geometry is equal to other predicted geometries in a list. 
- Equality is defined as there is the symmetrical difference is smaller than the CORRECTION DISTANCE - Returns True if one of the elements is equal, otherwise False - """ - for g in geom_array: - #if safe_equals(geom,g): - if buffer_neg(safe_symmetric_difference(geom, g),CORR_DISTANCE).is_empty: - return True - return False + def process_series( self, @@ -1217,3 +1206,16 @@ def _get_relevant_polygons_from_geom(geometry: BaseGeometry, buffer_distance: fl if relevant_geom is not None and not relevant_geom.is_empty: array.append(g) return make_valid(unary_union(array)) + +@staticmethod +def _equal_geom_in_array(geom,geom_array): + """ + Check if a predicted geometry is equal to other predicted geometries in a list. + Equality is defined as there is the symmetrical difference is smaller than the CORRECTION DISTANCE + Returns True if one of the elements is equal, otherwise False + """ + for g in geom_array: + #if safe_equals(geom,g): + if buffer_neg(safe_symmetric_difference(geom, g),CORR_DISTANCE).is_empty: + return True + return False diff --git a/brdr/loader.py b/brdr/loader.py index b2ef8e5..f8ec963 100644 --- a/brdr/loader.py +++ b/brdr/loader.py @@ -29,9 +29,11 @@ def load_data(self): self.versiondate_info["format"], ) except: - #Catch, to try extracting only the date with default -date format if specific format does not work + # Catch, to try extracting only the date with default -date format if specific format does not work self.data_dict_properties[key][VERSION_DATE] = datetime.strptime( - self.data_dict_properties[key][self.versiondate_info["name"]][:10], + self.data_dict_properties[key][self.versiondate_info["name"]][ + :10 + ], DATE_FORMAT, ) diff --git a/brdr/logger.py b/brdr/logger.py index 682a436..a6a98ab 100644 --- a/brdr/logger.py +++ b/brdr/logger.py @@ -11,6 +11,7 @@ handler.setFormatter(formatter) LOGGER.addHandler(handler) + class Logger: def __init__(self, feedback=None): self.feedback = feedback diff --git a/examples/example_131635.py b/examples/example_131635.py index cb87bba..f39c05f 100644 --- a/examples/example_131635.py +++ b/examples/example_131635.py @@ -6,7 +6,7 @@ from examples import show_map if __name__ == "__main__": - #TODO + # TODO # EXAMPLE for a thematic Polygon (aanduid_id 131635) # Initiate brdr diff --git a/examples/example_aligner.py b/examples/example_aligner.py index 053c922..5c47555 100644 --- a/examples/example_aligner.py +++ b/examples/example_aligner.py @@ -50,9 +50,9 @@ # Example how to use the Aligner rel_dist = 6 - dict_results= aligner.process_dict_thematic( - relevant_distance=rel_dist, - od_strategy=OpenbaarDomeinStrategy.SNAP_FULL_AREA_ALL_SIDE, - ) + dict_results = aligner.process_dict_thematic( + relevant_distance=rel_dist, + od_strategy=OpenbaarDomeinStrategy.SNAP_FULL_AREA_ALL_SIDE, + ) aligner.export_results("output/") show_map(dict_results, aligner.dict_thematic, aligner.dict_reference) diff --git a/examples/example_aligners.py b/examples/example_aligners.py index d9f8f98..3f7850e 100644 --- a/examples/example_aligners.py +++ b/examples/example_aligners.py @@ -10,26 +10,28 @@ # Initiate brdr aligner = Aligner() # Load thematic data - aligner.load_thematic_data(GeoJsonFileLoader( - "../tests/testdata/themelayer_referenced.geojson", "id_theme" - )) + aligner.load_thematic_data( + GeoJsonFileLoader("../tests/testdata/themelayer_referenced.geojson", "id_theme") + ) # Use GRB adp-parcels as reference polygons adp= administratieve percelen - aligner.load_reference_data(GRBActualLoader(grb_type=GRBType.ADP, partition=1000,aligner=aligner)) 
+ aligner.load_reference_data( + GRBActualLoader(grb_type=GRBType.ADP, partition=1000, aligner=aligner) + ) # Example how to use the Aligner rel_dist = 10 dict_results = aligner.process_dict_thematic( - relevant_distance=rel_dist, - od_strategy=OpenbaarDomeinStrategy.SNAP_FULL_AREA_SINGLE_SIDE, - ) + relevant_distance=rel_dist, + od_strategy=OpenbaarDomeinStrategy.SNAP_FULL_AREA_SINGLE_SIDE, + ) aligner.export_results("output/") show_map(dict_results, aligner.dict_thematic, aligner.dict_reference) rel_dist = 6 dict_results = aligner.process_dict_thematic( - relevant_distance=rel_dist, od_strategy=OpenbaarDomeinStrategy.SNAP_ALL_SIDE - ) + relevant_distance=rel_dist, od_strategy=OpenbaarDomeinStrategy.SNAP_ALL_SIDE + ) aligner.export_results("output/") show_map(dict_results, aligner.dict_thematic, aligner.dict_reference) @@ -47,10 +49,10 @@ # determination) rel_dist = 6 dict_results = aligner.process_dict_thematic( - relevant_distance=rel_dist, - od_strategy=OpenbaarDomeinStrategy.SNAP_FULL_AREA_ALL_SIDE, - threshold_overlap_percentage=-1, - ) + relevant_distance=rel_dist, + od_strategy=OpenbaarDomeinStrategy.SNAP_FULL_AREA_ALL_SIDE, + threshold_overlap_percentage=-1, + ) aligner.export_results("output/") show_map(dict_results, aligner.dict_thematic, aligner.dict_reference) diff --git a/examples/example_evaluate_multi_to_single.py b/examples/example_evaluate_multi_to_single.py index 3f0efc8..e0e9add 100644 --- a/examples/example_evaluate_multi_to_single.py +++ b/examples/example_evaluate_multi_to_single.py @@ -11,6 +11,7 @@ from brdr.loader import DictLoader from brdr.oe import OnroerendErfgoedLoader from brdr.utils import get_series_geojson_dict + #from brdr.utils import merge_process_results multi_as_single_modus = False diff --git a/examples/example_local_data.py b/examples/example_local_data.py index 834249f..3072625 100644 --- a/examples/example_local_data.py +++ b/examples/example_local_data.py @@ -15,9 +15,9 @@ # Example how to use the Aligner rel_dist = 1 dict_results = aligner.process_dict_thematic( - relevant_distance=rel_dist, - od_strategy=OpenbaarDomeinStrategy.SNAP_FULL_AREA_ALL_SIDE, - ) + relevant_distance=rel_dist, + od_strategy=OpenbaarDomeinStrategy.SNAP_FULL_AREA_ALL_SIDE, + ) aligner.export_results("output/") - #show_map(dict_results, aligner.dict_thematic, aligner.dict_reference) + # show_map(dict_results, aligner.dict_thematic, aligner.dict_reference) diff --git a/examples/example_multipolygon.py b/examples/example_multipolygon.py index 946e4f3..bc7b818 100644 --- a/examples/example_multipolygon.py +++ b/examples/example_multipolygon.py @@ -10,9 +10,9 @@ # Load thematic data -aligner0.load_thematic_data(GeoJsonFileLoader( - "../tests/testdata/multipolygon.geojson", "theme_identifier" -)) +aligner0.load_thematic_data( + GeoJsonFileLoader("../tests/testdata/multipolygon.geojson", "theme_identifier") +) aligner0.dict_thematic = multipolygons_to_singles(aligner0.dict_thematic) aligner0.load_thematic_data( DictLoader( diff --git a/examples/example_predictor.py b/examples/example_predictor.py index 662ac59..20f5528 100644 --- a/examples/example_predictor.py +++ b/examples/example_predictor.py @@ -4,7 +4,6 @@ from brdr.enums import GRBType from brdr.grb import GRBActualLoader from brdr.loader import GeoJsonFileLoader -from examples import show_map # Press the green button in the gutter to run the script. 
if __name__ == "__main__": diff --git a/examples/example_predictor_double_prediction.py b/examples/example_predictor_double_prediction.py index a6496f3..daf7fa9 100644 --- a/examples/example_predictor_double_prediction.py +++ b/examples/example_predictor_double_prediction.py @@ -4,8 +4,7 @@ from brdr.aligner import Aligner from brdr.enums import GRBType from brdr.grb import GRBActualLoader -from brdr.loader import GeoJsonFileLoader, DictLoader -from examples import show_map +from brdr.loader import DictLoader # Press the green button in the gutter to run the script. if __name__ == "__main__": diff --git a/examples/example_readme.py b/examples/example_readme.py index cf1d19f..e890130 100644 --- a/examples/example_readme.py +++ b/examples/example_readme.py @@ -19,9 +19,15 @@ loader = DictLoader(reference_dict) aligner.load_reference_data(loader) # EXECUTE THE ALIGNMENT -relevant_distance=1 +relevant_distance = 1 process_result = aligner.process_dict_thematic(relevant_distance=relevant_distance) # PRINT RESULTS IN WKT print("result: " + process_result["theme_id_1"][relevant_distance]["result"].wkt) -print("added area: " + process_result["theme_id_1"][relevant_distance]["result_diff_plus"].wkt) -print("removed area: " + process_result["theme_id_1"][relevant_distance]["result_diff_min"].wkt) +print( + "added area: " + + process_result["theme_id_1"][relevant_distance]["result_diff_plus"].wkt +) +print( + "removed area: " + + process_result["theme_id_1"][relevant_distance]["result_diff_min"].wkt +) diff --git a/examples/example_refactor_dict_series.py b/examples/example_refactor_dict_series.py index c40182e..e831542 100644 --- a/examples/example_refactor_dict_series.py +++ b/examples/example_refactor_dict_series.py @@ -1,4 +1,3 @@ - from brdr.aligner import Aligner from brdr.enums import GRBType from brdr.grb import GRBActualLoader @@ -11,7 +10,7 @@ # Initiate brdr aligner = Aligner() # Load thematic data & reference data -aanduidingsobjecten =[117798,116800,117881] +aanduidingsobjecten = [117798, 116800, 117881] loader = OnroerendErfgoedLoader(aanduidingsobjecten) aligner.load_thematic_data(loader) @@ -19,10 +18,9 @@ aligner.load_reference_data(loader) test = aligner.process_dict_thematic() -test = aligner.process_series([1,2,3]) +test = aligner.process_series([1, 2, 3]) test = aligner.predictor() fcs = aligner.get_series_as_geojson(formula=True) -print (test) -print (fcs) -print (fcs["result"]) - +print(test) +print(fcs) +print(fcs["result"]) diff --git a/examples/example_speedtest.py b/examples/example_speedtest.py index cdaf536..7e31a2b 100644 --- a/examples/example_speedtest.py +++ b/examples/example_speedtest.py @@ -1,5 +1,6 @@ import statistics from datetime import datetime + from brdr.aligner import Aligner from brdr.loader import GeoJsonFileLoader diff --git a/examples/example_update_to_actual_grb.py b/examples/example_update_to_actual_grb.py index 7606c5b..a701da1 100644 --- a/examples/example_update_to_actual_grb.py +++ b/examples/example_update_to_actual_grb.py @@ -3,7 +3,7 @@ from brdr.grb import update_to_actual_grb from brdr.loader import GeoJsonFileLoader -#Create a featurecollection (aligned on 2022), to use for the 'update_to_actual_grb' +# Create a featurecollection (aligned on 2022), to use for the 'update_to_actual_grb' base_year = "2022" base_aligner = Aligner() name_thematic_id = "theme_identifier" @@ -14,11 +14,13 @@ ) base_process_result = base_aligner.process_dict_thematic(relevant_distance=2) fcs = base_aligner.get_results_as_geojson(formula=True) 
-featurecollection_base_result= fcs["result"] -print (featurecollection_base_result) -#Update Featurecollection to actual version -featurecollection = update_to_actual_grb(featurecollection_base_result,base_aligner.name_thematic_id) -#Print results +featurecollection_base_result = fcs["result"] +print(featurecollection_base_result) +# Update Featurecollection to actual version +featurecollection = update_to_actual_grb( + featurecollection_base_result, base_aligner.name_thematic_id +) +# Print results for feature in featurecollection["result"]["features"]: print( feature["properties"][name_thematic_id] diff --git a/tests/test_aligner.py b/tests/test_aligner.py index a640ea7..f3c7696 100644 --- a/tests/test_aligner.py +++ b/tests/test_aligner.py @@ -52,12 +52,14 @@ def test_grid_bounds_2(self): def test_export_results(self): aligner = Aligner() - aligner.load_thematic_data(DictLoader - ({"theme_id_1": from_wkt("POLYGON ((0 0, 0 9, 5 10, 10 0, 0 0))")} - )) - aligner.load_reference_data(DictLoader( - {"ref_id_1": from_wkt("POLYGON ((0 1, 0 10,8 10,10 1,0 1))")} - )) + aligner.load_thematic_data( + DictLoader( + {"theme_id_1": from_wkt("POLYGON ((0 0, 0 9, 5 10, 10 0, 0 0))")} + ) + ) + aligner.load_reference_data( + DictLoader({"ref_id_1": from_wkt("POLYGON ((0 1, 0 10,8 10,10 1,0 1))")}) + ) aligner.process_dict_thematic() path = "./tmp/" aligner.export_results(path=path) @@ -123,7 +125,6 @@ def test_predictor(self): ) self.assertEqual(len(dict_predicted), len(thematic_dict)) - def test_predictor_double_prediction(self): """ Test if a double prediction is filtered out of the prediction results. @@ -132,8 +133,13 @@ def test_predictor_double_prediction(self): # Initiate an Aligner aligner = Aligner() # Load thematic data & reference data - loader = DictLoader({"id1": from_wkt( - "MultiPolygon Z (((138430.4033999964594841 194082.86080000177025795 0, 138422.19659999758005142 194080.36510000005364418 0, 138419.01550000160932541 194079.34930000081658363 0, 138412.59849999845027924 194077.14139999821782112 0, 138403.65579999983310699 194074.06430000066757202 0, 138402.19910000264644623 194077.67480000108480453 0, 138401.83420000225305557 194078.57939999923110008 0, 138400.89329999685287476 194080.91140000149607658 0, 138400.31650000065565109 194080.67880000174045563 0, 138399.27300000190734863 194083.37680000066757202 0, 138405.93310000002384186 194085.95410000160336494 0, 138413.51049999892711639 194088.80620000138878822 0, 138427.25680000334978104 194094.29969999939203262 0, 138430.4033999964594841 194082.86080000177025795 0)))")}) + loader = DictLoader( + { + "id1": from_wkt( + "MultiPolygon Z (((138430.4033999964594841 194082.86080000177025795 0, 138422.19659999758005142 194080.36510000005364418 0, 138419.01550000160932541 194079.34930000081658363 0, 138412.59849999845027924 194077.14139999821782112 0, 138403.65579999983310699 194074.06430000066757202 0, 138402.19910000264644623 194077.67480000108480453 0, 138401.83420000225305557 194078.57939999923110008 0, 138400.89329999685287476 194080.91140000149607658 0, 138400.31650000065565109 194080.67880000174045563 0, 138399.27300000190734863 194083.37680000066757202 0, 138405.93310000002384186 194085.95410000160336494 0, 138413.51049999892711639 194088.80620000138878822 0, 138427.25680000334978104 194094.29969999939203262 0, 138430.4033999964594841 194082.86080000177025795 0)))" + ) + } + ) aligner.load_thematic_data(loader) loader = GRBActualLoader(grb_type=GRBType.ADP, partition=1000, aligner=aligner) aligner.load_reference_data(loader) @@ -158,8 
+164,10 @@ def test_load_reference_data_grb_actual_adp(self): } self.sample_aligner.load_thematic_data(DictLoader(thematic_dict)) # LOAD REFERENCE DICTIONARY - self.sample_aligner.load_reference_data(GRBActualLoader(aligner=self.sample_aligner, - grb_type=GRBType.ADP, partition=1000) + self.sample_aligner.load_reference_data( + GRBActualLoader( + aligner=self.sample_aligner, grb_type=GRBType.ADP, partition=1000 + ) ) self.assertGreater(len(self.sample_aligner.dict_reference), 0) @@ -176,8 +184,10 @@ def test_load_reference_data_grb_actual_gbg(self): } self.sample_aligner.load_thematic_data(DictLoader(thematic_dict)) # LOAD REFERENCE DICTIONARY - self.sample_aligner.load_reference_data(GRBActualLoader(aligner=self.sample_aligner, - grb_type=GRBType.GBG, partition=1000) + self.sample_aligner.load_reference_data( + GRBActualLoader( + aligner=self.sample_aligner, grb_type=GRBType.GBG, partition=1000 + ) ) self.assertGreater(len(self.sample_aligner.dict_reference), 0) @@ -194,8 +204,10 @@ def test_load_reference_data_grb_actual_knw(self): } self.sample_aligner.load_thematic_data(DictLoader(thematic_dict)) # LOAD REFERENCE DICTIONARY - self.sample_aligner.load_reference_data(GRBActualLoader(aligner=self.sample_aligner, - grb_type=GRBType.KNW, partition=1000) + self.sample_aligner.load_reference_data( + GRBActualLoader( + aligner=self.sample_aligner, grb_type=GRBType.KNW, partition=1000 + ) ) self.sample_aligner.process_dict_thematic() self.assertGreaterEqual(len(self.sample_aligner.dict_reference), 0) @@ -238,23 +250,29 @@ def test_process_interior_ring(self): } self.sample_aligner.load_thematic_data(DictLoader(thematic_dict)) # LOAD REFERENCE DICTIONARY - self.sample_aligner.load_reference_data(GRBActualLoader(aligner=self.sample_aligner, - grb_type=GRBType.GBG, partition=1000) + self.sample_aligner.load_reference_data( + GRBActualLoader( + aligner=self.sample_aligner, grb_type=GRBType.GBG, partition=1000 + ) ) result_dict = self.sample_aligner.process_dict_thematic() self.assertEqual(len(result_dict), len(thematic_dict)) def test_process_circle(self): - #TODO + # TODO geometry = Point(0, 0).buffer(3) thematic_dict = {"key": geometry} self.sample_aligner.load_thematic_data(DictLoader(thematic_dict)) # LOAD REFERENCE DICTIONARY - self.sample_aligner.load_reference_data(GRBActualLoader(aligner=self.sample_aligner, - grb_type=GRBType.GBG, partition=1000) + self.sample_aligner.load_reference_data( + GRBActualLoader( + aligner=self.sample_aligner, grb_type=GRBType.GBG, partition=1000 + ) + ) + relevant_distance = 1 + results_dict = self.sample_aligner.process_dict_thematic( + relevant_distance=relevant_distance ) - relevant_distance=1 - results_dict = self.sample_aligner.process_dict_thematic(relevant_distance=relevant_distance) self.assertEqual(geometry, results_dict["key"][relevant_distance]["result"]) def test__prepare_thematic_data(self): @@ -298,11 +316,13 @@ def test__prepare_thematic_data(self): self.assertGreater(len(aligner.dict_thematic), 0) def test_get_reference_as_geojson(self): - self.sample_aligner.load_thematic_data(DictLoader - ({"theme_id_1": from_wkt("POLYGON ((0 0, 0 9, 5 10, 10 0, 0 0))")} - )) - self.sample_aligner.load_reference_data(DictLoader( - {"ref_id_1": from_wkt("POLYGON ((0 1, 0 10,8 10,10 1,0 1))")}) + self.sample_aligner.load_thematic_data( + DictLoader( + {"theme_id_1": from_wkt("POLYGON ((0 0, 0 9, 5 10, 10 0, 0 0))")} + ) + ) + self.sample_aligner.load_reference_data( + DictLoader({"ref_id_1": from_wkt("POLYGON ((0 1, 0 10,8 10,10 1,0 1))")}) ) 
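
# An illustrative sketch of the three GRBActualLoader variants exercised by the tests above:
# grb_type selects which actual GRB layer is downloaded as reference data (ADP parcels, GBG
# buildings, KNW constructions). Running it needs access to the GRB web services, and the
# placeholder polygon below only marks a small extent in Lambert 72 coordinates.
from shapely import from_wkt
from brdr.aligner import Aligner
from brdr.enums import GRBType
from brdr.grb import GRBActualLoader
from brdr.loader import DictLoader

thematic_dict = {
    "theme_id_1": from_wkt(
        "POLYGON ((174000 171000, 174100 171000, 174100 171100, "
        "174000 171100, 174000 171000))"
    )
}
for grb_type in (GRBType.ADP, GRBType.GBG, GRBType.KNW):
    aligner = Aligner()
    aligner.load_thematic_data(DictLoader(thematic_dict))
    aligner.load_reference_data(
        GRBActualLoader(aligner=aligner, grb_type=grb_type, partition=1000)
    )
    # Each loader fills aligner.dict_reference with the downloaded reference geometries.
    print(grb_type, len(aligner.dict_reference))
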
self.sample_aligner.process_dict_thematic() self.sample_aligner.get_reference_as_geojson() @@ -310,14 +330,22 @@ def test_get_reference_as_geojson(self): def test_fully_aligned_input(self): aligned_shape = from_wkt("POLYGON ((0 0, 0 9, 5 10, 10 0, 0 0))") loader = DictLoader({"theme_id_1": aligned_shape}) - self.sample_aligner.load_thematic_data(DictLoader({"theme_id_1": aligned_shape})) + self.sample_aligner.load_thematic_data( + DictLoader({"theme_id_1": aligned_shape}) + ) self.sample_aligner.load_reference_data(DictLoader({"ref_id_1": aligned_shape})) relevant_distance = 1 - result = self.sample_aligner.process_dict_thematic(relevant_distance=relevant_distance) + result = self.sample_aligner.process_dict_thematic( + relevant_distance=relevant_distance + ) assert result["theme_id_1"][relevant_distance].get("result") == aligned_shape assert result["theme_id_1"][relevant_distance].get("result_diff") == Polygon() - assert result["theme_id_1"][relevant_distance].get("result_diff_min") == Polygon() - assert result["theme_id_1"][relevant_distance].get("result_diff_plus") == Polygon() + assert ( + result["theme_id_1"][relevant_distance].get("result_diff_min") == Polygon() + ) + assert ( + result["theme_id_1"][relevant_distance].get("result_diff_plus") == Polygon() + ) def test_fully_aligned_geojson_output(self): aligned_shape = from_wkt( @@ -338,7 +366,9 @@ def test_fully_aligned_geojson_output(self): "173463.11530961000244133 174423.83310307000647299)))" ) - self.sample_aligner.load_thematic_data(DictLoader({"theme_id_1": aligned_shape})) + self.sample_aligner.load_thematic_data( + DictLoader({"theme_id_1": aligned_shape}) + ) self.sample_aligner.load_reference_data(DictLoader({"ref_id_1": aligned_shape})) self.sample_aligner.process_dict_thematic() fcs = self.sample_aligner.get_results_as_geojson(formula=True) diff --git a/tests/test_geometry_utils.py b/tests/test_geometry_utils.py index 4aef4f3..fa38be4 100644 --- a/tests/test_geometry_utils.py +++ b/tests/test_geometry_utils.py @@ -4,11 +4,11 @@ from shapely.geometry import Point from shapely.geometry import Polygon +from brdr.geometry_utils import _grid_bounds from brdr.geometry_utils import buffer_neg from brdr.geometry_utils import buffer_neg_pos from brdr.geometry_utils import buffer_pos from brdr.geometry_utils import get_partitions -from brdr.geometry_utils import _grid_bounds from brdr.geometry_utils import safe_difference from brdr.geometry_utils import safe_intersection from brdr.geometry_utils import safe_symmetric_difference diff --git a/tests/test_loader.py b/tests/test_loader.py index f654dc2..b29e92f 100644 --- a/tests/test_loader.py +++ b/tests/test_loader.py @@ -34,7 +34,9 @@ def test_load_thematic_data_by_url(requests_mock, haspengouw_geojson): status=200, ) aligner = Aligner() - aligner.load_thematic_data(GeoJsonUrlLoader("https://mock.com/haspengouw.geojson", "Id")) + aligner.load_thematic_data( + GeoJsonUrlLoader("https://mock.com/haspengouw.geojson", "Id") + ) assert aligner.dict_thematic is not None @@ -47,6 +49,8 @@ def test_load_reference_data_url(requests_mock, haspengouw_geojson): ) aligner = Aligner() - aligner.load_reference_data(GeoJsonUrlLoader("https://mock.com/haspengouw.geojson", "Id")) + aligner.load_reference_data( + GeoJsonUrlLoader("https://mock.com/haspengouw.geojson", "Id") + ) assert aligner.dict_reference is not None diff --git a/tests/test_oe.py b/tests/test_oe.py index eea585c..82943fe 100644 --- a/tests/test_oe.py +++ b/tests/test_oe.py @@ -1,24 +1,7 @@ import unittest -from datetime import date, 
timedelta - -import numpy as np -from shapely import Polygon, from_wkt from brdr.aligner import Aligner -from brdr.enums import GRBType -from brdr.grb import ( - get_last_version_date, - is_grb_changed, - get_geoms_affected_by_grb_change, - evaluate, - GRBActualLoader, - GRBFiscalParcelLoader, -) -from brdr.loader import DictLoader from brdr.oe import OnroerendErfgoedLoader, OEType -from brdr.utils import ( - get_series_geojson_dict, -) class TestOE(unittest.TestCase): diff --git a/tests/test_utils.py b/tests/test_utils.py index 6d106f7..b4c4596 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -5,10 +5,10 @@ from shapely.geometry import Polygon from brdr.constants import MULTI_SINGLE_ID_SEPARATOR -from brdr.oe import get_oe_dict_by_ids, OEType +from brdr.oe import get_oe_dict_by_ids from brdr.typings import ProcessResult from brdr.utils import diffs_from_dict_series -#from brdr.utils import filter_dict_by_key +# from brdr.utils import filter_dict_by_key from brdr.utils import get_breakpoints_zerostreak from brdr.utils import get_collection from brdr.utils import merge_process_results From 0ef0c35fa0cba8c3f1b5e9d90060747df94051cf Mon Sep 17 00:00:00 2001 From: dieuska Date: Wed, 18 Sep 2024 16:07:09 +0200 Subject: [PATCH 21/35] reformat --- brdr/utils.py | 7 +++-- examples/__init__.py | 9 +++---- examples/example_ao.py | 2 +- examples/example_eo.py | 2 +- examples/example_evaluate.py | 6 +++-- examples/example_evaluate_ao.py | 8 +++--- examples/example_evaluate_multi_to_single.py | 10 ++++--- examples/example_parcel_change_detector.py | 2 +- examples/example_parcel_vs_building.py | 24 ++++++++++------- .../example_predictor_double_prediction.py | 8 +++++- examples/example_speedtest.py | 24 ++++++++--------- examples/stats_snapping_distance_creation.py | 8 ++++-- tests/test_examples.py | 4 +-- tests/test_integration.py | 14 +++++----- tests/test_utils.py | 27 ++++++++++++------- 15 files changed, 91 insertions(+), 64 deletions(-) diff --git a/brdr/utils.py b/brdr/utils.py index 68ee0d1..31d0343 100644 --- a/brdr/utils.py +++ b/brdr/utils.py @@ -483,8 +483,7 @@ def merge_process_results( result_dict: dict[str, dict[float, ProcessResult]] ) -> dict[str, dict[float, ProcessResult]]: """ - #TODO: function can be optimised. At the moment it is unioned element by element. Possible to collect the elements and union at the end - Merges geometries in a dictionary from multiple themes into a single theme. + Merges geometries in a dictionary from multiple themes into a single theme. Args: result_dict (dict): A dictionary where keys are theme IDs and values are process results @@ -493,7 +492,7 @@ def merge_process_results( theme IDs and values are merged geometries. 
""" - grouped_results: dict[str, ProcessResult] = {} + grouped_results: dict[str, dict[float, ProcessResult]] = {} for id_theme, dict_results in result_dict.items(): id_theme_global = id_theme.split(MULTI_SINGLE_ID_SEPARATOR)[0] @@ -509,4 +508,4 @@ def merge_process_results( grouped_results[id_theme_global][rel_dist][key] = unary_union( # noqa [existing, geom] ) - return grouped_results + return grouped_results \ No newline at end of file diff --git a/examples/__init__.py b/examples/__init__.py index ef813f6..5537c72 100644 --- a/examples/__init__.py +++ b/examples/__init__.py @@ -99,7 +99,7 @@ def show_map( dict_results_by_distance = {} for theme_id, dist_result in dict_results.items(): for rel_dist, processresults in dist_result.items(): - dict_results_by_distance[rel_dist]={} + dict_results_by_distance[rel_dist] = {} dict_results_by_distance[rel_dist][theme_id] = processresults len_series = len(dict_results_by_distance.keys()) @@ -124,7 +124,7 @@ def show_map( def print_formula(dict_results, aligner): for theme_id, dist_results in dict_results.items(): - for rel_dist,processresults in dist_results.items(): + for rel_dist, processresults in dist_results.items(): print( "--------Formula for ID " + str(theme_id) @@ -132,9 +132,7 @@ def print_formula(dict_results, aligner): + str(rel_dist) + "--------------" ) - print( - aligner.get_formula(processresults["result"]) - ) + print(aligner.get_formula(processresults["result"])) return @@ -156,6 +154,7 @@ def plot_series( plt.show() return + def _processresult_to_dicts(processresult): """ Transforms a dictionary with all ProcessResults to individual dictionaries of the diff --git a/examples/example_ao.py b/examples/example_ao.py index 23e2c6e..36a5a8b 100644 --- a/examples/example_ao.py +++ b/examples/example_ao.py @@ -13,7 +13,7 @@ # Initiate brdr aligner = Aligner() # Load thematic data & reference data - aanduidingsobjecten =[117798,116800,117881] + aanduidingsobjecten = [117798, 116800, 117881] loader = OnroerendErfgoedLoader(aanduidingsobjecten) aligner.load_thematic_data(loader) diff --git a/examples/example_eo.py b/examples/example_eo.py index 6ba5c23..d6b783c 100644 --- a/examples/example_eo.py +++ b/examples/example_eo.py @@ -29,7 +29,7 @@ ] loader = OnroerendErfgoedLoader(objectids=erfgoedobjecten, oetype=OEType.EO) aligner.load_thematic_data(loader) - aligner.load_reference_data(GRBActualLoader(aligner=aligner,grb_type=GRBType.ADP)) + aligner.load_reference_data(GRBActualLoader(aligner=aligner, grb_type=GRBType.ADP)) series = np.arange(0, 200, 20, dtype=int) / 100 # predict which relevant distances are interesting to propose as resulting geometry diff --git a/examples/example_evaluate.py b/examples/example_evaluate.py index b5d5925..9bf70ed 100644 --- a/examples/example_evaluate.py +++ b/examples/example_evaluate.py @@ -99,8 +99,10 @@ def fid_to_geojson(geojson): base_aligner.load_reference_data( GRBFiscalParcelLoader(year=base_year, aligner=base_aligner) ) -relevant_distance=2 -base_process_result = base_aligner.process_dict_thematic(relevant_distance=relevant_distance) +relevant_distance = 2 +base_process_result = base_aligner.process_dict_thematic( + relevant_distance=relevant_distance +) thematic_dict_formula = {} thematic_dict_result = {} for key in base_process_result: diff --git a/examples/example_evaluate_ao.py b/examples/example_evaluate_ao.py index 01b6cf0..da62bed 100644 --- a/examples/example_evaluate_ao.py +++ b/examples/example_evaluate_ao.py @@ -16,15 +16,17 @@ base_aligner = Aligner() # = 
OnroerendErfgoedLoader([125610,148305,127615,122316,120153,124699,115489,120288,120387,124762,148143,116141]) -#loader = OnroerendErfgoedLoader([10047, 10048, 10049, 10050, 10051, 10056]) +# loader = OnroerendErfgoedLoader([10047, 10048, 10049, 10050, 10051, 10056]) loader = OnroerendErfgoedLoader([120288]) base_aligner.load_thematic_data(loader) base_year = "2022" base_aligner.load_reference_data( GRBFiscalParcelLoader(year=base_year, aligner=base_aligner) ) -relevant_distance=3 -base_process_result = base_aligner.process_dict_thematic(relevant_distance=relevant_distance) +relevant_distance = 3 +base_process_result = base_aligner.process_dict_thematic( + relevant_distance=relevant_distance +) thematic_dict_formula = {} thematic_dict_result = {} for key in base_process_result: diff --git a/examples/example_evaluate_multi_to_single.py b/examples/example_evaluate_multi_to_single.py index e0e9add..894921e 100644 --- a/examples/example_evaluate_multi_to_single.py +++ b/examples/example_evaluate_multi_to_single.py @@ -12,7 +12,7 @@ from brdr.oe import OnroerendErfgoedLoader from brdr.utils import get_series_geojson_dict -#from brdr.utils import merge_process_results +# from brdr.utils import merge_process_results multi_as_single_modus = False @@ -26,9 +26,11 @@ base_aligner.load_reference_data( GRBFiscalParcelLoader(year=base_year, aligner=base_aligner) ) -relevant_distance=2 -base_process_result = base_aligner.process_dict_thematic(relevant_distance=relevant_distance) -#base_process_result = merge_process_results(base_process_result) +relevant_distance = 2 +base_process_result = base_aligner.process_dict_thematic( + relevant_distance=relevant_distance +) +# base_process_result = merge_process_results(base_process_result) thematic_dict_formula = {} thematic_dict_result = {} diff --git a/examples/example_parcel_change_detector.py b/examples/example_parcel_change_detector.py index 64a54e9..983cafa 100644 --- a/examples/example_parcel_change_detector.py +++ b/examples/example_parcel_change_detector.py @@ -27,7 +27,7 @@ crs = "EPSG:31370" limit = 10000 # bbox = "172800,170900,173000,171100" -bbox = [172000,172000,174000,174000] +bbox = [172000, 172000, 174000, 174000] # bbox = "170000,170000,175000,174900" # bbox = "100000,195000,105000,195900" # bbox = "150000,210000,155000,214900" diff --git a/examples/example_parcel_vs_building.py b/examples/example_parcel_vs_building.py index 24dd775..5799aee 100644 --- a/examples/example_parcel_vs_building.py +++ b/examples/example_parcel_vs_building.py @@ -13,20 +13,24 @@ # Initiate brdr aligner_x = Aligner() # Load thematic data & reference data (parcels) - aligner_x.load_thematic_data(GeoJsonFileLoader( - "../tests/testdata/test_parcel_vs_building.geojson", "theme_id" - )) - aligner_x.load_reference_data(GRBActualLoader - (grb_type=GRBType.ADP, partition=1000,aligner=aligner_x) + aligner_x.load_thematic_data( + GeoJsonFileLoader( + "../tests/testdata/test_parcel_vs_building.geojson", "theme_id" + ) + ) + aligner_x.load_reference_data( + GRBActualLoader(grb_type=GRBType.ADP, partition=1000, aligner=aligner_x) ) # gebruik de actuele adp-percelen adp= administratieve percelen aligner_y = Aligner() # Load thematic data & reference data (buildings) - aligner_y.load_thematic_data(GeoJsonFileLoader( - "../tests/testdata/test_parcel_vs_building.geojson", "theme_id" - )) - aligner_y.load_reference_data(GRBActualLoader - (grb_type=GRBType.GBG, partition=1000,aligner=aligner_y) + aligner_y.load_thematic_data( + GeoJsonFileLoader( + 
"../tests/testdata/test_parcel_vs_building.geojson", "theme_id" + ) + ) + aligner_y.load_reference_data( + GRBActualLoader(grb_type=GRBType.GBG, partition=1000, aligner=aligner_y) ) # gebruik de actuele adp-percelen adp= administratieve percelen # Example how to use a series (for histogram) diff --git a/examples/example_predictor_double_prediction.py b/examples/example_predictor_double_prediction.py index daf7fa9..4f92571 100644 --- a/examples/example_predictor_double_prediction.py +++ b/examples/example_predictor_double_prediction.py @@ -16,7 +16,13 @@ # Initiate an Aligner aligner = Aligner() # Load thematic data & reference data - loader = DictLoader({"id1": from_wkt("MultiPolygon Z (((138430.4033999964594841 194082.86080000177025795 0, 138422.19659999758005142 194080.36510000005364418 0, 138419.01550000160932541 194079.34930000081658363 0, 138412.59849999845027924 194077.14139999821782112 0, 138403.65579999983310699 194074.06430000066757202 0, 138402.19910000264644623 194077.67480000108480453 0, 138401.83420000225305557 194078.57939999923110008 0, 138400.89329999685287476 194080.91140000149607658 0, 138400.31650000065565109 194080.67880000174045563 0, 138399.27300000190734863 194083.37680000066757202 0, 138405.93310000002384186 194085.95410000160336494 0, 138413.51049999892711639 194088.80620000138878822 0, 138427.25680000334978104 194094.29969999939203262 0, 138430.4033999964594841 194082.86080000177025795 0)))")}) + loader = DictLoader( + { + "id1": from_wkt( + "MultiPolygon Z (((138430.4033999964594841 194082.86080000177025795 0, 138422.19659999758005142 194080.36510000005364418 0, 138419.01550000160932541 194079.34930000081658363 0, 138412.59849999845027924 194077.14139999821782112 0, 138403.65579999983310699 194074.06430000066757202 0, 138402.19910000264644623 194077.67480000108480453 0, 138401.83420000225305557 194078.57939999923110008 0, 138400.89329999685287476 194080.91140000149607658 0, 138400.31650000065565109 194080.67880000174045563 0, 138399.27300000190734863 194083.37680000066757202 0, 138405.93310000002384186 194085.95410000160336494 0, 138413.51049999892711639 194088.80620000138878822 0, 138427.25680000334978104 194094.29969999939203262 0, 138430.4033999964594841 194082.86080000177025795 0)))" + ) + } + ) aligner.load_thematic_data(loader) loader = GRBActualLoader(grb_type=GRBType.ADP, partition=1000, aligner=aligner) aligner.load_reference_data(loader) diff --git a/examples/example_speedtest.py b/examples/example_speedtest.py index 7e31a2b..1b368a8 100644 --- a/examples/example_speedtest.py +++ b/examples/example_speedtest.py @@ -6,7 +6,7 @@ # Initiate brdr aligner = Aligner(relevant_distance=2) -aligner.multi_as_single_modus=True +aligner.multi_as_single_modus = True # Load local thematic data and reference data # loader = GeoJsonFileLoader( # "../tests/testdata/theme.geojson", "theme_identifier" @@ -18,24 +18,24 @@ loader = GeoJsonFileLoader("../tests/testdata/reference_leuven.geojson", "capakey") aligner.load_reference_data(loader) -times=[] +times = [] for iter in range(1, 3): - starttime= datetime.now() + starttime = datetime.now() # Example how to use the Aligner aligner.predictor() fcs = aligner.get_series_as_geojson(formula=True) - endtime=datetime.now() - seconds = (endtime-starttime).total_seconds() + endtime = datetime.now() + seconds = (endtime - starttime).total_seconds() times.append(seconds) print(seconds) -print ("duration: " + str(times)) +print("duration: " + str(times)) -print ("Min: " + str(min(times))) -print ("Max: " + str(max(times))) -print 
("Mean: " + str(statistics.mean(times))) -print ("Median: " + str(statistics.median(times))) -print ("Stdv: " + str(statistics.stdev(times))) +print("Min: " + str(min(times))) +print("Max: " + str(max(times))) +print("Mean: " + str(statistics.mean(times))) +print("Median: " + str(statistics.median(times))) +print("Stdv: " + str(statistics.stdev(times))) # #BEFORE REFACTORING dict_series # duration: [25.652311, 27.894154, 19.641618, 19.929254, 44.754033, 25.218422, 23.167992, 18.649832, 22.899336, 52.108296] @@ -51,4 +51,4 @@ # Max: 21.313991 # Mean: 17.981391 # Median: 17.8996155 -# Stdv: 1.504459449440969 \ No newline at end of file +# Stdv: 1.504459449440969 diff --git a/examples/stats_snapping_distance_creation.py b/examples/stats_snapping_distance_creation.py index ae07146..ae8fdbd 100644 --- a/examples/stats_snapping_distance_creation.py +++ b/examples/stats_snapping_distance_creation.py @@ -17,8 +17,12 @@ print(array_relevant_distance) x = Aligner() -x.load_thematic_data(GeoJsonFileLoader("../tests/testdata/theme_leuven.geojson", "aanduid_id")) -x.load_reference_data(GeoJsonFileLoader("../tests/testdata/reference_leuven.geojson", "capakey")) +x.load_thematic_data( + GeoJsonFileLoader("../tests/testdata/theme_leuven.geojson", "aanduid_id") +) +x.load_reference_data( + GeoJsonFileLoader("../tests/testdata/reference_leuven.geojson", "capakey") +) with open("../tests/output/stats" + time + ".csv", "w", newline="") as csvfile: writer = csv.writer( csvfile, delimiter=";" diff --git a/tests/test_examples.py b/tests/test_examples.py index 7889b67..7c9940c 100644 --- a/tests/test_examples.py +++ b/tests/test_examples.py @@ -213,8 +213,8 @@ def test_example_predictor(self): # Load thematic data & reference data dict_theme = get_oe_dict_by_ids([131635]) aligner.load_thematic_data(DictLoader(dict_theme)) - aligner.load_reference_data(GRBActualLoader(aligner=aligner, - grb_type=GRBType.GBG, partition=1000) + aligner.load_reference_data( + GRBActualLoader(aligner=aligner, grb_type=GRBType.GBG, partition=1000) ) series = np.arange(0, 300, 10, dtype=int) / 100 # predict which relevant distances are interesting to propose as resulting diff --git a/tests/test_integration.py b/tests/test_integration.py index 4209298..a269cbf 100644 --- a/tests/test_integration.py +++ b/tests/test_integration.py @@ -61,17 +61,17 @@ def test_webservice_brdr(self): ) dict_diffs = dict_diffs["input_id"] - serial_dict ={} - dict_results=dict_series["input_id"] - for rel_dist,process_results in dict_results.items(): - serial_dict[rel_dist]={ + serial_dict = {} + dict_results = dict_series["input_id"] + for rel_dist, process_results in dict_results.items(): + serial_dict[rel_dist] = { "result": json.loads(to_geojson(dict_results[rel_dist]["result"])), "result_diff_min": json.loads( to_geojson(dict_results[rel_dist]["result_diff_min"]) ), - "result_diff_plus": json.loads( - to_geojson(dict_results[rel_dist]["result_diff_plus"]) - ), + "result_diff_plus": json.loads( + to_geojson(dict_results[rel_dist]["result_diff_plus"]) + ), } return { diff --git a/tests/test_utils.py b/tests/test_utils.py index b4c4596..59a06da 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -8,6 +8,7 @@ from brdr.oe import get_oe_dict_by_ids from brdr.typings import ProcessResult from brdr.utils import diffs_from_dict_series + # from brdr.utils import filter_dict_by_key from brdr.utils import get_breakpoints_zerostreak from brdr.utils import get_collection @@ -131,18 +132,19 @@ def test_diffs_from_dict_series_complete(self): "theme_id2": 
Polygon([(5, 5), (15, 5), (15, 15), (5, 15)]), } dict_series = { - - "theme_id1": {10:{ + "theme_id1": { + 10: { "result": Polygon([(0, 0), (8, 0), (8, 8), (0, 8)]), "result_diff": Polygon([(2, 2), (6, 2), (6, 6), (2, 6)]), - } - }, - "theme_id2": {10:{ + } + }, + "theme_id2": { + 10: { "result": Polygon([(7, 7), (13, 7), (13, 13), (7, 13)]), "result_diff": Polygon([(9, 9), (11, 9), (11, 11), (9, 11)]), } - }, - } + }, + } expected_diffs = {"theme_id1": {10: 16.0}, "theme_id2": {10: 4.0}} assert expected_diffs == diffs_from_dict_series( @@ -166,10 +168,17 @@ def test_get_collection(self): def test_merge_process_results(self): key_1 = "key" + MULTI_SINGLE_ID_SEPARATOR + "1" key_2 = "key" + MULTI_SINGLE_ID_SEPARATOR + "2" + key_3 = "key_3" process_result_1 = ProcessResult() process_result_1["result"] = Polygon([(0, 0), (10, 0), (10, 10), (0, 10)]) process_result_2 = ProcessResult() process_result_2["result"] = Polygon([(0, 0), (8, 0), (8, 8), (0, 8)]) - testdict = {key_1: {0:process_result_1}, key_2: {0:process_result_2}} + process_result_3 = ProcessResult() + process_result_3["result"] = Polygon([(0, 0), (8, 0), (8, 8), (0, 8)]) + testdict = { + key_1: {0: process_result_1}, + key_2: {0: process_result_2}, + key_3: {0: process_result_3}, + } merged_testdict = merge_process_results(testdict) - assert len(merged_testdict.keys()) == 1 + assert len(merged_testdict.keys()) == 2 From c167b1864b44c2c03804ad9f029e9b2c69b4fd3c Mon Sep 17 00:00:00 2001 From: dieuska Date: Wed, 18 Sep 2024 16:37:58 +0200 Subject: [PATCH 22/35] moved evaluate from grb to aligner --- README.md | 2 +- brdr/aligner.py | 152 ++++++++++++++++- brdr/grb.py | 166 +------------------ examples/example_evaluate.py | 3 +- examples/example_evaluate_ao.py | 3 +- examples/example_evaluate_multi_to_single.py | 3 +- tests/test_grb.py | 3 +- 7 files changed, 158 insertions(+), 174 deletions(-) diff --git a/README.md b/README.md index 48b6693..273aa4d 100644 --- a/README.md +++ b/README.md @@ -60,6 +60,7 @@ The figure below shows: for 'predicted' interesting relevant distances (predictor) * aligner.get_formula(): Calculating a descriptive formula of a thematic boundary based on a reference layer + * evaluate(): Evaluates a thematic dictionary on equality with another version and adds a 'evaluation'-property * Exporters: * aligner.get_results_as_geojson(): Returns a collection of geojson-dictionaries with the results (resulting @@ -81,7 +82,6 @@ Besides the generic functionalities, a range of Flanders-specific functionalitie * grb.get_geoms_affected_by_grb_change(): get thematic geometries that are possibly affected by GRB-changes during a specific timespan * grb.update_to_actual_grb(): aligns the boundaries of thematic features to the actual GRB-boundaries - * grb.evaluate(): Evaluates a thematic dictionary on equality with a newer version and adds a 'evaluation'-property ### Possible application fields diff --git a/brdr/aligner.py b/brdr/aligner.py index 4078bcb..ab84dfc 100644 --- a/brdr/aligner.py +++ b/brdr/aligner.py @@ -23,7 +23,7 @@ from brdr.constants import CORR_DISTANCE from brdr.constants import DEFAULT_CRS from brdr.constants import THRESHOLD_CIRCLE_RATIO -from brdr.enums import OpenbaarDomeinStrategy +from brdr.enums import OpenbaarDomeinStrategy, Evaluation from brdr.geometry_utils import buffer_neg from brdr.geometry_utils import buffer_neg_pos from brdr.geometry_utils import buffer_pos @@ -1219,3 +1219,153 @@ def _equal_geom_in_array(geom,geom_array): if buffer_neg(safe_symmetric_difference(geom, 
g),CORR_DISTANCE).is_empty: return True return False + + + +@staticmethod +def evaluate( + actual_aligner, + dict_series, + dict_predicted, + thematic_dict_formula, + threshold_area=5, + threshold_percentage=1, + dict_unchanged=None, +): + """ + evaluate affected geometries and give attributes to evaluate and decide if new + proposals can be used + """ + if dict_unchanged is None: + dict_unchanged = {} + theme_ids = list(dict_series.keys()) + dict_evaluated_result = {} + prop_dictionary = {} + # Fill the dictionary-structure with empty values + for theme_id in theme_ids: + dict_evaluated_result[theme_id] = {} + prop_dictionary[theme_id] = {} + for dist in dict_series[theme_id].keys(): + prop_dictionary[theme_id][dist] = {} + for theme_id in dict_unchanged.keys(): + prop_dictionary[theme_id] = {} + + for theme_id, dict_results in dict_predicted.items(): + equality = False + for dist in sorted(dict_results.keys()): + if equality: + break + geomresult = dict_results[dist]["result"] + actual_formula = actual_aligner.get_formula(geomresult) + prop_dictionary[theme_id][dist]["formula"] = json.dumps(actual_formula) + base_formula = None + if theme_id in thematic_dict_formula: + base_formula = thematic_dict_formula[theme_id] + equality, prop = _check_equality( + base_formula, + actual_formula, + threshold_area, + threshold_percentage, + ) + if equality: + dict_evaluated_result[theme_id][dist] = dict_predicted[theme_id][dist] + prop_dictionary[theme_id][dist]["evaluation"] = prop + break + + evaluated_theme_ids = [theme_id for theme_id, value in dict_evaluated_result.items() if value != {}] + + # fill where no equality is found/ The biggest predicted distance is returned as + # proposal + for theme_id in theme_ids: + if theme_id not in evaluated_theme_ids: + if len(dict_predicted[theme_id].keys()) == 0: + result = dict_series[theme_id][0] + dict_evaluated_result[theme_id][0] = result + prop_dictionary[theme_id][0]["formula"] = json.dumps( + actual_aligner.get_formula(result["result"]) + ) + prop_dictionary[theme_id][0]["evaluation"] = Evaluation.NO_PREDICTION_5 + continue + # Add all predicted features so they can be manually checked + for dist in dict_predicted[theme_id].keys(): + predicted_resultset = dict_predicted[theme_id][dist] + dict_evaluated_result[theme_id][dist] = predicted_resultset + prop_dictionary[theme_id][dist]["formula"] = json.dumps( + actual_aligner.get_formula(predicted_resultset["result"]) + ) + prop_dictionary[theme_id][dist]["evaluation"] = Evaluation.TO_CHECK_4 + + for theme_id, geom in dict_unchanged.items(): + prop_dictionary[theme_id] = {0: + {"result": geom, + "evaluation": Evaluation.NO_CHANGE_6, + "formula": json.dumps(actual_aligner.get_formula(geom)) + } + } + return dict_evaluated_result, prop_dictionary + +@staticmethod +def _check_equality( + base_formula, actual_formula, threshold_area=5, threshold_percentage=1 +): + """ + function that checks if 2 formulas are equal (True,False) and adds an Evaluation + """ + if base_formula is None or actual_formula is None: + return False, Evaluation.NO_PREDICTION_5 + od_alike = False + if base_formula["reference_od"] is None and actual_formula["reference_od"] is None: + od_alike = True + elif base_formula["reference_od"] is None or actual_formula["reference_od"] is None: + od_alike = False + elif ( + abs( + base_formula["reference_od"]["area"] + - actual_formula["reference_od"]["area"] + ) + * 100 + / base_formula["reference_od"]["area"] + ) < threshold_percentage: + od_alike = True + + if ( + 
base_formula["reference_features"].keys() + == actual_formula["reference_features"].keys() + and od_alike + ): + if base_formula["full"] and base_formula["full"]: + return True, Evaluation.EQUALITY_FORMULA_GEOM_1 + + equal_reference_features = True + for key in base_formula["reference_features"].keys(): + if ( + ( + base_formula["reference_features"][key]["full"] + == actual_formula["reference_features"][key]["full"] + ) + or ( + abs( + base_formula["reference_features"][key]["area"] + - actual_formula["reference_features"][key]["area"] + ) + > threshold_area + ) + or ( + ( + abs( + base_formula["reference_features"][key]["area"] + - actual_formula["reference_features"][key]["area"] + ) + * 100 + / base_formula["reference_features"][key]["area"] + ) + > threshold_percentage + ) + ): + equal_reference_features = False + if equal_reference_features: + return True, Evaluation.EQUALITY_FORMULA_2 + if base_formula["full"] and base_formula["full"] and od_alike: + return True, Evaluation.EQUALITY_GEOM_3 + return False, Evaluation.NO_PREDICTION_5 + diff --git a/brdr/grb.py b/brdr/grb.py index 8ecf8da..3fb81eb 100644 --- a/brdr/grb.py +++ b/brdr/grb.py @@ -9,7 +9,7 @@ from shapely.geometry import shape from shapely.geometry.base import BaseGeometry -from brdr.aligner import Aligner +from brdr.aligner import Aligner, evaluate from brdr.constants import DEFAULT_CRS, LAST_VERSION_DATE, DATE_FORMAT, VERSION_DATE from brdr.constants import DOWNLOAD_LIMIT from brdr.constants import GRB_BUILDING_ID @@ -19,7 +19,6 @@ from brdr.constants import GRB_PARCEL_ID from brdr.constants import GRB_VERSION_DATE from brdr.constants import MAX_REFERENCE_BUFFER -from brdr.enums import Evaluation from brdr.enums import GRBType from brdr.geometry_utils import buffer_pos, safe_intersection from brdr.geometry_utils import create_donut @@ -330,167 +329,8 @@ def get_collection_grb_parcels_by_date( logging.debug(len(specific_date_features)) collection_specific_date["features"]=specific_date_features - - return collection_specific_date - -def evaluate( - actual_aligner, - dict_series, - dict_predicted, - thematic_dict_formula, - threshold_area=5, - threshold_percentage=1, - dict_unchanged=None, -): - """ - evaluate affected geometries and give attributes to evaluate and decide if new - proposals can be used - """ - if dict_unchanged is None: - dict_unchanged = {} - theme_ids = list(dict_series.keys()) - dict_evaluated_result = {} - prop_dictionary = {} - # Fill the dictionary-structure with empty values - for theme_id in theme_ids: - dict_evaluated_result[theme_id] = {} - prop_dictionary[theme_id] = {} - for dist in dict_series[theme_id].keys(): - prop_dictionary[theme_id][dist] = {} - for theme_id in dict_unchanged.keys(): - prop_dictionary[theme_id] = {} - - for theme_id, dict_results in dict_predicted.items(): - equality = False - for dist in sorted(dict_results.keys()): - if equality: - break - geomresult = dict_results[dist]["result"] - actual_formula = actual_aligner.get_formula(geomresult) - prop_dictionary[theme_id][dist]["formula"] = json.dumps(actual_formula) - base_formula = None - if theme_id in thematic_dict_formula: - base_formula = thematic_dict_formula[theme_id] - equality, prop = check_equality( - base_formula, - actual_formula, - threshold_area, - threshold_percentage, - ) - if equality: - dict_evaluated_result[theme_id][dist] = dict_predicted[theme_id][dist] - prop_dictionary[theme_id][dist]["evaluation"] = prop - break - - evaluated_theme_ids = [theme_id for theme_id, value in dict_evaluated_result.items() 
if value != {}] - - # fill where no equality is found/ The biggest predicted distance is returned as - # proposal - for theme_id in theme_ids: - if theme_id not in evaluated_theme_ids: - if len(dict_predicted[theme_id].keys()) == 0: - result = dict_series[theme_id][0] - dict_evaluated_result[theme_id][0] = result - prop_dictionary[theme_id][0]["formula"] = json.dumps( - actual_aligner.get_formula(result["result"]) - ) - prop_dictionary[theme_id][0]["evaluation"] = Evaluation.NO_PREDICTION_5 - continue - # Add all predicted features so they can be manually checked - for dist in dict_predicted[theme_id].keys(): - predicted_resultset = dict_predicted[theme_id][dist] - dict_evaluated_result[theme_id][dist] = predicted_resultset - prop_dictionary[theme_id][dist]["formula"] = json.dumps( - actual_aligner.get_formula(predicted_resultset["result"]) - ) - prop_dictionary[theme_id][dist]["evaluation"] = Evaluation.TO_CHECK_4 - - for theme_id, geom in dict_unchanged.items(): - prop_dictionary[theme_id] = {0: - {"result": geom, - "evaluation": Evaluation.NO_CHANGE_6, - "formula": json.dumps(actual_aligner.get_formula(geom)) - } - } - return dict_evaluated_result, prop_dictionary - - -def check_equality( - base_formula, actual_formula, threshold_area=5, threshold_percentage=1 -): - """ - function that checks if 2 formulas are equal (determined by business-logic) - """ - # TODO: research naar aanduid_id 116448 (equality na 0.5m), 120194 (1m) - # TODO: research and implementation of following ideas - # TODO: refine equality comparison, make it more generic - # TODO: Add control of OD to equality-comparison (see case aanduid_id 120288) - # ideas: - # * If result_diff smaller than 0.x --> automatic update - # * big polygons: If 'outer ring' has same formula (do net check inner side) --> - # automatic update - # ** outer ring can be calculated: 1) negative buffer 2) original - buffered - - if base_formula is None or actual_formula is None: - return False, Evaluation.NO_PREDICTION_5 - od_alike = False - if base_formula["reference_od"] is None and actual_formula["reference_od"] is None: - od_alike = True - elif base_formula["reference_od"] is None or actual_formula["reference_od"] is None: - od_alike = False - elif ( - abs( - base_formula["reference_od"]["area"] - - actual_formula["reference_od"]["area"] - ) - * 100 - / base_formula["reference_od"]["area"] - ) < threshold_percentage: - od_alike = True - - if ( - base_formula["reference_features"].keys() - == actual_formula["reference_features"].keys() - and od_alike - ): - if base_formula["full"] and base_formula["full"]: - return True, Evaluation.EQUALITY_FORMULA_GEOM_1 - - equal_reference_features = True - for key in base_formula["reference_features"].keys(): - if ( - ( - base_formula["reference_features"][key]["full"] - == actual_formula["reference_features"][key]["full"] - ) - or ( - abs( - base_formula["reference_features"][key]["area"] - - actual_formula["reference_features"][key]["area"] - ) - > threshold_area - ) - or ( - ( - abs( - base_formula["reference_features"][key]["area"] - - actual_formula["reference_features"][key]["area"] - ) - * 100 - / base_formula["reference_features"][key]["area"] - ) - > threshold_percentage - ) - ): - equal_reference_features = False - if equal_reference_features: - return True, Evaluation.EQUALITY_FORMULA_2 - if base_formula["full"] and base_formula["full"] and od_alike: - return True, Evaluation.EQUALITY_GEOM_3 - return False, Evaluation.NO_PREDICTION_5 - def update_to_actual_grb(featurecollection, 
id_theme_fieldname, formula_field="formula", max_distance_for_actualisation=2, feedback=None ): """ Function to update a thematic featurecollection to the most actual version of GRB. @@ -503,7 +343,6 @@ def update_to_actual_grb(featurecollection, id_theme_fieldname, formula_field="f last_version_date = datetime.now().date() for feature in featurecollection["features"]: - id_theme = feature["properties"][id_theme_fieldname] try: geom = shape(feature["geometry"]) @@ -527,11 +366,8 @@ def update_to_actual_grb(featurecollection, id_theme_fieldname, formula_field="f except: raise Exception(f"Problem with {LAST_VERSION_DATE}") - # if feedback.isCanceled(): - # return {} datetime_start = last_version_date datetime_end = datetime.now().date() - #thematic_dict_result = dict(dict_thematic) base_aligner_result = Aligner(feedback=feedback) base_aligner_result.load_thematic_data(DictLoader(dict_thematic)) base_aligner_result.name_thematic_id = id_theme_fieldname diff --git a/examples/example_evaluate.py b/examples/example_evaluate.py index 9bf70ed..bfd4917 100644 --- a/examples/example_evaluate.py +++ b/examples/example_evaluate.py @@ -7,12 +7,13 @@ from brdr.enums import GRBType from brdr.grb import GRBActualLoader from brdr.grb import GRBFiscalParcelLoader -from brdr.grb import evaluate +from brdr.aligner import evaluate from brdr.grb import get_geoms_affected_by_grb_change from brdr.loader import DictLoader, GeoJsonFileLoader from brdr.utils import get_series_geojson_dict + def fid_to_geojson(geojson): fid = 1 for f in geojson["features"]: diff --git a/examples/example_evaluate_ao.py b/examples/example_evaluate_ao.py index da62bed..c023317 100644 --- a/examples/example_evaluate_ao.py +++ b/examples/example_evaluate_ao.py @@ -2,11 +2,10 @@ import numpy as np -from brdr.aligner import Aligner +from brdr.aligner import Aligner, evaluate from brdr.enums import GRBType from brdr.grb import ( get_geoms_affected_by_grb_change, - evaluate, GRBFiscalParcelLoader, GRBActualLoader, ) diff --git a/examples/example_evaluate_multi_to_single.py b/examples/example_evaluate_multi_to_single.py index 894921e..4c325ea 100644 --- a/examples/example_evaluate_multi_to_single.py +++ b/examples/example_evaluate_multi_to_single.py @@ -2,11 +2,10 @@ import numpy as np -from brdr.aligner import Aligner +from brdr.aligner import Aligner, evaluate from brdr.enums import GRBType from brdr.grb import GRBActualLoader from brdr.grb import GRBFiscalParcelLoader -from brdr.grb import evaluate from brdr.grb import get_geoms_affected_by_grb_change from brdr.loader import DictLoader from brdr.oe import OnroerendErfgoedLoader diff --git a/tests/test_grb.py b/tests/test_grb.py index 8e30a53..a2257eb 100644 --- a/tests/test_grb.py +++ b/tests/test_grb.py @@ -4,13 +4,12 @@ import numpy as np from shapely import Polygon, from_wkt -from brdr.aligner import Aligner +from brdr.aligner import Aligner, evaluate from brdr.enums import GRBType, Evaluation from brdr.grb import ( get_last_version_date, is_grb_changed, get_geoms_affected_by_grb_change, - evaluate, GRBActualLoader, GRBFiscalParcelLoader, GRBSpecificDateParcelLoader, update_to_actual_grb, ) From 0bbf5368e4ec55b6da71bb2902c5ad256f7623fb Mon Sep 17 00:00:00 2001 From: dieuska Date: Wed, 18 Sep 2024 17:04:13 +0200 Subject: [PATCH 23/35] refactored evaluate-function --- brdr/aligner.py | 162 +++++++++---------- brdr/geometry_utils.py | 8 +- brdr/grb.py | 6 +- examples/example_evaluate.py | 90 +---------- examples/example_evaluate_ao.py | 12 +- examples/example_evaluate_multi_to_single.py | 
14 +- tests/test_aligner.py | 61 ++++++- tests/test_grb.py | 72 +-------- tests/test_utils.py | 1 - 9 files changed, 156 insertions(+), 270 deletions(-) diff --git a/brdr/aligner.py b/brdr/aligner.py index ab84dfc..666359d 100644 --- a/brdr/aligner.py +++ b/brdr/aligner.py @@ -698,6 +698,85 @@ def get_thematic_union(self): ) return self.thematic_union + def evaluate( + self,series, + thematic_dict_formula, + threshold_area=5, + threshold_percentage=1, + dict_unchanged=None, + ): + """ + evaluate affected geometries and give attributes to evaluate and decide if new + proposals can be used + """ + dict_series,dict_predicted,diffs = self.predictor(series) + if dict_unchanged is None: + dict_unchanged = {} + theme_ids = list(dict_series.keys()) + dict_evaluated_result = {} + prop_dictionary = {} + # Fill the dictionary-structure with empty values + for theme_id in theme_ids: + dict_evaluated_result[theme_id] = {} + prop_dictionary[theme_id] = {} + for dist in dict_series[theme_id].keys(): + prop_dictionary[theme_id][dist] = {} + for theme_id in dict_unchanged.keys(): + prop_dictionary[theme_id] = {} + + for theme_id, dict_results in dict_predicted.items(): + equality = False + for dist in sorted(dict_results.keys()): + if equality: + break + geomresult = dict_results[dist]["result"] + actual_formula = self.get_formula(geomresult) + prop_dictionary[theme_id][dist]["formula"] = json.dumps(actual_formula) + base_formula = None + if theme_id in thematic_dict_formula: + base_formula = thematic_dict_formula[theme_id] + equality, prop = _check_equality( + base_formula, + actual_formula, + threshold_area, + threshold_percentage, + ) + if equality: + dict_evaluated_result[theme_id][dist] = dict_predicted[theme_id][dist] + prop_dictionary[theme_id][dist]["evaluation"] = prop + break + + evaluated_theme_ids = [theme_id for theme_id, value in dict_evaluated_result.items() if value != {}] + + # fill where no equality is found/ The biggest predicted distance is returned as + # proposal + for theme_id in theme_ids: + if theme_id not in evaluated_theme_ids: + if len(dict_predicted[theme_id].keys()) == 0: + result = dict_series[theme_id][0] + dict_evaluated_result[theme_id][0] = result + prop_dictionary[theme_id][0]["formula"] = json.dumps( + self.get_formula(result["result"]) + ) + prop_dictionary[theme_id][0]["evaluation"] = Evaluation.NO_PREDICTION_5 + continue + # Add all predicted features so they can be manually checked + for dist in dict_predicted[theme_id].keys(): + predicted_resultset = dict_predicted[theme_id][dist] + dict_evaluated_result[theme_id][dist] = predicted_resultset + prop_dictionary[theme_id][dist]["formula"] = json.dumps( + self.get_formula(predicted_resultset["result"]) + ) + prop_dictionary[theme_id][dist]["evaluation"] = Evaluation.TO_CHECK_4 + + for theme_id, geom in dict_unchanged.items(): + prop_dictionary[theme_id] = {0: + {"result": geom, + "evaluation": Evaluation.NO_CHANGE_6, + "formula": json.dumps(actual_aligner.get_formula(geom)) + } + } + return dict_evaluated_result, prop_dictionary def _prepare_reference_data(self): """ @@ -1221,89 +1300,6 @@ def _equal_geom_in_array(geom,geom_array): return False - -@staticmethod -def evaluate( - actual_aligner, - dict_series, - dict_predicted, - thematic_dict_formula, - threshold_area=5, - threshold_percentage=1, - dict_unchanged=None, -): - """ - evaluate affected geometries and give attributes to evaluate and decide if new - proposals can be used - """ - if dict_unchanged is None: - dict_unchanged = {} - theme_ids = 
list(dict_series.keys()) - dict_evaluated_result = {} - prop_dictionary = {} - # Fill the dictionary-structure with empty values - for theme_id in theme_ids: - dict_evaluated_result[theme_id] = {} - prop_dictionary[theme_id] = {} - for dist in dict_series[theme_id].keys(): - prop_dictionary[theme_id][dist] = {} - for theme_id in dict_unchanged.keys(): - prop_dictionary[theme_id] = {} - - for theme_id, dict_results in dict_predicted.items(): - equality = False - for dist in sorted(dict_results.keys()): - if equality: - break - geomresult = dict_results[dist]["result"] - actual_formula = actual_aligner.get_formula(geomresult) - prop_dictionary[theme_id][dist]["formula"] = json.dumps(actual_formula) - base_formula = None - if theme_id in thematic_dict_formula: - base_formula = thematic_dict_formula[theme_id] - equality, prop = _check_equality( - base_formula, - actual_formula, - threshold_area, - threshold_percentage, - ) - if equality: - dict_evaluated_result[theme_id][dist] = dict_predicted[theme_id][dist] - prop_dictionary[theme_id][dist]["evaluation"] = prop - break - - evaluated_theme_ids = [theme_id for theme_id, value in dict_evaluated_result.items() if value != {}] - - # fill where no equality is found/ The biggest predicted distance is returned as - # proposal - for theme_id in theme_ids: - if theme_id not in evaluated_theme_ids: - if len(dict_predicted[theme_id].keys()) == 0: - result = dict_series[theme_id][0] - dict_evaluated_result[theme_id][0] = result - prop_dictionary[theme_id][0]["formula"] = json.dumps( - actual_aligner.get_formula(result["result"]) - ) - prop_dictionary[theme_id][0]["evaluation"] = Evaluation.NO_PREDICTION_5 - continue - # Add all predicted features so they can be manually checked - for dist in dict_predicted[theme_id].keys(): - predicted_resultset = dict_predicted[theme_id][dist] - dict_evaluated_result[theme_id][dist] = predicted_resultset - prop_dictionary[theme_id][dist]["formula"] = json.dumps( - actual_aligner.get_formula(predicted_resultset["result"]) - ) - prop_dictionary[theme_id][dist]["evaluation"] = Evaluation.TO_CHECK_4 - - for theme_id, geom in dict_unchanged.items(): - prop_dictionary[theme_id] = {0: - {"result": geom, - "evaluation": Evaluation.NO_CHANGE_6, - "formula": json.dumps(actual_aligner.get_formula(geom)) - } - } - return dict_evaluated_result, prop_dictionary - @staticmethod def _check_equality( base_formula, actual_formula, threshold_area=5, threshold_percentage=1 diff --git a/brdr/geometry_utils.py b/brdr/geometry_utils.py index 4c0aa0a..bca3996 100644 --- a/brdr/geometry_utils.py +++ b/brdr/geometry_utils.py @@ -490,9 +490,7 @@ def fill_and_remove_gaps(input_geometry, buffer_value): and empty_buffered_exterior_polygon and not exterior_polygon.is_empty ): - cleaned_geometry = safe_difference( - cleaned_geometry, exterior_polygon - ) + cleaned_geometry = safe_difference(cleaned_geometry, exterior_polygon) num_interior_rings = get_num_interior_rings(part) if num_interior_rings > 0: ix = 0 @@ -504,9 +502,7 @@ def fill_and_remove_gaps(input_geometry, buffer_value): interior_polygon, buffer_value ).is_empty if empty_buffered_interior_ring: - cleaned_geometry = safe_union( - cleaned_geometry, interior_polygon - ) + cleaned_geometry = safe_union(cleaned_geometry, interior_polygon) ix = ix + 1 ix_part = ix_part + 1 return cleaned_geometry diff --git a/brdr/grb.py b/brdr/grb.py index 3fb81eb..1189914 100644 --- a/brdr/grb.py +++ b/brdr/grb.py @@ -9,7 +9,7 @@ from shapely.geometry import shape from shapely.geometry.base import BaseGeometry 
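
# A self-contained sketch of how the refactored method is called: evaluate() now lives on the
# Aligner, runs the predictor internally for the given series of relevant distances, and
# compares each prediction against the stored base formulas. The toy DictLoader data below is
# only a placeholder for the GRB-based flow shown in the updated examples and tests.
import numpy as np
from shapely import from_wkt
from brdr.aligner import Aligner
from brdr.loader import DictLoader
from brdr.utils import get_series_geojson_dict

thematic = {"theme_id_1": from_wkt("POLYGON ((0 0, 0 9, 5 10, 10 0, 0 0))")}
reference = {"ref_id_1": from_wkt("POLYGON ((0 1, 0 10, 8 10, 10 1, 0 1))")}

# Base alignment: keep the accepted result and its descriptive formula per thematic id.
base_aligner = Aligner()
base_aligner.load_thematic_data(DictLoader(thematic))
base_aligner.load_reference_data(DictLoader(reference))
base_result = base_aligner.process_dict_thematic(relevant_distance=1)
thematic_dict_result = {key: base_result[key][1]["result"] for key in base_result}
thematic_dict_formula = {
    key: base_aligner.get_formula(geom) for key, geom in thematic_dict_result.items()
}

# Re-evaluate the stored results against the (possibly changed) reference data.
actual_aligner = Aligner()
actual_aligner.load_thematic_data(DictLoader(thematic_dict_result))
actual_aligner.load_reference_data(DictLoader(reference))
series = np.arange(0, 200, 10, dtype=int) / 100
dict_evaluated, prop_dictionary = actual_aligner.evaluate(
    series=series, thematic_dict_formula=thematic_dict_formula
)
# Each evaluated feature carries a "formula" and an "evaluation" property (Evaluation enum).
fc = get_series_geojson_dict(
    dict_evaluated,
    crs=actual_aligner.CRS,
    id_field=actual_aligner.name_thematic_id,
    series_prop_dict=prop_dictionary,
)
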
-from brdr.aligner import Aligner, evaluate +from brdr.aligner import Aligner from brdr.constants import DEFAULT_CRS, LAST_VERSION_DATE, DATE_FORMAT, VERSION_DATE from brdr.constants import DOWNLOAD_LIMIT from brdr.constants import GRB_BUILDING_ID @@ -393,8 +393,8 @@ def update_to_actual_grb(featurecollection, id_theme_fieldname, formula_field="f GRBActualLoader(grb_type=GRBType.ADP, partition=1000, aligner=actual_aligner)) series = np.arange(0, max_distance_for_actualisation * 100, 10, dtype=int) / 100 - dict_series, dict_predicted, diffs_dict = actual_aligner.predictor(series) - dict_evaluated, prop_dictionary = evaluate(actual_aligner, dict_series, dict_predicted, dict_thematic_formula, + #actual_aligner.predictor(series) + dict_evaluated, prop_dictionary = actual_aligner.evaluate(series=series,thematic_dict_formula=dict_thematic_formula, threshold_area=5, threshold_percentage=1, dict_unchanged=dict_unchanged) diff --git a/examples/example_evaluate.py b/examples/example_evaluate.py index bfd4917..581880d 100644 --- a/examples/example_evaluate.py +++ b/examples/example_evaluate.py @@ -5,89 +5,13 @@ from brdr.aligner import Aligner from brdr.enums import GRBType +from brdr.geometry_utils import geojson_polygon_to_multipolygon from brdr.grb import GRBActualLoader from brdr.grb import GRBFiscalParcelLoader -from brdr.aligner import evaluate from brdr.grb import get_geoms_affected_by_grb_change from brdr.loader import DictLoader, GeoJsonFileLoader from brdr.utils import get_series_geojson_dict - - -def fid_to_geojson(geojson): - fid = 1 - for f in geojson["features"]: - f["properties"]["fid"] = str(fid) - fid = fid + 1 - if f["geometry"]["type"] == "Polygon": - f["geometry"] = { - "type": "MultiPolygon", - "coordinates": [f["geometry"]["coordinates"]], - } - - return geojson - - -# -# thematic_dict = { -# "theme_id_1": from_wkt( -# "MultiPolygon (((174180.20077791667426936 171966.14649116666987538, " -# "174415.60530965600628406 171940.9636807945498731, " -# "174388.65236948925303295 171770.99678386366576888, " -# "174182.10876987033407204 171836.13745758961886168, " -# "174184.88916448061354458 171873.07698598300339654, " -# "174180.20077791667426936 171966.14649116666987538)))" -# ) -# } - -# thematic_dict = { -# "theme_id_1": from_wkt( -# "MultiPolygon (((173463.11530961000244133 174423.83310307000647299, " -# "173460.22633100001257844 174422.02316300000529736, " -# "173455.24681099998997524 174429.98009100000490434, " -# "173454.4299790000077337 174429.34482699999352917, " -# "173452.06690700000035577 174432.43058700000983663, " -# "173451.25743500000680797 174431.8672589999914635, " -# "173448.74844299998949282 174434.96249100001296028, " -# "173448.5809550000121817 174435.80485899999621324, " -# "173454.82841871041455306 174442.46780387416947633, " -# "173461.44169100001454353 174446.50898700000834651, " -# "173472.15932299999985844 174429.49919500001124106, " -# "173466.18524341000011191 174425.75641125999391079, " -# "173466.9701960513193626 174424.8217541387421079, " -# "173462.59915620859828778 174424.8217541387421079, " -# "173463.11530961000244133 174423.83310307000647299)))" -# ) -# } -# Polygon ((173455.24681099998997524 174429.9801549999974668, " -# "173454.4299790000077337 174429.34482699999352917, " -# "173452.06690700000035577 174432.43058700000983663, " -# "173451.25743500000680797 174431.8672589999914635, " -# "173448.74844299998949282 174434.96249100001296028, " -# "173448.5809550000121817 174435.80485899999621324, " -# "173455.39772300000186078 174441.47852299999794923, 
" -# "173461.44169100001454353 174446.50898700000834651, " -# "173472.15932299999985844 174429.49919500001124106, " -# "173466.18524300001445226 174425.75641100000939332, " -# "173460.22633100001257844 174422.02316300000529736, " -# "173455.24681099998997524 174429.9801549999974668))" - -# MultiPolygon (((173463.11530961000244133 174423.83310307000647299, " -# "173460.22633100001257844 174422.02316300000529736, " -# "173455.24681099998997524 174429.98009100000490434, " -# "173454.4299790000077337 174429.34482699999352917, " -# "173452.06690700000035577 174432.43058700000983663, " -# "173451.25743500000680797 174431.8672589999914635, " -# "173448.74844299998949282 174434.96249100001296028, " -# "173448.5809550000121817 174435.80485899999621324, " -# "173454.82841871041455306 174442.46780387416947633, " -# "173461.44169100001454353 174446.50898700000834651, " -# "173472.15932299999985844 174429.49919500001124106, " -# "173466.18524341000011191 174425.75641125999391079, " -# "173466.9701960513193626 174424.8217541387421079, " -# "173462.59915620859828778 174424.8217541387421079, " -# "173463.11530961000244133 174423.83310307000647299)))" - thematic_dict = { "theme_id_1": from_wkt( "Polygon ((174072.91453437806922011 179188.47430499014444649, 174121.17416846146807075 179179.98909460185677744, 174116.93156326730968431 179156.47799081765697338, 174110.56765547610120848 179152.58893605635967106, 174069.37903004963300191 179159.30639428040012717, 174069.37903004963300191 179159.30639428040012717, 174070.97000699743512087 179169.7361320493509993, 174072.91453437806922011 179188.47430499014444649))" @@ -132,15 +56,9 @@ def fid_to_geojson(geojson): GRBActualLoader(grb_type=GRBType.ADP, partition=1000, aligner=actual_aligner) ) series = np.arange(0, 200, 10, dtype=int) / 100 -dict_series, dict_predicted, diffs_dict = actual_aligner.predictor(series) - -# diffs_dict=merge_diffs_dict(diffs_dict) -dict_evaluated, prop_dictionary = evaluate( - actual_aligner, - dict_series, - dict_predicted, - thematic_dict_formula, +dict_evaluated, prop_dictionary = actual_aligner.evaluate(series=series, + thematic_dict_formula=thematic_dict_formula, threshold_area=5, threshold_percentage=1, dict_unchanged=dict_unchanged, @@ -163,7 +81,7 @@ def fid_to_geojson(geojson): + feature["properties"]["evaluation"] ) -geojson = fid_to_geojson(fc["result"]) +geojson = geojson_polygon_to_multipolygon(fc["result"]) print(geojson) diff --git a/examples/example_evaluate_ao.py b/examples/example_evaluate_ao.py index c023317..2cbcb70 100644 --- a/examples/example_evaluate_ao.py +++ b/examples/example_evaluate_ao.py @@ -2,7 +2,7 @@ import numpy as np -from brdr.aligner import Aligner, evaluate +from brdr.aligner import Aligner from brdr.enums import GRBType from brdr.grb import ( get_geoms_affected_by_grb_change, @@ -14,8 +14,6 @@ from brdr.utils import get_series_geojson_dict base_aligner = Aligner() -# = OnroerendErfgoedLoader([125610,148305,127615,122316,120153,124699,115489,120288,120387,124762,148143,116141]) -# loader = OnroerendErfgoedLoader([10047, 10048, 10049, 10050, 10051, 10056]) loader = OnroerendErfgoedLoader([120288]) base_aligner.load_thematic_data(loader) base_year = "2022" @@ -50,13 +48,9 @@ loader = GRBActualLoader(grb_type=GRBType.ADP, partition=1000, aligner=actual_aligner) actual_aligner.load_reference_data(loader) series = np.arange(0, 300, 10, dtype=int) / 100 -dict_series, dict_predicted, diffs_dict = actual_aligner.predictor(series) -dict_evaluated, prop_dictionary = evaluate( - actual_aligner, - dict_series, - 
dict_predicted, - thematic_dict_formula, +dict_evaluated, prop_dictionary = actual_aligner.evaluate(series=series, + thematic_dict_formula=thematic_dict_formula, threshold_area=5, threshold_percentage=1, dict_unchanged=dict_unchanged, diff --git a/examples/example_evaluate_multi_to_single.py b/examples/example_evaluate_multi_to_single.py index 4c325ea..1e153df 100644 --- a/examples/example_evaluate_multi_to_single.py +++ b/examples/example_evaluate_multi_to_single.py @@ -2,7 +2,7 @@ import numpy as np -from brdr.aligner import Aligner, evaluate +from brdr.aligner import Aligner from brdr.enums import GRBType from brdr.grb import GRBActualLoader from brdr.grb import GRBFiscalParcelLoader @@ -11,12 +11,9 @@ from brdr.oe import OnroerendErfgoedLoader from brdr.utils import get_series_geojson_dict -# from brdr.utils import merge_process_results - multi_as_single_modus = False # Align the multipolygon to the fiscal parcels 2022 - base_aligner = Aligner() base_aligner.multi_as_single_modus = multi_as_single_modus loader = OnroerendErfgoedLoader([9946]) @@ -29,7 +26,6 @@ base_process_result = base_aligner.process_dict_thematic( relevant_distance=relevant_distance ) -# base_process_result = merge_process_results(base_process_result) thematic_dict_formula = {} thematic_dict_result = {} @@ -60,13 +56,9 @@ loader = GRBActualLoader(grb_type=GRBType.ADP, partition=1000, aligner=actual_aligner) actual_aligner.load_reference_data(loader) series = np.arange(0, 200, 10, dtype=int) / 100 -dict_series, dict_predicted, diffs_dict = actual_aligner.predictor(series) -dict_evaluated, prop_dictionary = evaluate( - actual_aligner, - dict_series, - dict_predicted, - thematic_dict_formula, +dict_evaluated, prop_dictionary = actual_aligner.evaluate(series=series, + thematic_dict_formula=thematic_dict_formula, threshold_area=5, threshold_percentage=1, dict_unchanged=dict_unchanged, diff --git a/tests/test_aligner.py b/tests/test_aligner.py index f3c7696..258007c 100644 --- a/tests/test_aligner.py +++ b/tests/test_aligner.py @@ -1,5 +1,6 @@ import os import unittest +from datetime import date import numpy as np from shapely import Point @@ -12,9 +13,10 @@ from brdr.enums import OpenbaarDomeinStrategy from brdr.geometry_utils import _grid_bounds from brdr.geometry_utils import buffer_neg_pos -from brdr.grb import GRBActualLoader +from brdr.grb import GRBActualLoader, GRBFiscalParcelLoader, get_geoms_affected_by_grb_change from brdr.loader import GeoJsonLoader, DictLoader from brdr.typings import FeatureCollection, ProcessResult +from brdr.utils import get_series_geojson_dict class TestAligner(unittest.TestCase): @@ -347,6 +349,63 @@ def test_fully_aligned_input(self): result["theme_id_1"][relevant_distance].get("result_diff_plus") == Polygon() ) + def test_evaluate(self): + thematic_dict = { + "theme_id_1": from_wkt( + "MultiPolygon (((174180.20077791667426936 171966.14649116666987538, " + "174415.60530965600628406 171940.9636807945498731, " + "174388.65236948925303295 171770.99678386366576888, " + "174182.10876987033407204 171836.13745758961886168, " + "174184.88916448061354458 171873.07698598300339654, " + "174180.20077791667426936 171966.14649116666987538)))" + ) + } + base_aligner = Aligner() + base_aligner.load_thematic_data(DictLoader(thematic_dict)) + base_aligner.load_reference_data( + GRBFiscalParcelLoader(aligner=base_aligner, year="2022", partition=1000) + ) + relevant_distance=1 + base_process_result = base_aligner.process_dict_thematic(relevant_distance=relevant_distance) + thematic_dict_formula = {} + 
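# A minimal sketch of evaluate() now that it lives on the Aligner (as of this patch).
# dict_affected, dict_unchanged and thematic_dict_formula are assumed to come from the
# base-alignment and GRB-change-detection steps shown in these examples.
import numpy as np
from brdr.aligner import Aligner
from brdr.enums import GRBType
from brdr.grb import GRBActualLoader
from brdr.loader import DictLoader
from brdr.utils import get_series_geojson_dict

actual_aligner = Aligner()
actual_aligner.load_thematic_data(DictLoader(dict_affected))
actual_aligner.load_reference_data(
    GRBActualLoader(grb_type=GRBType.ADP, partition=1000, aligner=actual_aligner)
)
series = np.arange(0, 200, 10, dtype=int) / 100
# evaluate() runs the predictor internally, so no separate predictor() call is needed
dict_evaluated, prop_dictionary = actual_aligner.evaluate(
    series=series,
    thematic_dict_formula=thematic_dict_formula,
    threshold_area=5,
    threshold_percentage=1,
    dict_unchanged=dict_unchanged,
)
fc = get_series_geojson_dict(
    dict_evaluated,
    crs=actual_aligner.CRS,
    id_field=actual_aligner.name_thematic_id,
    series_prop_dict=prop_dictionary,
)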
thematic_dict_result = {} + for key in base_process_result: + thematic_dict_result[key] = base_process_result[key][relevant_distance]["result"] + thematic_dict_formula[key] = base_aligner.get_formula( + thematic_dict_result[key] + ) + aligner_result = Aligner() + aligner_result.load_thematic_data(DictLoader(thematic_dict_result)) + dict_affected, dict_unchanged = get_geoms_affected_by_grb_change( + aligner=aligner_result, + grb_type=GRBType.ADP, + date_start=date(2022, 1, 1), + date_end=date.today(), + one_by_one=False, + ) + + actual_aligner = Aligner() + loader = DictLoader(dict_affected) + actual_aligner.load_thematic_data(loader) + loader = GRBActualLoader( + grb_type=GRBType.ADP, partition=1000, aligner=actual_aligner + ) + actual_aligner.load_reference_data(loader) + series = np.arange(0, 200, 10, dtype=int) / 100 + + dict_evaluated, prop_dictionary = actual_aligner.evaluate(series=series, + thematic_dict_formula= thematic_dict_formula, + threshold_area=5, + threshold_percentage=1, + ) + fc = get_series_geojson_dict( + dict_evaluated, + crs=actual_aligner.CRS, + id_field=actual_aligner.name_thematic_id, + series_prop_dict=prop_dictionary, + ) + + def test_fully_aligned_geojson_output(self): aligned_shape = from_wkt( "MultiPolygon (((173463.11530961000244133 174423.83310307000647299, " diff --git a/tests/test_grb.py b/tests/test_grb.py index a2257eb..f649ffb 100644 --- a/tests/test_grb.py +++ b/tests/test_grb.py @@ -1,22 +1,17 @@ import unittest from datetime import date, timedelta -import numpy as np from shapely import Polygon, from_wkt -from brdr.aligner import Aligner, evaluate +from brdr.aligner import Aligner from brdr.enums import GRBType, Evaluation from brdr.grb import ( get_last_version_date, is_grb_changed, get_geoms_affected_by_grb_change, - GRBActualLoader, - GRBFiscalParcelLoader, GRBSpecificDateParcelLoader, update_to_actual_grb, + GRBSpecificDateParcelLoader, update_to_actual_grb, ) from brdr.loader import DictLoader -from brdr.utils import ( - get_series_geojson_dict, -) class TestGrb(unittest.TestCase): @@ -173,69 +168,6 @@ def test_get_geoms_affected_by_grb_change_bulk(self): ) assert len(dict_affected.keys()) > 0 - def test_evaluate(self): - #TODO - thematic_dict = { - "theme_id_1": from_wkt( - "MultiPolygon (((174180.20077791667426936 171966.14649116666987538, " - "174415.60530965600628406 171940.9636807945498731, " - "174388.65236948925303295 171770.99678386366576888, " - "174182.10876987033407204 171836.13745758961886168, " - "174184.88916448061354458 171873.07698598300339654, " - "174180.20077791667426936 171966.14649116666987538)))" - ) - } - base_aligner = Aligner() - base_aligner.load_thematic_data(DictLoader(thematic_dict)) - base_aligner.load_reference_data( - GRBFiscalParcelLoader(aligner=base_aligner, year="2022", partition=1000) - ) - relevant_distance=1 - base_process_result = base_aligner.process_dict_thematic(relevant_distance=relevant_distance) - thematic_dict_formula = {} - thematic_dict_result = {} - for key in base_process_result: - thematic_dict_result[key] = base_process_result[key][relevant_distance]["result"] - thematic_dict_formula[key] = base_aligner.get_formula( - thematic_dict_result[key] - ) - aligner_result = Aligner() - aligner_result.load_thematic_data(DictLoader(thematic_dict_result)) - dict_affected, dict_unchanged = get_geoms_affected_by_grb_change( - aligner=aligner_result, - grb_type=GRBType.ADP, - date_start=date(2022, 1, 1), - date_end=date.today(), - one_by_one=False, - ) - - actual_aligner = Aligner() - loader = 
DictLoader(dict_affected) - actual_aligner.load_thematic_data(loader) - loader = GRBActualLoader( - grb_type=GRBType.ADP, partition=1000, aligner=actual_aligner - ) - actual_aligner.load_reference_data(loader) - series = np.arange(0, 200, 10, dtype=int) / 100 - dict_series, dict_predicted, diffs_dict = actual_aligner.predictor(series) - - dict_evaluated, prop_dictionary = evaluate( - actual_aligner, - dict_series, - dict_predicted, - thematic_dict_formula, - threshold_area=5, - threshold_percentage=1, - ) - - fc = get_series_geojson_dict( - dict_evaluated, - crs=actual_aligner.CRS, - id_field=actual_aligner.name_thematic_id, - series_prop_dict=prop_dictionary, - ) - - print(fc["result"]) def test_grbspecificdateparcelloader(self): diff --git a/tests/test_utils.py b/tests/test_utils.py index 59a06da..8c56abb 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -8,7 +8,6 @@ from brdr.oe import get_oe_dict_by_ids from brdr.typings import ProcessResult from brdr.utils import diffs_from_dict_series - # from brdr.utils import filter_dict_by_key from brdr.utils import get_breakpoints_zerostreak from brdr.utils import get_collection From 23e9d7b7b9162580dd8c817f008b65785ce87cf9 Mon Sep 17 00:00:00 2001 From: dieuska Date: Wed, 18 Sep 2024 17:18:29 +0200 Subject: [PATCH 24/35] fix wrong import --- brdr/loader.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/brdr/loader.py b/brdr/loader.py index f8ec963..0db8bd4 100644 --- a/brdr/loader.py +++ b/brdr/loader.py @@ -1,10 +1,10 @@ import json from abc import ABC +from datetime import datetime import requests as requests from shapely import make_valid from shapely.geometry.base import BaseGeometry -from win32ctypes.pywin32.pywintypes import datetime from brdr.constants import DATE_FORMAT from brdr.constants import VERSION_DATE From 77814b21ed36b77586be05fd2c55a4e49097102f Mon Sep 17 00:00:00 2001 From: dieuska Date: Wed, 18 Sep 2024 17:33:55 +0200 Subject: [PATCH 25/35] fix for evaluate --- brdr/aligner.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/brdr/aligner.py b/brdr/aligner.py index 666359d..3333e86 100644 --- a/brdr/aligner.py +++ b/brdr/aligner.py @@ -773,7 +773,7 @@ def evaluate( prop_dictionary[theme_id] = {0: {"result": geom, "evaluation": Evaluation.NO_CHANGE_6, - "formula": json.dumps(actual_aligner.get_formula(geom)) + "formula": json.dumps(self.get_formula(geom)) } } return dict_evaluated_result, prop_dictionary From cd74c2a3e192e2d07d5233e7dcbd768bbb236d07 Mon Sep 17 00:00:00 2001 From: dieuska Date: Thu, 19 Sep 2024 15:16:44 +0200 Subject: [PATCH 26/35] #89 refactoring of aligner --- README.md | 39 +- brdr/aligner.py | 489 +++++++++--------- brdr/constants.py | 4 + brdr/enums.py | 24 + brdr/grb.py | 27 +- brdr/loader.py | 2 +- brdr/utils.py | 4 +- examples/__init__.py | 4 +- examples/example_131635.py | 8 +- examples/example_aligner.py | 4 +- examples/example_aligners.py | 14 +- examples/example_ao.py | 6 +- examples/example_combined_borders_adp_gbg.py | 8 +- examples/example_eo.py | 10 +- examples/example_evaluate.py | 30 +- examples/example_evaluate_ao.py | 21 +- examples/example_evaluate_multi_to_single.py | 73 --- examples/example_local_data.py | 4 +- examples/example_multi_to_single.py | 14 +- examples/example_multipolygon.py | 10 +- examples/example_parcel_change_detector.py | 100 +--- examples/example_parcel_vs_building.py | 4 +- examples/example_predictor.py | 8 +- .../example_predictor_double_prediction.py | 8 +- examples/example_readme.py | 9 +- 
examples/example_refactor_dict_series.py | 6 +- examples/example_speedtest.py | 2 +- examples/example_update_to_actual_grb.py | 53 +- examples/stats_snapping_distance_creation.py | 2 +- tests/test_aligner.py | 59 ++- tests/test_examples.py | 25 +- tests/test_grb.py | 9 +- tests/test_integration.py | 2 +- 33 files changed, 512 insertions(+), 570 deletions(-) delete mode 100644 examples/example_evaluate_multi_to_single.py diff --git a/README.md b/README.md index 273aa4d..6101e18 100644 --- a/README.md +++ b/README.md @@ -52,26 +52,18 @@ The figure below shows: GeoJsonFileLoader,GeoJsonUrlLoader) * Processors: - * aligner.process_dict_thematic(): Align thematic boundaries to reference boundaries with a specific relevant - distance - * aligner.process_series(): Align thematic boundaries to reference boundaries with a series of specified relevant - distances (process_series) - * aligner.predictor(): Make use of a 'predictor'-function that aligns thematic boundaries to reference boundaries - for 'predicted' interesting - relevant distances (predictor) - * aligner.get_formula(): Calculating a descriptive formula of a thematic boundary based on a reference layer - * evaluate(): Evaluates a thematic dictionary on equality with another version and adds a 'evaluation'-property - + * aligner.process(): Align thematic boundaries to reference boundaries with a specific relevant + distance or a range of relevant distances + * aligner.predictor(): Searches all 'stable' (=predicted) processresults in a range of relevant distances + * aligner.get_brdr_formula(): Calculating a descriptive formula of a thematic boundary based on a reference layer + * compare(): Compares input geometries with another version and adds a EVALUATION_FIELD_NAME * Exporters: - * aligner.get_results_as_geojson(): Returns a collection of geojson-dictionaries with the results (resulting - geometry, differences,...) - * aligner.get_predictions_as_geojson(): Returns a collection of geojson-dictionaries with the predictions (resulting - geometry, differences,...) - * aligner.get_series_as_geojson(): Returns a collection of geojson-dictionaries for a given resulting series ( - resulting geometry, differences,...) 
- * aligner.get_reference_as_geojson(): Returns a geojson-featurecollection with all the reference-geometries - * aligner.export_predictions(): Exports the predicted geojson-files to a specified path - * aligner.export_results(): Exports the resuling geojson-files to a specified path + * aligner.get_results_as_geojson(): Returns a collection of geojson-dictionaries with the processresults (resulting + geometry, differences,...): This can be used for all processresults or only the 'predicted' results + * aligner.get_input_as_geojson(): Returns a geojson-featurecollection from input-dictionaries (thematic or + reference) + * aligner.save_results(): Exports the resuling geojson-files to a specified path:This can be used for all + processresults or only the 'predicted' results Besides the generic functionalities, a range of Flanders-specific functionalities are provided: @@ -123,9 +115,6 @@ from brdr.loader import DictLoader # CREATE AN ALIGNER aligner = Aligner( - relevant_distance=1, - od_strategy=OpenbaarDomeinStrategy.SNAP_SINGLE_SIDE, - threshold_overlap_percentage=50, crs="EPSG:31370", ) # ADD A THEMATIC POLYGON TO THEMATIC DICTIONARY and LOAD into Aligner @@ -138,7 +127,11 @@ loader = DictLoader(reference_dict) aligner.load_reference_data(loader) # EXECUTE THE ALIGNMENT relevant_distance = 1 -process_result = aligner.process_dict_thematic(relevant_distance=relevant_distance) +process_result = aligner.process( + relevant_distance=relevant_distance, + od_strategy=OpenbaarDomeinStrategy.SNAP_SINGLE_SIDE, + threshold_overlap_percentage=50, +) # PRINT RESULTS IN WKT print("result: " + process_result["theme_id_1"][relevant_distance]["result"].wkt) print( diff --git a/brdr/aligner.py b/brdr/aligner.py index 3333e86..2a5fc05 100644 --- a/brdr/aligner.py +++ b/brdr/aligner.py @@ -19,11 +19,11 @@ from brdr import __version__ from brdr.constants import BUFFER_MULTIPLICATION_FACTOR, LAST_VERSION_DATE, VERSION_DATE, DATE_FORMAT, \ - THRESHOLD_EXCLUSION_PERCENTAGE, THRESHOLD_EXCLUSION_AREA + THRESHOLD_EXCLUSION_PERCENTAGE, THRESHOLD_EXCLUSION_AREA, FORMULA_FIELD_NAME, EVALUATION_FIELD_NAME from brdr.constants import CORR_DISTANCE from brdr.constants import DEFAULT_CRS from brdr.constants import THRESHOLD_CIRCLE_RATIO -from brdr.enums import OpenbaarDomeinStrategy, Evaluation +from brdr.enums import OpenbaarDomeinStrategy, Evaluation, AlignerResultType, AlignerInputType from brdr.geometry_utils import buffer_neg from brdr.geometry_utils import buffer_neg_pos from brdr.geometry_utils import buffer_pos @@ -60,6 +60,7 @@ def __init__( *, feedback=None, relevant_distance=1, + relevant_distances= np.arange(0, 200, 10, dtype=int) / 100, threshold_overlap_percentage=50, od_strategy=OpenbaarDomeinStrategy.SNAP_SINGLE_SIDE, crs=DEFAULT_CRS, @@ -91,6 +92,7 @@ def __init__( """ self.logger = Logger(feedback) self.relevant_distance = relevant_distance + self.relevant_distances = relevant_distances self.od_strategy = od_strategy self.threshold_overlap_percentage = threshold_overlap_percentage self.area_limit = area_limit @@ -125,10 +127,10 @@ def __init__( # results - # output-dictionaries (when processing dict_thematic) - self.dict_result: dict[str, dict[float, ProcessResult]]= {} - # dictionary with the 'predicted' results, grouped by relevant distance - self.dict_predicted : dict[str, dict[float, ProcessResult]] ={} + # output-dictionaries (all results of process()), grouped by theme_id and relevant_distance + self.dict_processresults: dict[str, dict[float, ProcessResult]]= {} + # dictionary with the 
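# Illustrative use of the slimmed-down constructor: alignment parameters now travel with
# process()/predictor() instead of the Aligner itself. The values below are examples.
import numpy as np
from brdr.aligner import Aligner
from brdr.enums import OpenbaarDomeinStrategy

aligner = Aligner(
    crs="EPSG:31370",
    relevant_distances=np.arange(0, 200, 10, dtype=int) / 100,  # the default series, shown explicitly
)
# ... load thematic and reference data as in the quickstart above ...
dict_results = aligner.process(
    relevant_distance=1,
    od_strategy=OpenbaarDomeinStrategy.SNAP_SINGLE_SIDE,
    threshold_overlap_percentage=50,
)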
'predicted' results, grouped by theme_id and relevant_distance + self.dict_predictions : dict[str, dict[float, ProcessResult]] ={} # Coordinate reference system # thematic geometries and reference geometries are assumed to be in the same CRS @@ -145,6 +147,14 @@ def __init__( def buffer_distance(self): return self.relevant_distance / 2 + ##########LOADERS########################## + ########################################### + + def load_thematic_data(self, loader: Loader): + self.dict_thematic, self.dict_thematic_properties, self.dict_thematic_source = ( + loader.load_data() + ) + def load_reference_data(self, loader: Loader): ( self.dict_reference, @@ -153,10 +163,8 @@ def load_reference_data(self, loader: Loader): ) = loader.load_data() self._prepare_reference_data() - def load_thematic_data(self, loader: Loader): - self.dict_thematic, self.dict_thematic_properties, self.dict_thematic_source = ( - loader.load_data() - ) + ##########PROCESSORS####################### + ########################################### def process_geometry( self, @@ -261,40 +269,111 @@ def process_geometry( return result_dict - def process_dict_thematic( + def process( self, + relevant_distances: Iterable[float]=None, relevant_distance=1, od_strategy=OpenbaarDomeinStrategy.SNAP_SINGLE_SIDE, threshold_overlap_percentage=50, ) -> dict[str, dict[float, ProcessResult]]: """ - Aligns a thematic dictionary of geometries to the reference layer based on - specified parameters. - method to align a thematic dictionary to the reference - layer + Calculates the resulting dictionaries for thematic data based on a series of + relevant distances. Args: - relevant_distance (float, optional): The relevant distance (in meters) for - processing. Defaults to 1. + relevant_distances (Iterable[float]): A series of relevant distances + (in meters) to process od_strategy (int, optional): The strategy for overlap detection. Defaults to 1. threshold_overlap_percentage (float, optional): The threshold percentage for considering full overlap. Defaults to 50. Returns: - dict: A dict containing processed data for each thematic key: - - result: Aligned thematic data. - - result_diff: global differences between thematic data and reference - data. - - result_diff_plus: Positive differences. - - result_diff_min: Negative differences. - - relevant_intersection: relevant intersections. - - relevant_diff: relevant differences. + dict: A dictionary, for every thematic ID a dictionary with the results for all distances + { + 'theme_id_1': {0: (ProcessResult), 0.1: + (ProcessResult), ...}, + 'theme_id_2': {0: (ProcessResult), 0.1: + (ProcessResult), ...}, + ... 
+ } """ - self.dict_result = self.process_series(relevant_distances=[relevant_distance], - od_strategy=od_strategy, - threshold_overlap_percentage=threshold_overlap_percentage) - return self.dict_result + if relevant_distances is None: + relevant_distances=[relevant_distance] + self.relevant_distance=relevant_distance + self.relevant_distances = relevant_distances + self.od_strategy = od_strategy + self.threshold_overlap_percentage = threshold_overlap_percentage + self.logger.feedback_debug("Process series" + str(self.relevant_distances)) + dict_series = {} + dict_thematic = self.dict_thematic + + if self.multi_as_single_modus: + dict_thematic = multipolygons_to_singles(dict_thematic) + + for key,geometry in dict_thematic.items(): + self.logger.feedback_info(f"thematic id {str(key)} processed with relevant distances (m) [{str(self.relevant_distances)}]") + dict_series[key] = {} + for relevant_distance in self.relevant_distances: + try: + self.relevant_distance=relevant_distance + processed_result = self.process_geometry( + geometry, + self.relevant_distance, + od_strategy, + threshold_overlap_percentage, + ) + except ValueError as e: + self.logger.feedback_warning(str(e)) + + dict_series[key][self.relevant_distance] = processed_result + + if self.multi_as_single_modus: + dict_series = merge_process_results(dict_series) + + self.logger.feedback_info( + "End of processing series: " + str(self.relevant_distances) + ) + self.dict_processresults = dict_series + + return self.dict_processresults + + # def process( + # self, + # relevant_distance=1, + # od_strategy=OpenbaarDomeinStrategy.SNAP_SINGLE_SIDE, + # threshold_overlap_percentage=50, + # ) -> dict[str, dict[float, ProcessResult]]: + # """ + # Aligns a thematic dictionary of geometries to the reference layer based on + # specified parameters. - method to align a thematic dictionary to the reference + # layer + # + # Args: + # relevant_distance (float, optional): The relevant distance (in meters) for + # processing. Defaults to 1. + # od_strategy (int, optional): The strategy for overlap detection. + # Defaults to 1. + # threshold_overlap_percentage (float, optional): The threshold percentage for + # considering full overlap. Defaults to 50. + # + # Returns: + # dict: A dict containing processed data for each thematic key: + # - result: Aligned thematic data. + # - result_diff: global differences between thematic data and reference + # data. + # - result_diff_plus: Positive differences. + # - result_diff_min: Negative differences. + # - relevant_intersection: relevant intersections. + # - relevant_diff: relevant differences. + # + # """ + # self.relevant_distance=relevant_distance + # self.dict_result = self.process(relevant_distances=[self.relevant_distance], + # od_strategy=od_strategy, + # threshold_overlap_percentage=threshold_overlap_percentage) + # return self.dict_result def predictor( self, @@ -331,7 +410,7 @@ def predictor( 4. **Predict Interesting Distances:** - The function considers distances corresponding to breakpoints and zero-streaks as potentially interesting for further analysis. - - These distances are stored in a dictionary (`dict_predicted`) with the + - These distances are stored in a dictionary (`dict_predictions`) with the thematic element key as the outer key. - Additionally, the corresponding results from the distance series for those distances are included. @@ -365,8 +444,8 @@ def predictor( Logs: - Debug logs the thematic element key being processed. 
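# A minimal sketch of the reworked process(): one call now covers a single relevant
# distance as well as a whole series, and results are keyed by theme id, then by distance.
# Assumes an Aligner whose thematic and reference data have already been loaded.
dict_results = aligner.process(relevant_distances=[0.5, 1, 2])
for theme_id, dist_results in dict_results.items():
    for relevant_distance, process_result in dist_results.items():
        # every ProcessResult holds the aligned geometry and its differences
        print(theme_id, relevant_distance, process_result["result"].wkt)
        print("added:", process_result.get("result_diff_plus"))
        print("removed:", process_result.get("result_diff_min"))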
""" - dict_predicted = defaultdict(dict) - dict_series = self.process_series( + dict_predictions = defaultdict(dict) + dict_series = self.process( relevant_distances=relevant_distances, od_strategy=od_strategy, threshold_overlap_percentage=threshold_overlap_percentage, @@ -388,101 +467,117 @@ def predictor( ) logging.debug(str(theme_id)) if len(zero_streaks) == 0: - dict_predicted[theme_id][relevant_distances[0]] = dict_series[theme_id][ + dict_predictions[theme_id][relevant_distances[0]] = dict_series[theme_id][ relevant_distances[0] ] logging.info("No zero-streaks found for: " + str(theme_id)) for zs in zero_streaks: - dict_predicted[theme_id] [zs[0]]= dict_series[theme_id][zs[0]] + dict_predictions[theme_id] [zs[0]]= dict_series[theme_id][zs[0]] #Check if the predicted reldists are unique (and remove duplicated predictions - dict_predicted_unique = defaultdict(dict) - for theme_id,dist_results in dict_predicted.items(): - dict_predicted_unique[theme_id] = {} + dict_predictions_unique = defaultdict(dict) + for theme_id,dist_results in dict_predictions.items(): + dict_predictions_unique[theme_id] = {} predicted_geoms_for_theme_id = [] for rel_dist, processresults in dist_results.items(): predicted_geom = processresults["result"] if not _equal_geom_in_array(predicted_geom,predicted_geoms_for_theme_id): - dict_predicted_unique[theme_id][rel_dist] = processresults + dict_predictions_unique[theme_id][rel_dist] = processresults predicted_geoms_for_theme_id.append(processresults["result"]) else: self.logger.feedback_info(f"Duplicate prediction found for key {theme_id} at distance {rel_dist}: Prediction excluded") - self.dict_predicted = dict_predicted_unique + self.dict_predictions = dict_predictions_unique return ( dict_series, - self.dict_predicted, + self.dict_predictions, diffs_dict, ) - - def process_series( - self, - relevant_distances: Iterable[float], - od_strategy=OpenbaarDomeinStrategy.SNAP_SINGLE_SIDE, - threshold_overlap_percentage=50, - ) -> dict[str, dict[float, ProcessResult]]: + def compare( + self, + threshold_area=5, + threshold_percentage=1, + dict_unchanged=None, + ): """ - Calculates the resulting dictionaries for thematic data based on a series of - relevant distances. - - Args: - relevant_distances (Iterable[float]): A series of relevant distances - (in meters) to process - od_strategy (int, optional): The strategy for overlap detection. - Defaults to 1. - threshold_overlap_percentage (float, optional): The threshold percentage for - considering full overlap. Defaults to 50. - - Returns: - dict: A dictionary, for every thematic ID a dictionary with the results for all distances - - { - 'theme_id_1': {0: (ProcessResult), 0.1: - (ProcessResult), ...}, - 'theme_id_2': {0: (ProcessResult), 0.1: - (ProcessResult), ...}, - ... 
- } + Compares input-geometries (with formula) and evaluates these geometries: An attribute is added to evaluate and decide if new + proposals can be used """ - self.logger.feedback_debug("Process series" + str(relevant_distances)) - self.od_strategy = od_strategy - self.threshold_overlap_percentage = threshold_overlap_percentage - dict_series = {} - dict_thematic = self.dict_thematic + dict_series,dict_predictions,diffs = self.predictor(self.relevant_distances) + if dict_unchanged is None: + dict_unchanged = {} + theme_ids = list(dict_series.keys()) + dict_evaluated_result = {} + prop_dictionary = {} + # Fill the dictionary-structure with empty values + for theme_id in theme_ids: + dict_evaluated_result[theme_id] = {} + prop_dictionary[theme_id] = {} + for dist in dict_series[theme_id].keys(): + prop_dictionary[theme_id][dist] = {} + for theme_id in dict_unchanged.keys(): + prop_dictionary[theme_id] = {} - if self.multi_as_single_modus: - dict_thematic = multipolygons_to_singles(dict_thematic) + for theme_id, dict_results in dict_predictions.items(): + equality = False + for dist in sorted(dict_results.keys()): + if equality: + break + geomresult = dict_results[dist]["result"] + actual_formula = self.get_brdr_formula(geomresult) + prop_dictionary[theme_id][dist][FORMULA_FIELD_NAME] = json.dumps(actual_formula) + base_formula = None + if theme_id in self.dict_thematic_properties and FORMULA_FIELD_NAME in self.dict_thematic_properties[theme_id]: + base_formula = self.dict_thematic_properties[theme_id][FORMULA_FIELD_NAME] + equality, prop = _check_equality( + base_formula, + actual_formula, + threshold_area, + threshold_percentage, + ) + if equality: + dict_evaluated_result[theme_id][dist] = dict_predictions[theme_id][dist] + prop_dictionary[theme_id][dist][EVALUATION_FIELD_NAME] = prop + break - for key,geometry in dict_thematic.items(): - self.logger.feedback_info(f"thematic id {str(key)} processed with relevant distances (m) [{str(relevant_distances)}]") - dict_series[key] = {} - for relevant_distance in relevant_distances: - try: - processed_result = self.process_geometry( - geometry, - relevant_distance, - od_strategy, - threshold_overlap_percentage, - ) - except ValueError as e: - self.logger.feedback_warning(str(e)) + evaluated_theme_ids = [theme_id for theme_id, value in dict_evaluated_result.items() if value != {}] - dict_series[key][relevant_distance] = processed_result + # fill where no equality is found/ The biggest predicted distance is returned as + # proposal + for theme_id in theme_ids: + if theme_id not in evaluated_theme_ids: + if len(dict_predictions[theme_id].keys()) == 0: + result = dict_series[theme_id][0] + dict_evaluated_result[theme_id][0] = result + prop_dictionary[theme_id][0][FORMULA_FIELD_NAME] = json.dumps( + self.get_brdr_formula(result["result"]) + ) + prop_dictionary[theme_id][0][EVALUATION_FIELD_NAME] = Evaluation.NO_PREDICTION_5 + continue + # Add all predicted features so they can be manually checked + for dist in dict_predictions[theme_id].keys(): + predicted_resultset = dict_predictions[theme_id][dist] + dict_evaluated_result[theme_id][dist] = predicted_resultset + prop_dictionary[theme_id][dist][FORMULA_FIELD_NAME] = json.dumps( + self.get_brdr_formula(predicted_resultset["result"]) + ) + prop_dictionary[theme_id][dist][EVALUATION_FIELD_NAME] = Evaluation.TO_CHECK_4 - if self.multi_as_single_modus: - dict_series = merge_process_results(dict_series) + for theme_id, geom in dict_unchanged.items(): + prop_dictionary[theme_id] = {0: + {"result": geom, + 
EVALUATION_FIELD_NAME: Evaluation.NO_CHANGE_6, + FORMULA_FIELD_NAME: json.dumps(self.get_brdr_formula(geom)) + } + } + return dict_evaluated_result, prop_dictionary - self.logger.feedback_info( - "End of processing series: " + str(relevant_distances) - ) - self.dict_result = dict_series - return self.dict_result - def get_formula(self, geometry: BaseGeometry, with_geom=False): + def get_brdr_formula(self, geometry: BaseGeometry, with_geom=False): """ Calculates formula-related information based on the input geometry. @@ -541,12 +636,6 @@ def get_formula(self, geometry: BaseGeometry, with_geom=False): if version_date is not None and version_date > last_version_date: last_version_date = version_date - # if safe_equals(geom_intersection, geom_reference): - # full = True - # area = round(geom_reference.area, 2) - # perc = 100 - # if with_geom: - # geom = geom_reference if perc > 99.99: full = True area = round(geom_reference.area, 2) @@ -591,40 +680,28 @@ def get_formula(self, geometry: BaseGeometry, with_geom=False): self.logger.feedback_debug(str(dict_formula)) return dict_formula - def get_results_as_geojson(self, formula=False): - """ - convert the results to geojson feature collections - - Args: - formula (bool, optional): Whether to include formula-related information - in the output. Defaults to False. - """ - return self.get_series_as_geojson( - formula,self.dict_result, - ) - - def get_predictions_as_geojson(self, formula=False): - """ - convert the predictions to geojson feature collections - - Args: - formula (bool, optional): Whether to include formula-related information - in the output. Defaults to False. - """ - return self.get_series_as_geojson( - formula,self.dict_predicted, - ) + ##########EXPORTERS######################## + ########################################### - def get_series_as_geojson(self, formula=False, series_dict=None): + def get_results_as_geojson(self, resulttype= AlignerResultType.PROCESSRESULTS, formula=False): """ get a geojson of a dictionary containing the resulting geometries for all 'serial' relevant distances. If no dict_series is given, the dict_result returned. 
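# Sketch of the new compare() flow, assuming the renamed API above: the formula of the
# base alignment is stored as a thematic property under FORMULA_FIELD_NAME and compared
# against freshly predicted results. base_aligner (holding the base reference parcels)
# and dict_affected (geometries to re-align) are assumed to come from earlier steps.
import numpy as np
from brdr.aligner import Aligner
from brdr.constants import FORMULA_FIELD_NAME
from brdr.enums import GRBType
from brdr.grb import GRBActualLoader
from brdr.loader import DictLoader

props = {
    theme_id: {FORMULA_FIELD_NAME: base_aligner.get_brdr_formula(geom)}
    for theme_id, geom in dict_affected.items()
}
actual_aligner = Aligner()
actual_aligner.load_thematic_data(
    DictLoader(data_dict=dict_affected, data_dict_properties=props)
)
actual_aligner.load_reference_data(
    GRBActualLoader(grb_type=GRBType.ADP, partition=1000, aligner=actual_aligner)
)
# compare() uses the aligner's own relevant_distances series via predictor()
actual_aligner.relevant_distances = np.arange(0, 200, 10, dtype=int) / 100
dict_evaluated, prop_dictionary = actual_aligner.compare(
    threshold_area=5, threshold_percentage=1
)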
Optional: The descriptive formula is added as an attribute to the result""" - series_dict = series_dict or self.dict_result + if resulttype == AlignerResultType.PROCESSRESULTS: + dict_series = self.dict_processresults + elif resulttype == AlignerResultType.PREDICTIONS: + dict_series = self.dict_predictions + else: + raise (ValueError, "AlignerResultType unknown") + if dict_series is None or dict_series == {}: + self.logger.feedback_warning ("Empty results: No calculated results to export.") + return {} + prop_dictionary = defaultdict(dict) - for theme_id, results_dict in series_dict.items(): + for theme_id, results_dict in dict_series.items(): nr_calculations = len(results_dict) for relevant_distance, process_results in results_dict.items(): prop_dictionary[theme_id][relevant_distance] = { @@ -632,64 +709,66 @@ def get_series_as_geojson(self, formula=False, series_dict=None): } if formula: result = process_results["result"] - formula = self.get_formula(result) - prop_dictionary[theme_id][relevant_distance]["formula"] =json.dumps(formula) + formula = self.get_brdr_formula(result) + prop_dictionary[theme_id][relevant_distance][FORMULA_FIELD_NAME] =json.dumps(formula) return get_series_geojson_dict( - series_dict, + dict_series, crs=self.CRS, id_field=self.name_thematic_id, series_prop_dict=prop_dictionary, ) - def get_reference_as_geojson(self): + def get_input_as_geojson(self,inputtype=AlignerInputType.REFERENCE): """ get a geojson of the reference polygons """ + + if inputtype == AlignerInputType.THEMATIC: + dict_to_geojson = self.dict_thematic + elif inputtype == AlignerInputType.REFERENCE: + dict_to_geojson = self.dict_reference + else: + raise (ValueError, "AlignerInputType unknown") + if dict_to_geojson is None or dict_to_geojson == {}: + self.logger.feedback_warning ("Empty input: No input to export.") + return {} + # TODO: also add properties? return geojson_from_dict( - self.dict_reference, self.CRS, self.name_reference_id, geom_attributes=False + dict_to_geojson, self.CRS, self.name_reference_id, geom_attributes=False ) - def export_predictions(self, path, formula=True): - """ - Exports 'predicted' results as GeoJSON files. - - This function exports 6 GeoJSON files containing the 'predicted' results to the - specified `path`. - """ - fcs = self.get_predictions_as_geojson(formula) - for name, fc in fcs.items(): - write_geojson(os.path.join(path, name + "_predictions.geojson"), fc) - - - def export_results(self, path, formula=True): + def save_results(self, path, resulttype=AlignerResultType.PROCESSRESULTS, formula=True): """ Exports analysis results as GeoJSON files. This function exports 6 GeoJSON files containing the analysis results to the - specified `path`. + specified `path`. Args: - path (str): The path to the directory where the GeoJSON files will be saved. - formula (bool, optional): Whether to include formula-related information - in the output. Defaults to True. + path (str): The path to the directory where the GeoJSON files will be saved. + formula (bool, optional): Whether to include formula-related information + in the output. Defaults to True. Details of exported files: - - result.geojson: Contains the original thematic data from ` - self.dict_result`. - - result_diff.geojson: Contains the difference between the original - and predicted data from `self.dict_result_diff`. - - result_diff_plus.geojson: Contains results for areas that are - added (increased area). - - result_diff_min.geojson: Contains results for areas that are - removed (decreased area). 
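# Quick sketch of the reworked exporters, assuming an Aligner on which process() and/or
# predictor() have already run; the output path is a placeholder.
from brdr.enums import AlignerInputType, AlignerResultType

# all calculated process results, with the brdr formula added as a property
fcs = aligner.get_results_as_geojson(
    resulttype=AlignerResultType.PROCESSRESULTS, formula=True
)
# only the 'stable' predictions found by predictor()
fcs_predicted = aligner.get_results_as_geojson(resulttype=AlignerResultType.PREDICTIONS)
# the loaded reference polygons as a FeatureCollection
fc_reference = aligner.get_input_as_geojson(inputtype=AlignerInputType.REFERENCE)
# write GeoJSON files to disk; file names are prefixed with the result type
aligner.save_results("output/", resulttype=AlignerResultType.PREDICTIONS, formula=True)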
- - result_relevant_intersection.geojson: Contains the areas with - relevant intersection that has to be included in the result. - - result_relevant_difference.geojson: Contains the areas with - relevant difference that has to be excluded from the result. - """ - fcs = self.get_results_as_geojson(formula) + - result.geojson: Contains the original thematic data from ` + self.dict_result`. + - result_diff.geojson: Contains the difference between the original + and predicted data from `self.dict_result_diff`. + - result_diff_plus.geojson: Contains results for areas that are + added (increased area). + - result_diff_min.geojson: Contains results for areas that are + removed (decreased area). + - result_relevant_intersection.geojson: Contains the areas with + relevant intersection that has to be included in the result. + - result_relevant_difference.geojson: Contains the areas with + relevant difference that has to be excluded from the result. + """ + + fcs = self.get_results_as_geojson( + formula=formula,resulttype=resulttype, + ) for name, fc in fcs.items(): - write_geojson(os.path.join(path, name + ".geojson"), fc) + write_geojson(os.path.join(path, resulttype.value + "_"+ name +".geojson"), fc) def get_thematic_union(self): if self.thematic_union is None: @@ -698,86 +777,6 @@ def get_thematic_union(self): ) return self.thematic_union - def evaluate( - self,series, - thematic_dict_formula, - threshold_area=5, - threshold_percentage=1, - dict_unchanged=None, - ): - """ - evaluate affected geometries and give attributes to evaluate and decide if new - proposals can be used - """ - dict_series,dict_predicted,diffs = self.predictor(series) - if dict_unchanged is None: - dict_unchanged = {} - theme_ids = list(dict_series.keys()) - dict_evaluated_result = {} - prop_dictionary = {} - # Fill the dictionary-structure with empty values - for theme_id in theme_ids: - dict_evaluated_result[theme_id] = {} - prop_dictionary[theme_id] = {} - for dist in dict_series[theme_id].keys(): - prop_dictionary[theme_id][dist] = {} - for theme_id in dict_unchanged.keys(): - prop_dictionary[theme_id] = {} - - for theme_id, dict_results in dict_predicted.items(): - equality = False - for dist in sorted(dict_results.keys()): - if equality: - break - geomresult = dict_results[dist]["result"] - actual_formula = self.get_formula(geomresult) - prop_dictionary[theme_id][dist]["formula"] = json.dumps(actual_formula) - base_formula = None - if theme_id in thematic_dict_formula: - base_formula = thematic_dict_formula[theme_id] - equality, prop = _check_equality( - base_formula, - actual_formula, - threshold_area, - threshold_percentage, - ) - if equality: - dict_evaluated_result[theme_id][dist] = dict_predicted[theme_id][dist] - prop_dictionary[theme_id][dist]["evaluation"] = prop - break - - evaluated_theme_ids = [theme_id for theme_id, value in dict_evaluated_result.items() if value != {}] - - # fill where no equality is found/ The biggest predicted distance is returned as - # proposal - for theme_id in theme_ids: - if theme_id not in evaluated_theme_ids: - if len(dict_predicted[theme_id].keys()) == 0: - result = dict_series[theme_id][0] - dict_evaluated_result[theme_id][0] = result - prop_dictionary[theme_id][0]["formula"] = json.dumps( - self.get_formula(result["result"]) - ) - prop_dictionary[theme_id][0]["evaluation"] = Evaluation.NO_PREDICTION_5 - continue - # Add all predicted features so they can be manually checked - for dist in dict_predicted[theme_id].keys(): - predicted_resultset = dict_predicted[theme_id][dist] - 
dict_evaluated_result[theme_id][dist] = predicted_resultset - prop_dictionary[theme_id][dist]["formula"] = json.dumps( - self.get_formula(predicted_resultset["result"]) - ) - prop_dictionary[theme_id][dist]["evaluation"] = Evaluation.TO_CHECK_4 - - for theme_id, geom in dict_unchanged.items(): - prop_dictionary[theme_id] = {0: - {"result": geom, - "evaluation": Evaluation.NO_CHANGE_6, - "formula": json.dumps(self.get_formula(geom)) - } - } - return dict_evaluated_result, prop_dictionary - def _prepare_reference_data(self): """ Prepares reference data for spatial queries and analysis. diff --git a/brdr/constants.py b/brdr/constants.py index 48c7c94..6488304 100644 --- a/brdr/constants.py +++ b/brdr/constants.py @@ -38,6 +38,10 @@ # MULTI_SINGLE_ID_SEPARATOR #separator to split multipolygon_ids to single polygons MULTI_SINGLE_ID_SEPARATOR = "*$*" + +FORMULA_FIELD_NAME = "brdr_formula" +EVALUATION_FIELD_NAME = "brdr_evaluation" +RELEVANT_DISTANCE_FIELD_NAME = "brdr_relevant_distance" LAST_VERSION_DATE = "last_version_date" VERSION_DATE = "version_date" diff --git a/brdr/enums.py b/brdr/enums.py index d198cd5..030a622 100644 --- a/brdr/enums.py +++ b/brdr/enums.py @@ -36,6 +36,30 @@ class OpenbaarDomeinStrategy(IntEnum): SNAP_SINGLE_SIDE_VARIANT_2 = 6 +class AlignerResultType(str, Enum): + """ + 2 Types of resulting dictionary are available in Aligner + + * PREDICTIONS = "predictions" (only the predicted versions for specific relevenat distances are returned) + * PROCESSRESULTS = "processresults" (All versions of resulting geometries for all relevant distances are returned) + """ + + PREDICTIONS = "predictions" + PROCESSRESULTS = "processresults" + + +class AlignerInputType(str, Enum): + """ + 2 Types of input dictionary are available in Aligner + + * THEMATIC = "thematic" + * REFERENCE = "reference" + """ + + THEMATIC = "thematic" + REFERENCE = "reference" + + class GRBType(str, Enum): """ Determines which GRB feature collection is used. Different types are available: diff --git a/brdr/grb.py b/brdr/grb.py index 1189914..8f2a0e1 100644 --- a/brdr/grb.py +++ b/brdr/grb.py @@ -10,7 +10,7 @@ from shapely.geometry.base import BaseGeometry from brdr.aligner import Aligner -from brdr.constants import DEFAULT_CRS, LAST_VERSION_DATE, DATE_FORMAT, VERSION_DATE +from brdr.constants import DEFAULT_CRS, LAST_VERSION_DATE, DATE_FORMAT, VERSION_DATE, FORMULA_FIELD_NAME from brdr.constants import DOWNLOAD_LIMIT from brdr.constants import GRB_BUILDING_ID from brdr.constants import GRB_FEATURE_URL @@ -331,7 +331,7 @@ def get_collection_grb_parcels_by_date( return collection_specific_date -def update_to_actual_grb(featurecollection, id_theme_fieldname, formula_field="formula", max_distance_for_actualisation=2, feedback=None ): +def update_to_actual_grb(featurecollection, id_theme_fieldname, formula_field=FORMULA_FIELD_NAME, max_distance_for_actualisation=2, feedback=None ): """ Function to update a thematic featurecollection to the most actual version of GRB. Important to notice that the featurecollection needs a 'formula' for the base-alignment. 
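# The new constants give stable property names on exported features. A small sketch of
# reading them back; 'fc' is assumed to be a FeatureCollection dict such as the ones
# returned by get_series_geojson_dict / the geojson exporters.
from brdr.constants import (
    EVALUATION_FIELD_NAME,  # "brdr_evaluation"
    FORMULA_FIELD_NAME,  # "brdr_formula"
    RELEVANT_DISTANCE_FIELD_NAME,  # "brdr_relevant_distance"
)

for feature in fc["result"]["features"]:
    props = feature["properties"]
    print(
        props.get(RELEVANT_DISTANCE_FIELD_NAME),
        props.get(EVALUATION_FIELD_NAME),
        props.get(FORMULA_FIELD_NAME),
    )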
@@ -339,7 +339,7 @@ def update_to_actual_grb(featurecollection, id_theme_fieldname, formula_field="f logger = Logger(feedback) # Load featurecollection into a shapely_dict: dict_thematic = {} - dict_thematic_formula = {} + dict_thematic_props = {} last_version_date = datetime.now().date() for feature in featurecollection["features"]: @@ -352,14 +352,14 @@ def update_to_actual_grb(featurecollection, id_theme_fieldname, formula_field="f logger.feedback_debug ("geometry (wkt): " + geom.wkt) dict_thematic[id_theme] = geom try: - dict_thematic_formula[id_theme] = json.loads(feature["properties"][formula_field]) - logger.feedback_debug ("formula: " +str(dict_thematic_formula[id_theme])) + dict_thematic_props[id_theme] = {FORMULA_FIELD_NAME: json.loads(feature["properties"][formula_field])} + logger.feedback_debug ("formula: " +str(dict_thematic_props[id_theme])) except Exception: raise Exception ("Formula -attribute-field (json) cannot be loaded") try: - logger.feedback_debug(str(dict_thematic_formula[id_theme])) - if LAST_VERSION_DATE in dict_thematic_formula[id_theme] and dict_thematic_formula[id_theme][LAST_VERSION_DATE] is not None and dict_thematic_formula[id_theme][LAST_VERSION_DATE] != "": - str_lvd = dict_thematic_formula[id_theme][LAST_VERSION_DATE] + logger.feedback_debug(str(dict_thematic_props[id_theme])) + if LAST_VERSION_DATE in dict_thematic_props[id_theme][FORMULA_FIELD_NAME] and dict_thematic_props[id_theme][FORMULA_FIELD_NAME][LAST_VERSION_DATE] is not None and dict_thematic_props[id_theme][FORMULA_FIELD_NAME][LAST_VERSION_DATE] != "": + str_lvd = dict_thematic_props[id_theme][FORMULA_FIELD_NAME][LAST_VERSION_DATE] lvd = datetime.strptime(str_lvd, DATE_FORMAT).date() if lvd < last_version_date: last_version_date = lvd @@ -388,15 +388,14 @@ def update_to_actual_grb(featurecollection, id_theme_fieldname, formula_field="f # Initiate a Aligner to reference thematic features to the actual borders actual_aligner = Aligner(feedback=feedback) - actual_aligner.load_thematic_data(DictLoader(dict_affected)) + actual_aligner.load_thematic_data(DictLoader(data_dict=dict_affected,data_dict_properties=dict_thematic_props)) actual_aligner.load_reference_data( GRBActualLoader(grb_type=GRBType.ADP, partition=1000, aligner=actual_aligner)) - series = np.arange(0, max_distance_for_actualisation * 100, 10, dtype=int) / 100 - #actual_aligner.predictor(series) - dict_evaluated, prop_dictionary = actual_aligner.evaluate(series=series,thematic_dict_formula=dict_thematic_formula, - threshold_area=5, threshold_percentage=1, - dict_unchanged=dict_unchanged) + actual_aligner.relevant_distances = np.arange(0, max_distance_for_actualisation * 100, 10, dtype=int) / 100 + dict_evaluated, prop_dictionary = actual_aligner.compare( + threshold_area=5, threshold_percentage=1, + dict_unchanged=dict_unchanged) return get_series_geojson_dict( dict_evaluated, diff --git a/brdr/loader.py b/brdr/loader.py index 0db8bd4..5f67a88 100644 --- a/brdr/loader.py +++ b/brdr/loader.py @@ -44,7 +44,7 @@ class DictLoader(Loader): def __init__( self, data_dict: dict[str:BaseGeometry], - data_dict_properties: dict[str:str] = {}, + data_dict_properties: dict[str:dict] = {}, data_dict_source: dict[str:str] = {}, ): super().__init__() diff --git a/brdr/utils.py b/brdr/utils.py index 31d0343..980e433 100644 --- a/brdr/utils.py +++ b/brdr/utils.py @@ -14,7 +14,7 @@ from shapely.geometry import shape from shapely.geometry.base import BaseGeometry -from brdr.constants import MULTI_SINGLE_ID_SEPARATOR, DEFAULT_CRS, DOWNLOAD_LIMIT +from 
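# Hedged sketch of the reworked update_to_actual_grb() shown above: the input
# FeatureCollection must carry the stored brdr formula as a JSON string in the property
# named by formula_field. 'featurecollection' and the id field name are placeholders.
from brdr.constants import FORMULA_FIELD_NAME
from brdr.grb import update_to_actual_grb

fcs_updated = update_to_actual_grb(
    featurecollection,  # GeoJSON FeatureCollection of previously aligned features
    id_theme_fieldname="theme_identifier",  # property holding the thematic id (placeholder)
    formula_field=FORMULA_FIELD_NAME,  # property holding the base-alignment formula (JSON)
    max_distance_for_actualisation=2,  # relevant distances are scanned from 0 m up to this value
)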
brdr.constants import MULTI_SINGLE_ID_SEPARATOR, DEFAULT_CRS, DOWNLOAD_LIMIT, RELEVANT_DISTANCE_FIELD_NAME from brdr.enums import DiffMetric from brdr.geometry_utils import get_partitions, get_bbox from brdr.typings import ProcessResult @@ -37,7 +37,7 @@ def get_series_geojson_dict( for relative_distance, process_result in results_dict.items(): properties = prop_dict.get(relative_distance, {}) properties[id_field] = theme_id - properties["relevant_distance"] = relative_distance + properties[RELEVANT_DISTANCE_FIELD_NAME] = relative_distance for results_type, geom in process_result.items(): if results_type not in features_list_dict: diff --git a/examples/__init__.py b/examples/__init__.py index 5537c72..64baa2f 100644 --- a/examples/__init__.py +++ b/examples/__init__.py @@ -122,7 +122,7 @@ def show_map( plt.show() -def print_formula(dict_results, aligner): +def print_brdr_formula(dict_results, aligner): for theme_id, dist_results in dict_results.items(): for rel_dist, processresults in dist_results.items(): print( @@ -132,7 +132,7 @@ def print_formula(dict_results, aligner): + str(rel_dist) + "--------------" ) - print(aligner.get_formula(processresults["result"])) + print(aligner.get_brdr_formula(processresults["result"])) return diff --git a/examples/example_131635.py b/examples/example_131635.py index f39c05f..095d49c 100644 --- a/examples/example_131635.py +++ b/examples/example_131635.py @@ -2,7 +2,7 @@ from brdr.enums import GRBType from brdr.grb import GRBActualLoader from brdr.oe import OnroerendErfgoedLoader -from examples import print_formula +from examples import print_brdr_formula from examples import show_map if __name__ == "__main__": @@ -19,9 +19,9 @@ # RESULTS rel_dist = 2 - dict_results = aligner.process_dict_thematic(rel_dist, 4) + dict_results = aligner.process(relevant_distance=rel_dist, od_strategy=4) # put resulting tuple in a dictionary - aligner.export_results("output/", formula=True) + aligner.save_results("output/", formula=True) show_map(dict_results, aligner.dict_thematic, aligner.dict_reference) - print_formula(dict_results, aligner) + print_brdr_formula(dict_results, aligner) diff --git a/examples/example_aligner.py b/examples/example_aligner.py index 5c47555..ed08939 100644 --- a/examples/example_aligner.py +++ b/examples/example_aligner.py @@ -50,9 +50,9 @@ # Example how to use the Aligner rel_dist = 6 - dict_results = aligner.process_dict_thematic( + dict_results = aligner.process( relevant_distance=rel_dist, od_strategy=OpenbaarDomeinStrategy.SNAP_FULL_AREA_ALL_SIDE, ) - aligner.export_results("output/") + aligner.save_results("output/") show_map(dict_results, aligner.dict_thematic, aligner.dict_reference) diff --git a/examples/example_aligners.py b/examples/example_aligners.py index 3f7850e..a9523e5 100644 --- a/examples/example_aligners.py +++ b/examples/example_aligners.py @@ -21,26 +21,26 @@ # Example how to use the Aligner rel_dist = 10 - dict_results = aligner.process_dict_thematic( + dict_results = aligner.process( relevant_distance=rel_dist, od_strategy=OpenbaarDomeinStrategy.SNAP_FULL_AREA_SINGLE_SIDE, ) - aligner.export_results("output/") + aligner.save_results("output/") show_map(dict_results, aligner.dict_thematic, aligner.dict_reference) rel_dist = 6 - dict_results = aligner.process_dict_thematic( + dict_results = aligner.process( relevant_distance=rel_dist, od_strategy=OpenbaarDomeinStrategy.SNAP_ALL_SIDE ) - aligner.export_results("output/") + aligner.save_results("output/") show_map(dict_results, aligner.dict_thematic, 
aligner.dict_reference) # for key in r: # x.get_formula(r[key]) # Example how to use a series (for histogram) series = [0.1, 0.2, 0.3, 0.4, 0.5, 1, 2] - dict_series = aligner.process_series(series, 2, 50) + dict_series = aligner.process(series, 2, 50) resulting_areas = diffs_from_dict_series(dict_series, aligner.dict_thematic) plot_series(series, resulting_areas) @@ -48,11 +48,11 @@ # border will be used for cases where relevant zones cannot be used for # determination) rel_dist = 6 - dict_results = aligner.process_dict_thematic( + dict_results = aligner.process( relevant_distance=rel_dist, od_strategy=OpenbaarDomeinStrategy.SNAP_FULL_AREA_ALL_SIDE, threshold_overlap_percentage=-1, ) - aligner.export_results("output/") + aligner.save_results("output/") show_map(dict_results, aligner.dict_thematic, aligner.dict_reference) diff --git a/examples/example_ao.py b/examples/example_ao.py index 36a5a8b..0f2366e 100644 --- a/examples/example_ao.py +++ b/examples/example_ao.py @@ -22,14 +22,14 @@ series = np.arange(0, 500, 20, dtype=int) / 100 # predict which relevant distances are interesting to propose as resulting geometry - dict_series, dict_predicted, diffs = aligner.predictor( + dict_series, dict_predictions, diffs = aligner.predictor( relevant_distances=series, od_strategy=2, threshold_overlap_percentage=50 ) - for key in dict_predicted.keys(): + for key in dict_predictions.keys(): diff = {key: diffs[key]} plot_series(series, diff) show_map( - {key: dict_predicted[key]}, + {key: dict_predictions[key]}, {key: aligner.dict_thematic[key]}, aligner.dict_reference, ) diff --git a/examples/example_combined_borders_adp_gbg.py b/examples/example_combined_borders_adp_gbg.py index adc5239..c6ee219 100644 --- a/examples/example_combined_borders_adp_gbg.py +++ b/examples/example_combined_borders_adp_gbg.py @@ -3,7 +3,7 @@ from brdr.grb import get_collection_grb_actual, GRBActualLoader from brdr.loader import GeoJsonFileLoader, DictLoader from brdr.utils import polygonize_reference_data, geojson_to_dicts -from examples import show_map, print_formula +from examples import show_map, print_brdr_formula # example to test what happens if we combine borders # (so thematic data can use both polygons) @@ -57,7 +57,7 @@ aligner.load_reference_data(DictLoader(dict_ref)) rel_dist = 2 - dict_results = aligner.process_dict_thematic(rel_dist, 4) - aligner.export_results("output/") + dict_results = aligner.process(relevant_distances=[rel_dist], od_strategy=4) + aligner.save_results("output/") show_map(dict_results, aligner.dict_thematic, aligner.dict_reference) - print_formula(dict_results, aligner) + print_brdr_formula(dict_results, aligner) diff --git a/examples/example_eo.py b/examples/example_eo.py index d6b783c..dbfd77a 100644 --- a/examples/example_eo.py +++ b/examples/example_eo.py @@ -1,7 +1,7 @@ import numpy as np from brdr.aligner import Aligner -from brdr.enums import GRBType +from brdr.enums import GRBType, AlignerResultType from brdr.grb import GRBActualLoader from brdr.oe import OnroerendErfgoedLoader, OEType from brdr.utils import write_geojson @@ -33,18 +33,18 @@ series = np.arange(0, 200, 20, dtype=int) / 100 # predict which relevant distances are interesting to propose as resulting geometry - dict_series, dict_predicted, diffs = aligner.predictor( + dict_series, dict_predictions, diffs = aligner.predictor( relevant_distances=series, od_strategy=2, threshold_overlap_percentage=50 ) - fcs = aligner.get_series_as_geojson(series_dict=dict_predicted) + fcs = 
aligner.get_results_as_geojson(resulttype=AlignerResultType.PREDICTIONS) write_geojson("output/predicted.geojson", fcs["result"]) write_geojson("output/predicted_diff.geojson", fcs["result_diff"]) - for key in dict_predicted.keys(): + for key in dict_predictions.keys(): diff = {key: diffs[key]} plot_series(series, diff) show_map( - {key: dict_predicted[key]}, + {key: dict_predictions[key]}, {key: aligner.dict_thematic[key]}, aligner.dict_reference, ) diff --git a/examples/example_evaluate.py b/examples/example_evaluate.py index 581880d..bfc991b 100644 --- a/examples/example_evaluate.py +++ b/examples/example_evaluate.py @@ -4,8 +4,8 @@ from shapely import from_wkt from brdr.aligner import Aligner +from brdr.constants import FORMULA_FIELD_NAME, EVALUATION_FIELD_NAME from brdr.enums import GRBType -from brdr.geometry_utils import geojson_polygon_to_multipolygon from brdr.grb import GRBActualLoader from brdr.grb import GRBFiscalParcelLoader from brdr.grb import get_geoms_affected_by_grb_change @@ -25,14 +25,14 @@ GRBFiscalParcelLoader(year=base_year, aligner=base_aligner) ) relevant_distance = 2 -base_process_result = base_aligner.process_dict_thematic( +base_process_result = base_aligner.process( relevant_distance=relevant_distance ) thematic_dict_formula = {} thematic_dict_result = {} for key in base_process_result: thematic_dict_result[key] = base_process_result[key][relevant_distance]["result"] - thematic_dict_formula[key] = base_aligner.get_formula(thematic_dict_result[key]) + thematic_dict_formula[key]= {FORMULA_FIELD_NAME:base_aligner.get_brdr_formula(thematic_dict_result[key])} print(key + ": " + thematic_dict_result[key].wkt) print(key + ": " + str(thematic_dict_formula[key])) base_aligner_result = Aligner() @@ -51,18 +51,17 @@ print(key + ": " + value.wkt) actual_aligner = Aligner() loader = DictLoader(dict_affected) -actual_aligner.load_thematic_data(DictLoader(dict_affected)) +actual_aligner.load_thematic_data(DictLoader(data_dict=dict_affected,data_dict_properties=thematic_dict_formula)) actual_aligner.load_reference_data( GRBActualLoader(grb_type=GRBType.ADP, partition=1000, aligner=actual_aligner) ) -series = np.arange(0, 200, 10, dtype=int) / 100 - -dict_evaluated, prop_dictionary = actual_aligner.evaluate(series=series, - thematic_dict_formula=thematic_dict_formula, - threshold_area=5, - threshold_percentage=1, - dict_unchanged=dict_unchanged, -) +actual_aligner.relevant_distances = np.arange(0, 200, 10, dtype=int) / 100 +dict_evaluated, prop_dictionary = actual_aligner.compare( + #thematic_dict_formula=thematic_dict_formula, + threshold_area=5, + threshold_percentage=1, + dict_unchanged=dict_unchanged, + ) fc = get_series_geojson_dict( dict_evaluated, @@ -71,17 +70,14 @@ series_prop_dict=prop_dictionary, ) print(fc["result"]) -fcs = actual_aligner.get_series_as_geojson(formula=True) +fcs = actual_aligner.get_results_as_geojson(formula=True) print(fcs["result"]) for feature in fc["result"]["features"]: print( feature["properties"][actual_aligner.name_thematic_id] + ": " - + feature["properties"]["evaluation"] + + feature["properties"][EVALUATION_FIELD_NAME] ) -geojson = geojson_polygon_to_multipolygon(fc["result"]) - -print(geojson) diff --git a/examples/example_evaluate_ao.py b/examples/example_evaluate_ao.py index 2cbcb70..00253d9 100644 --- a/examples/example_evaluate_ao.py +++ b/examples/example_evaluate_ao.py @@ -3,6 +3,7 @@ import numpy as np from brdr.aligner import Aligner +from brdr.constants import EVALUATION_FIELD_NAME, FORMULA_FIELD_NAME from brdr.enums import 
GRBType from brdr.grb import ( get_geoms_affected_by_grb_change, @@ -21,14 +22,14 @@ GRBFiscalParcelLoader(year=base_year, aligner=base_aligner) ) relevant_distance = 3 -base_process_result = base_aligner.process_dict_thematic( +base_process_result = base_aligner.process( relevant_distance=relevant_distance ) thematic_dict_formula = {} thematic_dict_result = {} for key in base_process_result: thematic_dict_result[key] = base_process_result[key][relevant_distance]["result"] - thematic_dict_formula[key] = base_aligner.get_formula(thematic_dict_result[key]) + thematic_dict_formula[key]= {FORMULA_FIELD_NAME:base_aligner.get_brdr_formula(thematic_dict_result[key])} base_aligner_result = Aligner() base_aligner_result.load_thematic_data(DictLoader(thematic_dict_result)) dict_affected, dict_unchanged = get_geoms_affected_by_grb_change( @@ -43,18 +44,16 @@ exit() actual_aligner = Aligner() -loader = DictLoader(dict_affected) -actual_aligner.load_thematic_data(loader) +actual_aligner.load_thematic_data(DictLoader(data_dict=dict_affected,data_dict_properties=thematic_dict_formula)) loader = GRBActualLoader(grb_type=GRBType.ADP, partition=1000, aligner=actual_aligner) actual_aligner.load_reference_data(loader) series = np.arange(0, 300, 10, dtype=int) / 100 -dict_evaluated, prop_dictionary = actual_aligner.evaluate(series=series, - thematic_dict_formula=thematic_dict_formula, - threshold_area=5, - threshold_percentage=1, - dict_unchanged=dict_unchanged, -) +dict_evaluated, prop_dictionary = actual_aligner.compare( + threshold_area=5, + threshold_percentage=1, + dict_unchanged=dict_unchanged, + ) fc = get_series_geojson_dict( dict_evaluated, @@ -63,4 +62,4 @@ series_prop_dict=prop_dictionary, ) for feature in fc["result"]["features"]: - print(feature["properties"]["evaluation"]) + print(feature["properties"][EVALUATION_FIELD_NAME]) diff --git a/examples/example_evaluate_multi_to_single.py b/examples/example_evaluate_multi_to_single.py deleted file mode 100644 index 1e153df..0000000 --- a/examples/example_evaluate_multi_to_single.py +++ /dev/null @@ -1,73 +0,0 @@ -from datetime import date - -import numpy as np - -from brdr.aligner import Aligner -from brdr.enums import GRBType -from brdr.grb import GRBActualLoader -from brdr.grb import GRBFiscalParcelLoader -from brdr.grb import get_geoms_affected_by_grb_change -from brdr.loader import DictLoader -from brdr.oe import OnroerendErfgoedLoader -from brdr.utils import get_series_geojson_dict - -multi_as_single_modus = False - -# Align the multipolygon to the fiscal parcels 2022 -base_aligner = Aligner() -base_aligner.multi_as_single_modus = multi_as_single_modus -loader = OnroerendErfgoedLoader([9946]) -base_aligner.load_thematic_data(loader) -base_year = "2022" -base_aligner.load_reference_data( - GRBFiscalParcelLoader(year=base_year, aligner=base_aligner) -) -relevant_distance = 2 -base_process_result = base_aligner.process_dict_thematic( - relevant_distance=relevant_distance -) -thematic_dict_formula = {} -thematic_dict_result = {} - -# Create a dictionary with resulting geometries (aligned on Adpf2022) and a dictionary -# with the corresponding formula -for key in base_process_result: - thematic_dict_result[key] = base_process_result[key][relevant_distance]["result"] - thematic_dict_formula[key] = base_aligner.get_formula(thematic_dict_result[key]) - -# Determine all features that are possibly changed during timespan -base_aligner_result = Aligner() -base_aligner_result.multi_as_single_modus = multi_as_single_modus 
-base_aligner_result.load_thematic_data(DictLoader(thematic_dict_result)) -dict_affected, dict_unchanged = get_geoms_affected_by_grb_change( - base_aligner_result, - grb_type=GRBType.ADP, - date_start=date(2022, 1, 1), - date_end=date.today(), - one_by_one=False, -) -# Align the possibly affected geometry on the actual GRB parcels (evaluation) - - -actual_aligner = Aligner() -actual_aligner.multi_as_single_modus = multi_as_single_modus -loader = DictLoader(dict_affected) -actual_aligner.load_thematic_data(loader) -loader = GRBActualLoader(grb_type=GRBType.ADP, partition=1000, aligner=actual_aligner) -actual_aligner.load_reference_data(loader) -series = np.arange(0, 200, 10, dtype=int) / 100 - -dict_evaluated, prop_dictionary = actual_aligner.evaluate(series=series, - thematic_dict_formula=thematic_dict_formula, - threshold_area=5, - threshold_percentage=1, - dict_unchanged=dict_unchanged, -) -fc = get_series_geojson_dict( - dict_evaluated, - crs=actual_aligner.CRS, - id_field=actual_aligner.name_thematic_id, - series_prop_dict=prop_dictionary, -) - -print(fc["result"]) diff --git a/examples/example_local_data.py b/examples/example_local_data.py index 3072625..23599fe 100644 --- a/examples/example_local_data.py +++ b/examples/example_local_data.py @@ -14,10 +14,10 @@ aligner.load_reference_data(loader) # Example how to use the Aligner rel_dist = 1 - dict_results = aligner.process_dict_thematic( + dict_results = aligner.process( relevant_distance=rel_dist, od_strategy=OpenbaarDomeinStrategy.SNAP_FULL_AREA_ALL_SIDE, ) - aligner.export_results("output/") + aligner.save_results("output/") # show_map(dict_results, aligner.dict_thematic, aligner.dict_reference) diff --git a/examples/example_multi_to_single.py b/examples/example_multi_to_single.py index cb67efb..456f2b8 100644 --- a/examples/example_multi_to_single.py +++ b/examples/example_multi_to_single.py @@ -2,7 +2,7 @@ from brdr.enums import GRBType from brdr.grb import GRBActualLoader from brdr.oe import OnroerendErfgoedLoader -from examples import print_formula +from examples import print_brdr_formula from examples import show_map # EXAMPLE of "multi_as_single_modus" @@ -20,11 +20,11 @@ ) rel_dist = 20 -dict_results = aligner.process_dict_thematic(rel_dist, 4) -aligner.export_results("output/") +dict_results = aligner.process(relevant_distance=rel_dist, od_strategy=4) +aligner.save_results("output/") show_map(dict_results, aligner.dict_thematic, aligner.dict_reference) -print_formula(dict_results, aligner) +print_brdr_formula(dict_results, aligner) # WITH MULTI_TO_SINGLE @@ -40,8 +40,8 @@ ) rel_dist = 20 -dict_results = aligner.process_dict_thematic(rel_dist, 4) -aligner.export_results("output/") +dict_results = aligner.process(relevant_distance=rel_dist, od_strategy=4) +aligner.save_results("output/") show_map(dict_results, aligner.dict_thematic, aligner.dict_reference) -print_formula(dict_results, aligner) +print_brdr_formula(dict_results, aligner) diff --git a/examples/example_multipolygon.py b/examples/example_multipolygon.py index bc7b818..0c3ed81 100644 --- a/examples/example_multipolygon.py +++ b/examples/example_multipolygon.py @@ -1,6 +1,6 @@ # Initiate brdr from brdr.aligner import Aligner -from brdr.enums import GRBType +from brdr.enums import GRBType, AlignerResultType from brdr.grb import GRBActualLoader from brdr.loader import DictLoader, GeoJsonFileLoader from brdr.utils import multipolygons_to_singles @@ -27,8 +27,10 @@ GRBActualLoader(aligner=aligner, grb_type=GRBType.ADP, partition=1000) ) -dict_series, dict_predicted, 
diffs = aligner.predictor() -fcs = aligner.get_series_as_geojson(series_dict=dict_predicted, formula=True) -aligner.export_results("output/") +dict_series, dict_predictions, diffs = aligner.predictor() +fcs = aligner.get_results_as_geojson( + resulttype=AlignerResultType.PREDICTIONS, formula=True +) +aligner.save_results("output/") write_geojson("output/predicted.geojson", fcs["result"]) write_geojson("output/predicted_diff.geojson", fcs["result_diff"]) diff --git a/examples/example_parcel_change_detector.py b/examples/example_parcel_change_detector.py index 983cafa..72cd0f9 100644 --- a/examples/example_parcel_change_detector.py +++ b/examples/example_parcel_change_detector.py @@ -1,16 +1,9 @@ import logging -from datetime import date -from datetime import timedelta - -import numpy as np from brdr.aligner import Aligner -from brdr.enums import GRBType -from brdr.grb import GRBActualLoader +from brdr.constants import EVALUATION_FIELD_NAME, RELEVANT_DISTANCE_FIELD_NAME from brdr.grb import GRBFiscalParcelLoader -from brdr.grb import evaluate -from brdr.grb import get_geoms_affected_by_grb_change -from brdr.loader import DictLoader +from brdr.grb import update_to_actual_grb from brdr.oe import OnroerendErfgoedLoader # This code shows an example how the aligner can be used inside a flow of @@ -26,7 +19,7 @@ # ========= crs = "EPSG:31370" limit = 10000 -# bbox = "172800,170900,173000,171100" +bbox = [172800,170900,173000,171100] bbox = [172000, 172000, 174000, 174000] # bbox = "170000,170000,175000,174900" # bbox = "100000,195000,105000,195900" @@ -38,16 +31,16 @@ base_correction = 2 # geometries bigger than this, will be excluded excluded_area = 10000 -# series of relevant distance that is used to check if we can auto-align the geometries +# max_distance_for_actualisation of relevant distance that is used to check if we can auto-align the geometries # to the actual reference-polygons to get an 'equal' formula -series = np.arange(0, 200, 10, dtype=int) / 100 +max_distance_for_actualisation = 2 # BASE # ===== # Initiate an Aligner to create a themeset that is base-referenced on a specific # base_year base_aligner = Aligner() # Load the thematic data to evaluate -loader = OnroerendErfgoedLoader(bbox=bbox) +loader = OnroerendErfgoedLoader(bbox=bbox,partition=0) base_aligner.load_thematic_data(loader) logging.info( @@ -60,6 +53,7 @@ # Exclude objects bigger than specified area keys_to_exclude = [] +nr_features = len(base_aligner.dict_thematic) for key in base_aligner.dict_thematic: if base_aligner.dict_thematic[key].area > excluded_area: keys_to_exclude.append(key) @@ -70,79 +64,39 @@ for x in keys_to_exclude: del base_aligner.dict_thematic[x] +# # Align the features to the base-GRB +base_process_result = base_aligner.process( + relevant_distance=base_correction + ) +#get resulting aligned features on Adpfxxxx, with formula +processresults=base_aligner.get_results_as_geojson(formula=True) +if len(processresults)==0: + print("empty processresults") + exit() +featurecollection_base_result = processresults["result"] -# Align the features to the base-GRB -base_process_result = base_aligner.process_dict_thematic( - relevant_distance=base_correction -) - -thematic_dict_formula = {} -thematic_dict_result = {} -i = 0 -for key in base_process_result: - i = i + 1 - thematic_dict_result[key] = base_process_result[key][base_correction]["result"] - thematic_dict_formula[key] = base_aligner.get_formula(thematic_dict_result[key]) - if i > 500: - break - -base_aligner_result = Aligner() 
-base_aligner_result.load_thematic_data(DictLoader(thematic_dict_result)) -dict_affected, dict_unchanged = get_geoms_affected_by_grb_change( - aligner=base_aligner_result, - grb_type=GRBType.ADP, - date_start=date.today() - timedelta(days=365), - date_end=date.today(), - one_by_one=False, -) - -logging.info( - "Number of possible affected OE-thematic during timespan: " - + str(len(dict_affected)) -) +# Update Featurecollection to actual version +fcs = update_to_actual_grb( + featurecollection_base_result, base_aligner.name_thematic_id,max_distance_for_actualisation=max_distance_for_actualisation) -# ACTUAL -# Initiate a Aligner to reference thematic features to the actual borders -# ================================================================================ -# Initiate an Aligner to reference thematic features to the actual borders -actual_aligner = Aligner() -loader = DictLoader(dict_affected) -actual_aligner.load_thematic_data(loader) -loader = GRBActualLoader(grb_type=GRBType.ADP, partition=1000, aligner=actual_aligner) -actual_aligner.load_reference_data(loader) - -# LOOP AND PROCESS ALL POSSIBLE AFFECTED FEATURES -# ================================================= -series = np.arange(0, 200, 10, dtype=int) / 100 -dict_series, dict_predicted, diffs_dict = actual_aligner.predictor(series) -dict_evaluated_result, prop_dictionary = evaluate( - actual_aligner, - dict_series, - dict_predicted, - thematic_dict_formula, - threshold_area=5, - threshold_percentage=1, - dict_unchanged=dict_unchanged, -) counter_equality = 0 counter_equality_by_alignment = 0 counter_difference = 0 -for theme_id in dict_affected: - for dist in series: - if "evaluation" in prop_dictionary[theme_id][dist].keys(): - ev = prop_dictionary[theme_id][dist]["evaluation"] - if ev.startswith("equal") and dist == 0: +for feature in fcs["result"]["features"]: + if EVALUATION_FIELD_NAME in feature["properties"].keys(): + ev = feature["properties"][EVALUATION_FIELD_NAME] + rd =feature["properties"][RELEVANT_DISTANCE_FIELD_NAME] + if ev.startswith("equal") and rd == 0: counter_equality = counter_equality + 1 - elif ev.startswith("equal") and dist > 0: + elif ev.startswith("equal") and rd > 0: counter_equality_by_alignment = counter_equality_by_alignment + 1 else: counter_difference = counter_difference + 1 - break print( "Features: " - + str(len(dict_affected)) + + str(nr_features) + "//Equality: " + str(counter_equality) + "//Equality by alignment: " diff --git a/examples/example_parcel_vs_building.py b/examples/example_parcel_vs_building.py index 5799aee..7d3cba1 100644 --- a/examples/example_parcel_vs_building.py +++ b/examples/example_parcel_vs_building.py @@ -35,9 +35,9 @@ # Example how to use a series (for histogram) series = np.arange(0, 300, 10, dtype=int) / 100 - x_dict_series = aligner_x.process_series(series, 4, 50) + x_dict_series = aligner_x.process(series, 4, 50) x_resulting_areas = diffs_from_dict_series(x_dict_series, aligner_x.dict_thematic) - y_dict_series = aligner_y.process_series(series, 4, 50) + y_dict_series = aligner_y.process(series, 4, 50) y_resulting_areas = diffs_from_dict_series(y_dict_series, aligner_y.dict_thematic) # plot_diffs(series,x_resulting_areas) # plot_diffs(series,y_resulting_areas) diff --git a/examples/example_predictor.py b/examples/example_predictor.py index 20f5528..ffb7949 100644 --- a/examples/example_predictor.py +++ b/examples/example_predictor.py @@ -24,14 +24,14 @@ series = np.arange(0, 300, 10, dtype=int) / 100 # predict which relevant distances are interesting to 
propose as resulting geometry - dict_series, dict_predicted, diffs = aligner.predictor( + dict_series, dict_predictions, diffs = aligner.predictor( relevant_distances=series, od_strategy=4, threshold_overlap_percentage=50 ) - fcs = aligner.get_predictions_as_geojson(formula=False) + fcs = aligner.get_results_as_geojson(formula=False) print(fcs["result"]) - # for key in dict_predicted: + # for key in dict_predictions: # show_map( - # {key:dict_predicted[key]}, + # {key:dict_predictions[key]}, # {key: aligner.dict_thematic[key]}, # aligner.dict_reference, # ) diff --git a/examples/example_predictor_double_prediction.py b/examples/example_predictor_double_prediction.py index 4f92571..9b2ad5e 100644 --- a/examples/example_predictor_double_prediction.py +++ b/examples/example_predictor_double_prediction.py @@ -29,14 +29,14 @@ series = np.arange(0, 800, 10, dtype=int) / 100 # predict which relevant distances are interesting to propose as resulting geometry - dict_series, dict_predicted, diffs = aligner.predictor( + dict_series, dict_predictions, diffs = aligner.predictor( relevant_distances=series, od_strategy=4, threshold_overlap_percentage=50 ) - fcs = aligner.get_predictions_as_geojson(formula=False) + fcs = aligner.get_results_as_geojson(formula=False) print(fcs["result"]) - # for key in dict_predicted: + # for key in dict_predictions: # show_map( - # {key:dict_predicted[key]}, + # {key:dict_predictions[key]}, # {key: aligner.dict_thematic[key]}, # aligner.dict_reference, # ) diff --git a/examples/example_readme.py b/examples/example_readme.py index e890130..b885c2e 100644 --- a/examples/example_readme.py +++ b/examples/example_readme.py @@ -5,9 +5,6 @@ # CREATE AN ALIGNER aligner = Aligner( - relevant_distance=1, - od_strategy=OpenbaarDomeinStrategy.SNAP_SINGLE_SIDE, - threshold_overlap_percentage=50, crs="EPSG:31370", ) # ADD A THEMATIC POLYGON TO THEMATIC DICTIONARY and LOAD into Aligner @@ -20,7 +17,11 @@ aligner.load_reference_data(loader) # EXECUTE THE ALIGNMENT relevant_distance = 1 -process_result = aligner.process_dict_thematic(relevant_distance=relevant_distance) +process_result = aligner.process( + relevant_distance=relevant_distance, + od_strategy=OpenbaarDomeinStrategy.SNAP_SINGLE_SIDE, + threshold_overlap_percentage=50, +) # PRINT RESULTS IN WKT print("result: " + process_result["theme_id_1"][relevant_distance]["result"].wkt) print( diff --git a/examples/example_refactor_dict_series.py b/examples/example_refactor_dict_series.py index e831542..511a6a8 100644 --- a/examples/example_refactor_dict_series.py +++ b/examples/example_refactor_dict_series.py @@ -17,10 +17,10 @@ loader = GRBActualLoader(grb_type=GRBType.ADP, partition=1000, aligner=aligner) aligner.load_reference_data(loader) -test = aligner.process_dict_thematic() -test = aligner.process_series([1, 2, 3]) +test = aligner.process() +test = aligner.process([1, 2, 3]) test = aligner.predictor() -fcs = aligner.get_series_as_geojson(formula=True) +fcs = aligner.get_results_as_geojson(formula=True) print(test) print(fcs) print(fcs["result"]) diff --git a/examples/example_speedtest.py b/examples/example_speedtest.py index 1b368a8..ff460f1 100644 --- a/examples/example_speedtest.py +++ b/examples/example_speedtest.py @@ -24,7 +24,7 @@ # Example how to use the Aligner aligner.predictor() - fcs = aligner.get_series_as_geojson(formula=True) + fcs = aligner.get_results_as_geojson(formula=True) endtime = datetime.now() seconds = (endtime - starttime).total_seconds() times.append(seconds) diff --git 
a/examples/example_update_to_actual_grb.py b/examples/example_update_to_actual_grb.py index a701da1..9f97bf0 100644 --- a/examples/example_update_to_actual_grb.py +++ b/examples/example_update_to_actual_grb.py @@ -1,18 +1,63 @@ from brdr.aligner import Aligner +from brdr.constants import EVALUATION_FIELD_NAME from brdr.grb import GRBFiscalParcelLoader from brdr.grb import update_to_actual_grb -from brdr.loader import GeoJsonFileLoader +from brdr.loader import GeoJsonLoader # Create a featurecollection (aligned on 2022), to use for the 'update_to_actual_grb' base_year = "2022" base_aligner = Aligner() name_thematic_id = "theme_identifier" -loader = GeoJsonFileLoader("themelayer.geojson", name_thematic_id) +loader = GeoJsonLoader( + _input={ + "type": "FeatureCollection", + "name": "extract", + "crs": {"type": "name", "properties": {"name": "urn:ogc:def:crs:EPSG::31370"}}, + "features": [ + { + "type": "Feature", + "properties": { + "nr_calculations": 1, + "ID": "206285", + "relevant_distance": 2.0, + "area": 503.67736346047076, + "perimeter": 125.74541473322422, + "shape_index": 0.24965468741597097, + }, + "geometry": { + "type": "MultiPolygon", + "coordinates": [ + [ + [ + [138539.326299999986077, 193994.138199999986682], + [138529.3663, 193995.566400000010617], + [138522.0997, 193996.6084], + [138514.984399999986636, 193997.6287], + [138505.8261, 193996.615], + [138498.8406, 193996.4314], + [138492.9442, 193996.289500000013504], + [138491.224599999986822, 193996.2481], + [138491.4111, 194004.814699999988079], + [138514.368500000011409, 194005.1297], + [138520.2585, 194004.5753], + [138520.3946, 194005.5833], + [138520.542599999986123, 194009.731999999989057], + [138541.4173, 194007.7292], + [138539.326299999986077, 193994.138199999986682], + ] + ] + ], + }, + } + ], + }, + id_property="ID", +) base_aligner.load_thematic_data(loader) base_aligner.load_reference_data( GRBFiscalParcelLoader(year=base_year, aligner=base_aligner) ) -base_process_result = base_aligner.process_dict_thematic(relevant_distance=2) +base_process_result = base_aligner.process(relevant_distance=2) fcs = base_aligner.get_results_as_geojson(formula=True) featurecollection_base_result = fcs["result"] print(featurecollection_base_result) @@ -25,7 +70,7 @@ print( feature["properties"][name_thematic_id] + ": " - + feature["properties"]["evaluation"] + + feature["properties"][EVALUATION_FIELD_NAME] ) geojson = featurecollection["result"] print(geojson) diff --git a/examples/stats_snapping_distance_creation.py b/examples/stats_snapping_distance_creation.py index ae8fdbd..106e4bb 100644 --- a/examples/stats_snapping_distance_creation.py +++ b/examples/stats_snapping_distance_creation.py @@ -54,7 +54,7 @@ results_diff_min, si, sd, - ) = x.process_dict_thematic(s, od, full_percentage) + ) = x.process(s, od, full_percentage) for key in results: results_area = results[key].area results_diff_plus_area = results_diff_plus[key].area diff --git a/tests/test_aligner.py b/tests/test_aligner.py index 258007c..4fb9578 100644 --- a/tests/test_aligner.py +++ b/tests/test_aligner.py @@ -9,7 +9,8 @@ from shapely.geometry import shape from brdr.aligner import Aligner -from brdr.enums import GRBType +from brdr.constants import FORMULA_FIELD_NAME +from brdr.enums import GRBType, AlignerResultType from brdr.enums import OpenbaarDomeinStrategy from brdr.geometry_utils import _grid_bounds from brdr.geometry_utils import buffer_neg_pos @@ -62,10 +63,11 @@ def test_export_results(self): aligner.load_reference_data( DictLoader({"ref_id_1": 
from_wkt("POLYGON ((0 1, 0 10,8 10,10 1,0 1))")}) ) - aligner.process_dict_thematic() + aligner.process() path = "./tmp/" - aligner.export_results(path=path) - filenames = [f"{k}.geojson" for k in ProcessResult.__annotations__] + resulttype = AlignerResultType.PROCESSRESULTS + aligner.save_results(path=path, resulttype=resulttype) + filenames = [resulttype.value + f"_{k}.geojson" for k in ProcessResult.__annotations__] for file_name in os.listdir(path): os.remove(path + file_name) assert file_name in filenames @@ -76,7 +78,7 @@ def test_get_formula_full_intersection(self): key = "a" ref_dict = {key: self.sample_geom} self.sample_aligner.load_reference_data(DictLoader(ref_dict)) - res = self.sample_aligner.get_formula(self.sample_geom, with_geom=True) + res = self.sample_aligner.get_brdr_formula(self.sample_geom, with_geom=True) self.assertTrue(res["full"]) result = res["reference_features"][key] self.assertTrue(result["full"]) @@ -87,7 +89,7 @@ def test_get_formula_partial_intersection(self): key = "a" ref_dict = {key: self.sample_geom.buffer(0.5)} self.sample_aligner.load_reference_data(DictLoader(ref_dict)) - res = self.sample_aligner.get_formula(self.sample_geom, with_geom=True) + res = self.sample_aligner.get_brdr_formula(self.sample_geom, with_geom=True) self.assertFalse(res["full"]) result = res["reference_features"][key] self.assertFalse(result["full"]) @@ -122,15 +124,15 @@ def test_predictor(self): # predict which relevant distances are interesting to propose as resulting # geometry - dict_series, dict_predicted, dict_diffs = self.sample_aligner.predictor( + dict_series, dict_predictions, dict_diffs = self.sample_aligner.predictor( relevant_distances=series, od_strategy=4, threshold_overlap_percentage=50 ) - self.assertEqual(len(dict_predicted), len(thematic_dict)) + self.assertEqual(len(dict_predictions), len(thematic_dict)) def test_predictor_double_prediction(self): """ Test if a double prediction is filtered out of the prediction results. 
- This testdata has 2 resulting predictions that are the same (at 0.0 and 6.0), and 6.0 will be removed from dict_predicted + This testdata has 2 resulting predictions that are the same (at 0.0 and 6.0), and 6.0 will be removed from dict_predictions """ # Initiate an Aligner aligner = Aligner() @@ -148,10 +150,10 @@ def test_predictor_double_prediction(self): series = np.arange(0, 800, 10, dtype=int) / 100 # predict which relevant distances are interesting to propose as resulting geometry - dict_series, dict_predicted, diffs = aligner.predictor( + dict_series, dict_predictions, diffs = aligner.predictor( relevant_distances=series, od_strategy=4, threshold_overlap_percentage=50 ) - self.assertEqual(len(dict_predicted["id1"]), 3) + self.assertEqual(len(dict_predictions["id1"]), 3) def test_load_reference_data_grb_actual_adp(self): thematic_dict = { @@ -211,7 +213,7 @@ def test_load_reference_data_grb_actual_knw(self): aligner=self.sample_aligner, grb_type=GRBType.KNW, partition=1000 ) ) - self.sample_aligner.process_dict_thematic() + self.sample_aligner.process() self.assertGreaterEqual(len(self.sample_aligner.dict_reference), 0) def test_all_od_strategies(self): @@ -227,7 +229,7 @@ def test_all_od_strategies(self): self.sample_aligner.load_reference_data(DictLoader(reference_dict)) relevant_distance = 1 for od_strategy in OpenbaarDomeinStrategy: - process_result = self.sample_aligner.process_dict_thematic( + process_result = self.sample_aligner.process( relevant_distance=relevant_distance, od_strategy=od_strategy, threshold_overlap_percentage=50, @@ -257,7 +259,7 @@ def test_process_interior_ring(self): aligner=self.sample_aligner, grb_type=GRBType.GBG, partition=1000 ) ) - result_dict = self.sample_aligner.process_dict_thematic() + result_dict = self.sample_aligner.process() self.assertEqual(len(result_dict), len(thematic_dict)) def test_process_circle(self): @@ -272,7 +274,7 @@ def test_process_circle(self): ) ) relevant_distance = 1 - results_dict = self.sample_aligner.process_dict_thematic( + results_dict = self.sample_aligner.process( relevant_distance=relevant_distance ) self.assertEqual(geometry, results_dict["key"][relevant_distance]["result"]) @@ -326,8 +328,8 @@ def test_get_reference_as_geojson(self): self.sample_aligner.load_reference_data( DictLoader({"ref_id_1": from_wkt("POLYGON ((0 1, 0 10,8 10,10 1,0 1))")}) ) - self.sample_aligner.process_dict_thematic() - self.sample_aligner.get_reference_as_geojson() + self.sample_aligner.process() + self.sample_aligner.get_input_as_geojson() def test_fully_aligned_input(self): aligned_shape = from_wkt("POLYGON ((0 0, 0 9, 5 10, 10 0, 0 0))") @@ -337,7 +339,7 @@ def test_fully_aligned_input(self): ) self.sample_aligner.load_reference_data(DictLoader({"ref_id_1": aligned_shape})) relevant_distance = 1 - result = self.sample_aligner.process_dict_thematic( + result = self.sample_aligner.process( relevant_distance=relevant_distance ) assert result["theme_id_1"][relevant_distance].get("result") == aligned_shape @@ -366,14 +368,12 @@ def test_evaluate(self): GRBFiscalParcelLoader(aligner=base_aligner, year="2022", partition=1000) ) relevant_distance=1 - base_process_result = base_aligner.process_dict_thematic(relevant_distance=relevant_distance) + base_process_result = base_aligner.process(relevant_distance=relevant_distance) thematic_dict_formula = {} thematic_dict_result = {} for key in base_process_result: thematic_dict_result[key] = base_process_result[key][relevant_distance]["result"] - thematic_dict_formula[key] = 
base_aligner.get_formula( - thematic_dict_result[key] - ) + thematic_dict_formula[key] = {FORMULA_FIELD_NAME: base_aligner.get_brdr_formula(thematic_dict_result[key])} aligner_result = Aligner() aligner_result.load_thematic_data(DictLoader(thematic_dict_result)) dict_affected, dict_unchanged = get_geoms_affected_by_grb_change( @@ -385,19 +385,18 @@ def test_evaluate(self): ) actual_aligner = Aligner() - loader = DictLoader(dict_affected) - actual_aligner.load_thematic_data(loader) + actual_aligner.load_thematic_data( + DictLoader(data_dict=dict_affected, data_dict_properties=thematic_dict_formula)) loader = GRBActualLoader( grb_type=GRBType.ADP, partition=1000, aligner=actual_aligner ) actual_aligner.load_reference_data(loader) series = np.arange(0, 200, 10, dtype=int) / 100 - dict_evaluated, prop_dictionary = actual_aligner.evaluate(series=series, - thematic_dict_formula= thematic_dict_formula, - threshold_area=5, - threshold_percentage=1, - ) + dict_evaluated, prop_dictionary = actual_aligner.compare( + threshold_area=5, + threshold_percentage=1, + ) fc = get_series_geojson_dict( dict_evaluated, crs=actual_aligner.CRS, @@ -429,7 +428,7 @@ def test_fully_aligned_geojson_output(self): DictLoader({"theme_id_1": aligned_shape}) ) self.sample_aligner.load_reference_data(DictLoader({"ref_id_1": aligned_shape})) - self.sample_aligner.process_dict_thematic() + self.sample_aligner.process() fcs = self.sample_aligner.get_results_as_geojson(formula=True) assert fcs["result"]["features"][0]["properties"]["area"] > 0 assert fcs["result_diff"]["features"][0]["properties"]["area"] == 0 diff --git a/tests/test_examples.py b/tests/test_examples.py index 7c9940c..1f4c720 100644 --- a/tests/test_examples.py +++ b/tests/test_examples.py @@ -23,10 +23,9 @@ def test_example_131635(self): loader = GRBActualLoader(grb_type=GRBType.ADP, partition=1000, aligner=aligner) aligner.load_reference_data(loader) rel_dist = 2 - aligner.process_dict_thematic(rel_dist, 4) + aligner.process(relevant_distance=rel_dist, od_strategy=4) def test_example_combined_borders_adp_gbg(self): - aligner = Aligner() loader = OnroerendErfgoedLoader([131635]) aligner.load_thematic_data(loader) @@ -43,9 +42,9 @@ def test_example_combined_borders_adp_gbg(self): aligner.load_reference_data(DictLoader(dict_ref)) rel_dist = 2 - result_dict = aligner.process_dict_thematic(rel_dist, 4) + result_dict = aligner.process(relevant_distance=rel_dist, od_strategy=4) for process_results in result_dict.values(): - aligner.get_formula(process_results[rel_dist]["result"]) + aligner.get_brdr_formula(process_results[rel_dist]["result"]) def test_example_multipolygon(self): aligner0 = Aligner() @@ -171,10 +170,10 @@ def test_example_multipolygon(self): GRBActualLoader(grb_type=GRBType.ADP, partition=1000, aligner=aligner) ) - _, dict_predicted, _ = aligner.predictor() + _, dict_predictions, _ = aligner.predictor() - self.assertGreater(len(dict_predicted), 0) - fcs = aligner.get_series_as_geojson(formula=True) + self.assertGreater(len(dict_predictions), 0) + fcs = aligner.get_results_as_geojson(formula=True) self.assertEqual(len(fcs), 6) def test_example_wanted_changes(self): @@ -188,11 +187,11 @@ def test_example_wanted_changes(self): # Example how to use the Aligner rel_dist = 2 - aligner.process_dict_thematic(rel_dist, 4) + aligner.process(relevant_distance=rel_dist,od_strategy= 4) # Example how to use a series (for histogram) series = np.arange(0, 300, 10, dtype=int) / 100 - dict_series = aligner.process_series(series, 4, 50) + dict_series = 
aligner.process(series, 4, 50) resulting_areas = diffs_from_dict_series(dict_series, aligner.dict_thematic) for key in resulting_areas: if len(resulting_areas[key]) == len(series): @@ -206,7 +205,7 @@ def test_example_wanted_changes(self): f"{st[0]:.2f} - {st[1]:.2f} -{st[2]:.2f} - {st[3]:.2f}" f" - startextreme {st[4]:.2f} " ) - aligner.process_dict_thematic(st[0], 4) + aligner.process(relevant_distance=st[0], od_strategy=4) def test_example_predictor(self): aligner = Aligner() @@ -220,9 +219,9 @@ def test_example_predictor(self): # predict which relevant distances are interesting to propose as resulting # geometry - _, dict_predicted, _ = aligner.predictor( + _, dict_predictions, _ = aligner.predictor( relevant_distances=series, od_strategy=4, threshold_overlap_percentage=50 ) - for key in dict_predicted.keys(): - assert key in dict_predicted.keys() + for key in dict_predictions.keys(): + assert key in dict_predictions.keys() continue diff --git a/tests/test_grb.py b/tests/test_grb.py index f649ffb..7e02dce 100644 --- a/tests/test_grb.py +++ b/tests/test_grb.py @@ -4,6 +4,7 @@ from shapely import Polygon, from_wkt from brdr.aligner import Aligner +from brdr.constants import EVALUATION_FIELD_NAME from brdr.enums import GRBType, Evaluation from brdr.grb import ( get_last_version_date, @@ -171,7 +172,6 @@ def test_get_geoms_affected_by_grb_change_bulk(self): def test_grbspecificdateparcelloader(self): - aligner = Aligner() thematic_dict = { "theme_id_1": from_wkt( "Polygon ((172283.76869662097305991 174272.85233648214489222, 172276.89871930953813717 174278.68436246179044247, 172274.71383684969623573 174280.57171753142029047, 172274.63047763772192411 174280.64478165470063686, 172272.45265833073062822 174282.52660570573061705, 172269.33533191855531186 174285.22093996312469244, 172265.55258252174826339 174288.49089696351438761, 172258.77032718938426115 174294.22654021997004747, 172258.63259260458289646 174294.342757155187428, 172254.93673790179309435 174288.79932878911495209, 172248.71360730109154247 174279.61860501393675804, 172248.96566232520854101 174279.43056782521307468, 172255.25363882273086347 174274.73737183399498463, 172257.08298882702365518 174273.37133203260600567, 172259.32325354730710387 174271.69890458136796951, 172261.65807284769834951 174269.9690355472266674, 172266.35596220899606124 174266.4871726930141449, 172273.34350050613284111 174261.30863015633076429, 172289.60360219911672175 174249.35944479051977396, 172293.30328181147342548 174246.59864199347794056, 172297.34760522318538278 174253.10583685990422964, 172289.53060952731175348 174259.6846851697191596, 172292.86485871637705714 174265.19099397677928209, 172283.76869662097305991 174272.85233648214489222))" @@ -188,13 +188,14 @@ def test_grbspecificdateparcelloader(self): aligner.load_reference_data(loader) assert len (aligner.dict_reference.keys())==52 - def test_grbspecificdateparcelloader(self): + def test_update_to_actual_grb(self): #Create a featurecollection (aligned on 2022), to use for the 'update_to_actual_grb' name_thematic_id = "theme_identifier" - featurecollection_base_result = {"crs": {"properties": {"name": "EPSG:31370"}, "type": "name"}, "features": [{"geometry": {"coordinates": [[[174165.099014, 179510.530095], [174165.8317, 179512.9879], [174171.989, 179533.6401], [174176.4529, 179548.8062], [174179.309, 179558.51], [174179.380292, 179558.485703], [174181.1589, 179557.8801], [174187.9589, 179555.5901], [174190.259, 179554.81], [174197.229, 179552.4601], [174199.5291, 179551.6901], [174203.588398, 
179550.315901], [174204.019, 179550.1701], [174196.945502, 179518.200008], [174196.899, 179517.9901], [174193.6237, 179503.5462], [174193.0752, 179501.1272], [174193.069, 179501.1002], [174192.963218, 179501.135794], [174183.549015, 179504.310095], [174174.279, 179507.4301], [174167.8091, 179509.6149], [174165.099014, 179510.530095]]], "type": "Polygon"}, "properties": {"area": 1390.3280890476424, "formula": "{\"alignment_date\": \"2024-09-16\", \"brdr_version\": \"0.2.1\", \"reference_source\": {\"source\": \"Adpf\", \"version_date\": \"2022-01-01\"}, \"full\": true, \"reference_features\": {\"24126B0049/00X000\": {\"full\": true, \"area\": 502.91, \"percentage\": 100, \"geometry\": null}, \"24126B0049/00Z000\": {\"full\": true, \"area\": 398.32, \"percentage\": 100, \"geometry\": null}, \"24126B0049/00Y000\": {\"full\": true, \"area\": 489.09, \"percentage\": 100, \"geometry\": null}}, \"reference_od\": null, \"last_version_date\": \"2022-07-29\"}", "perimeter": 155.9132823823815, "relevant_distance": 2, "shape_index": 0.11214135973414749, "theme_identifier": "100"}, "type": "Feature"}, {"geometry": {"coordinates": [[[174149.124298, 179571.446101], [174149.4742, 179571.3366], [174140.7496, 179544.3599], [174140.0649, 179544.0909], [174131.8684, 179521.8687], [174127.3538, 179523.3958], [174125.1598, 179524.1334], [174118.177, 179526.5181], [174117.5579, 179526.7295], [174121.3028, 179537.5797], [174134.5641, 179576.001], [174141.4845, 179573.8361], [174149.124298, 179571.446101]]], "type": "Polygon"}, "properties": {"area": 818.9938386019529, "formula": "{\"alignment_date\": \"2024-09-16\", \"brdr_version\": \"0.2.1\", \"reference_source\": {\"source\": \"Adpf\", \"version_date\": \"2022-01-01\"}, \"full\": true, \"reference_features\": {\"24126B0051/00W000\": {\"full\": true, \"area\": 419.99, \"percentage\": 100, \"geometry\": null}, \"24126B0051/00M002\": {\"full\": true, \"area\": 399.01, \"percentage\": 100, \"geometry\": null}}, \"reference_od\": null, \"last_version_date\": \"2022-07-29\"}", "perimeter": 135.6337116105736, "relevant_distance": 2, "shape_index": 0.16561017338311657, "theme_identifier": "200"}, "type": "Feature"}, {"geometry": {"coordinates": [[[174111.549006, 179153.956005], [174111.5042, 179153.9243], [174110.0614, 179154.1094], [174068.867, 179159.3947], [174068.8661, 179159.4262], [174068.8626, 179159.5573], [174073.7483, 179188.9357], [174120.4387, 179180.3235], [174116.1333, 179157.2025], [174111.549006, 179153.956005]]], "type": "Polygon"}, "properties": {"area": 1344.8114559611831, "formula": "{\"alignment_date\": \"2024-09-16\", \"brdr_version\": \"0.2.1\", \"reference_source\": {\"source\": \"Adpf\", \"version_date\": \"2022-01-01\"}, \"full\": true, \"reference_features\": {\"24126B0031/00N005\": {\"full\": true, \"area\": 1344.81, \"percentage\": 100, \"geometry\": null}}, \"reference_od\": null, \"last_version_date\": \"2019-07-25\"}", "perimeter": 149.60002606562426, "relevant_distance": 2, "shape_index": 0.11124237929598835, "theme_identifier": "300"}, "type": "Feature"}, {"geometry": {"coordinates": [[[174034.4177, 178984.8249], [174030.7603, 178982.3136], [174030.6565, 178982.4711], [174025.7399, 178989.9312], [174018.094404, 178999.593195], [174017.939403, 178999.788996], [174016.3725, 179001.7693], [174018.7192, 179003.659], [174021.115, 179005.7825], [174019.7443, 179007.5141], [174019.7371, 179007.5233], [174015.7101, 179025.628], [174040.6882, 179032.2831], [174037.3194, 178987.071901], [174037.2994, 178986.8036], [174036.3836, 178986.1748], 
[174034.4177, 178984.8249]]], "type": "Polygon"}, "properties": {"area": 842.1930629252586, "formula": "{\"alignment_date\": \"2024-09-16\", \"brdr_version\": \"0.2.1\", \"reference_source\": {\"source\": \"Adpf\", \"version_date\": \"2022-01-01\"}, \"full\": true, \"reference_features\": {\"24126B0031/00T007\": {\"full\": true, \"area\": 842.19, \"percentage\": 100, \"geometry\": null}}, \"reference_od\": null, \"last_version_date\": \"2019-07-25\"}", "perimeter": 130.58810547796506, "relevant_distance": 2, "shape_index": 0.1550572086457025, "theme_identifier": "400"}, "type": "Feature"}, {"geometry": {"coordinates": [[[173966.389028, 179298.100271], [173965.849202, 179298.315899], [173964.192, 179298.978], [173958.0291, 179301.4402], [173953.8952, 179302.1971], [173948.0517, 179303.2669], [173947.9791, 179303.2803], [173945.8891, 179303.6902], [173911.239422, 179309.581196], [173910.388103, 179309.7266], [173909.9886, 179309.7948], [173905.785701, 179319.638098], [173905.1785, 179321.060399], [173900.5608, 179331.8751], [173900.9241, 179331.8081], [173940.7763, 179325.5153], [173944.092, 179324.9918], [173949.739089, 179324.100202], [173962.1865, 179322.1395], [173966.131594, 179321.518001], [173966.499, 179321.4602], [173970.4676, 179319.931], [173974.1291, 179318.5202], [173972.379009, 179313.840224], [173968.2391, 179302.7402], [173968.229604, 179302.716411], [173966.389028, 179298.100271]]], "type": "Polygon"}, "properties": {"area": 1379.498322959166, "formula": "{\"alignment_date\": \"2024-09-16\", \"brdr_version\": \"0.2.1\", \"reference_source\": {\"source\": \"Adpf\", \"version_date\": \"2022-01-01\"}, \"full\": true, \"reference_features\": {\"24126B0006/00M002\": {\"full\": true, \"area\": 386.55, \"percentage\": 100, \"geometry\": null}, \"24126B0006/00E002\": {\"full\": true, \"area\": 409.64, \"percentage\": 100, \"geometry\": null}, \"24126B0006/00N002\": {\"full\": true, \"area\": 108.75, \"percentage\": 100, \"geometry\": null}, \"24126B0006/00F002\": {\"full\": true, \"area\": 474.56, \"percentage\": 100, \"geometry\": null}}, \"reference_od\": null, \"last_version_date\": \"2021-07-07\"}", "perimeter": 178.54520703582963, "relevant_distance": 2, "shape_index": 0.12942763616619105, "theme_identifier": "500"}, "type": "Feature"}, {"geometry": {"coordinates": [[[174240.361258, 179443.003306], [174240.4272, 179443.1969], [174234.5671, 179445.0969], [174241.3871, 179463.097], [174241.474, 179463.0721], [174244.1019, 179471.6328], [174249.4882, 179469.7988], [174254.26, 179468.16], [174256.144, 179467.513], [174254.5936, 179463.058], [174252.2125, 179456.2165], [174251.3099, 179453.623], [174249.5697, 179448.6229], [174249.0652, 179448.8045], [174248.960701, 179448.502502], [174246.296344, 179440.805126], [174240.361258, 179443.003306]]], "type": "Polygon"}, "properties": {"area": 354.31723731849075, "formula": "{\"alignment_date\": \"2024-09-16\", \"brdr_version\": \"0.2.1\", \"reference_source\": {\"source\": \"Adpf\", \"version_date\": \"2022-01-01\"}, \"full\": false, \"reference_features\": {\"24126B0027/00B002\": {\"full\": false, \"area\": 58.59, \"percentage\": 10.55, \"geometry\": null}, \"24126B0027/00R000\": {\"full\": true, \"area\": 161.68, \"percentage\": 100, \"geometry\": null}, \"24126B0027/00K003\": {\"full\": true, \"area\": 134.05, \"percentage\": 100, \"geometry\": null}}, \"reference_od\": null, \"last_version_date\": \"2019-07-25\"}", "perimeter": 82.71815334885017, "relevant_distance": 2, "shape_index": 0.23345788642649634, "theme_identifier": "600"}, 
"type": "Feature"}], "type": "FeatureCollection"} + featurecollection_base_result = {"crs": {"properties": {"name": "EPSG:31370"}, "type": "name"}, "features": [{"geometry": {"coordinates": [[[138541.4173, 194007.7292], [138539.3263, 193994.1382], [138529.3663, 193995.5664], [138522.0997, 193996.6084], [138514.9844, 193997.6287], [138505.8261, 193996.615], [138498.8406, 193996.4314], [138492.9442, 193996.2895], [138491.2246, 193996.2481], [138491.4111, 194004.8147], [138514.3685, 194005.1297], [138520.2585, 194004.5753], [138520.3946, 194005.5833], [138520.5426, 194009.732], [138541.4173, 194007.7292]]], "type": "Polygon"}, "properties": {"area": 503.67736346047064, "brdr_formula": "{\"alignment_date\": \"2024-09-19\", \"brdr_version\": \"0.2.1\", \"reference_source\": {\"source\": \"Adpf\", \"version_date\": \"2022-01-01\"}, \"full\": true, \"reference_features\": {\"12034A0181/00K000\": {\"full\": true, \"area\": 503.68, \"percentage\": 100, \"version_date\": \"2019-08-30\"}}, \"reference_od\": null, \"last_version_date\": \"2019-08-30\"}", "nr_calculations": 1, "perimeter": 125.74541473322422, "relevant_distance": 2, "shape_index": 0.24965468741597102, "theme_identifier": "206285"}, "type": "Feature"}], "type": "FeatureCollection"} + #Update Featurecollection to actual version featurecollection = update_to_actual_grb(featurecollection_base_result,name_thematic_id) #Print results for feature in featurecollection["result"]["features"]: - assert isinstance(feature["properties"]["evaluation"],Evaluation) + assert isinstance(feature["properties"][EVALUATION_FIELD_NAME],Evaluation) diff --git a/tests/test_integration.py b/tests/test_integration.py index a269cbf..0a77a56 100644 --- a/tests/test_integration.py +++ b/tests/test_integration.py @@ -55,7 +55,7 @@ def test_webservice_brdr(self): series = np.arange(0, 61, 1, dtype=float) / 10 - dict_series = aligner.process_series(series, openbaardomein_strategy, 50) + dict_series = aligner.process(series, openbaardomein_strategy, 50) dict_diffs = diffs_from_dict_series( dict_series, aligner.dict_thematic, DiffMetric.CHANGES_AREA ) From 18084e2f2e0f4e6081dbc3fe2570ab2b122a8aef Mon Sep 17 00:00:00 2001 From: dieuska Date: Thu, 19 Sep 2024 17:05:08 +0200 Subject: [PATCH 27/35] fix for exporting aligner input to geojson --- brdr/aligner.py | 8 ++- brdr/grb.py | 128 ++++++++++++++++++++++++------------- examples/example_131635.py | 1 + tests/test_aligner.py | 42 +++++++----- 4 files changed, 118 insertions(+), 61 deletions(-) diff --git a/brdr/aligner.py b/brdr/aligner.py index 2a5fc05..2ddc46b 100644 --- a/brdr/aligner.py +++ b/brdr/aligner.py @@ -726,16 +726,20 @@ def get_input_as_geojson(self,inputtype=AlignerInputType.REFERENCE): if inputtype == AlignerInputType.THEMATIC: dict_to_geojson = self.dict_thematic + dict_properties = self.dict_thematic_properties + property_id = self.name_thematic_id elif inputtype == AlignerInputType.REFERENCE: dict_to_geojson = self.dict_reference + dict_properties = self.dict_reference_properties + property_id = self.name_reference_id else: raise (ValueError, "AlignerInputType unknown") + dict_properties if dict_to_geojson is None or dict_to_geojson == {}: self.logger.feedback_warning ("Empty input: No input to export.") return {} - # TODO: also add properties? 
return geojson_from_dict( - dict_to_geojson, self.CRS, self.name_reference_id, geom_attributes=False + dict_to_geojson, self.CRS, property_id,prop_dict=dict_properties, geom_attributes=False ) def save_results(self, path, resulttype=AlignerResultType.PROCESSRESULTS, formula=True): """ diff --git a/brdr/grb.py b/brdr/grb.py index 8f2a0e1..1e846d1 100644 --- a/brdr/grb.py +++ b/brdr/grb.py @@ -10,7 +10,13 @@ from shapely.geometry.base import BaseGeometry from brdr.aligner import Aligner -from brdr.constants import DEFAULT_CRS, LAST_VERSION_DATE, DATE_FORMAT, VERSION_DATE, FORMULA_FIELD_NAME +from brdr.constants import ( + DEFAULT_CRS, + LAST_VERSION_DATE, + DATE_FORMAT, + VERSION_DATE, + FORMULA_FIELD_NAME, +) from brdr.constants import DOWNLOAD_LIMIT from brdr.constants import GRB_BUILDING_ID from brdr.constants import GRB_FEATURE_URL @@ -268,6 +274,8 @@ def get_collection_grb_fiscal_parcels( return get_collection_by_partition( url, geometry=geometry, partition=partition, limit=limit, crs=crs ) + + def get_collection_grb_parcels_by_date( geometry, date, @@ -281,12 +289,15 @@ def get_collection_grb_parcels_by_date( partition=partition, crs=crs, ) - #Filter on specific date: delete all features > specific_date - #TODO: experimental loader; unclear if we have to use "year-1 & year" OR if we have to use "year & year + 1" + # Filter on specific date: delete all features > specific_date + # TODO: experimental loader; unclear if we have to use "year-1 & year" OR if we have to use "year & year + 1" collection_year_after_filtered = deepcopy(collection_year_after) - logging.debug(len (collection_year_after_filtered["features"])) - if "features" in collection_year_after_filtered and len (collection_year_after_filtered["features"])>0: - removed_features =[] + logging.debug(len(collection_year_after_filtered["features"])) + if ( + "features" in collection_year_after_filtered + and len(collection_year_after_filtered["features"]) > 0 + ): + removed_features = [] for feature in collection_year_after_filtered["features"]: versiondate = datetime.strptime( feature["properties"][GRB_VERSION_DATE][:10], DATE_FORMAT @@ -295,43 +306,49 @@ def get_collection_grb_parcels_by_date( removed_features.append(feature) collection_year_after_filtered["features"].remove(feature) logging.debug(len(collection_year_after_filtered["features"])) - #if no features are removed, return the full collection of year_after - if len(removed_features)==0: + # if no features are removed, return the full collection of year_after + if len(removed_features) == 0: return collection_year_after # if features are removed, search for the features in year before collection_year_before = get_collection_grb_fiscal_parcels( - year=str(date.year-1), + year=str(date.year - 1), geometry=geometry, partition=partition, crs=crs, ) kept_features = [] - if "features" in collection_year_before and len(collection_year_before)>0: + if "features" in collection_year_before and len(collection_year_before) > 0: for feature in collection_year_before["features"]: for rf in removed_features: geom_feature = shape(feature["geometry"]) - geom_removed_feature= shape(rf["geometry"]) + geom_removed_feature = shape(rf["geometry"]) if intersects(geom_feature, geom_removed_feature): - intersection =safe_intersection(geom_feature, geom_removed_feature) - if intersection.area>1: + intersection = safe_intersection(geom_feature, geom_removed_feature) + if intersection.area > 1: if feature not in kept_features: kept_features.append(feature) + # search for intersection and check if 
it more than x% + # keep these features - #search for intersection and check if it more than x% - #keep these features - - #add them to + # add them to collection_specific_date = deepcopy(collection_year_after_filtered) filtered_features = collection_year_after_filtered["features"] specific_date_features = filtered_features + kept_features logging.debug(len(specific_date_features)) - collection_specific_date["features"]=specific_date_features + collection_specific_date["features"] = specific_date_features return collection_specific_date -def update_to_actual_grb(featurecollection, id_theme_fieldname, formula_field=FORMULA_FIELD_NAME, max_distance_for_actualisation=2, feedback=None ): + +def update_to_actual_grb( + featurecollection, + id_theme_fieldname, + formula_field=FORMULA_FIELD_NAME, + max_distance_for_actualisation=2, + feedback=None, +): """ Function to update a thematic featurecollection to the most actual version of GRB. Important to notice that the featurecollection needs a 'formula' for the base-alignment. @@ -349,17 +366,27 @@ def update_to_actual_grb(featurecollection, id_theme_fieldname, formula_field=FO except Exception: geom = Polygon() logger.feedback_debug("id theme: " + id_theme) - logger.feedback_debug ("geometry (wkt): " + geom.wkt) + logger.feedback_debug("geometry (wkt): " + geom.wkt) dict_thematic[id_theme] = geom try: - dict_thematic_props[id_theme] = {FORMULA_FIELD_NAME: json.loads(feature["properties"][formula_field])} - logger.feedback_debug ("formula: " +str(dict_thematic_props[id_theme])) + dict_thematic_props[id_theme] = { + FORMULA_FIELD_NAME: json.loads(feature["properties"][formula_field]) + } + logger.feedback_debug("formula: " + str(dict_thematic_props[id_theme])) except Exception: - raise Exception ("Formula -attribute-field (json) cannot be loaded") + raise Exception("Formula -attribute-field (json) cannot be loaded") try: logger.feedback_debug(str(dict_thematic_props[id_theme])) - if LAST_VERSION_DATE in dict_thematic_props[id_theme][FORMULA_FIELD_NAME] and dict_thematic_props[id_theme][FORMULA_FIELD_NAME][LAST_VERSION_DATE] is not None and dict_thematic_props[id_theme][FORMULA_FIELD_NAME][LAST_VERSION_DATE] != "": - str_lvd = dict_thematic_props[id_theme][FORMULA_FIELD_NAME][LAST_VERSION_DATE] + if ( + LAST_VERSION_DATE in dict_thematic_props[id_theme][FORMULA_FIELD_NAME] + and dict_thematic_props[id_theme][FORMULA_FIELD_NAME][LAST_VERSION_DATE] + is not None + and dict_thematic_props[id_theme][FORMULA_FIELD_NAME][LAST_VERSION_DATE] + != "" + ): + str_lvd = dict_thematic_props[id_theme][FORMULA_FIELD_NAME][ + LAST_VERSION_DATE + ] lvd = datetime.strptime(str_lvd, DATE_FORMAT).date() if lvd < last_version_date: last_version_date = lvd @@ -379,23 +406,33 @@ def update_to_actual_grb(featurecollection, id_theme_fieldname, formula_field=FO date_end=datetime_end, one_by_one=False, ) - logger.feedback_info("Number of possible affected OE-thematic during timespan: " + str(len(dict_affected))) + logger.feedback_info( + "Number of possible affected OE-thematic during timespan: " + + str(len(dict_affected)) + ) if len(dict_affected) == 0: - logger.feedback_info("No change detected in referencelayer during timespan. Script is finished") + logger.feedback_info( + "No change detected in referencelayer during timespan. 
Script is finished" + ) return {} logger.feedback_debug(str(datetime_start)) logger.feedback_debug(str(formula_field)) # Initiate a Aligner to reference thematic features to the actual borders actual_aligner = Aligner(feedback=feedback) - actual_aligner.load_thematic_data(DictLoader(data_dict=dict_affected,data_dict_properties=dict_thematic_props)) + actual_aligner.load_thematic_data( + DictLoader(data_dict=dict_affected, data_dict_properties=dict_thematic_props) + ) actual_aligner.load_reference_data( - GRBActualLoader(grb_type=GRBType.ADP, partition=1000, aligner=actual_aligner)) + GRBActualLoader(grb_type=GRBType.ADP, partition=1000, aligner=actual_aligner) + ) - actual_aligner.relevant_distances = np.arange(0, max_distance_for_actualisation * 100, 10, dtype=int) / 100 + actual_aligner.relevant_distances = ( + np.arange(0, max_distance_for_actualisation * 100, 10, dtype=int) / 100 + ) dict_evaluated, prop_dictionary = actual_aligner.compare( - threshold_area=5, threshold_percentage=1, - dict_unchanged=dict_unchanged) + threshold_area=5, threshold_percentage=1, dict_unchanged=dict_unchanged + ) return get_series_geojson_dict( dict_evaluated, @@ -412,7 +449,7 @@ def __init__(self, grb_type: GRBType, aligner, partition: int = 1000): self.grb_type = grb_type self.part = partition self.data_dict_source["source"] = grb_type.value - self.versiondate_info= {"name": GRB_VERSION_DATE,"format": DATE_FORMAT} + self.versiondate_info = {"name": GRB_VERSION_DATE, "format": DATE_FORMAT} def load_data(self): if not self.aligner.dict_thematic: @@ -441,7 +478,7 @@ def __init__(self, year: str, aligner, partition=1000): self.data_dict_source[VERSION_DATE] = datetime(int(year), 1, 1).strftime( DATE_FORMAT ) - self.versiondate_info= {"name": GRB_VERSION_DATE,"format": datetime_format_TZ} + self.versiondate_info = {"name": GRB_VERSION_DATE, "format": datetime_format_TZ} def load_data(self): if not self.aligner.dict_thematic: @@ -457,35 +494,40 @@ def load_data(self): self.aligner.logger.feedback_info(f"Adpf downloaded for year: {self.year}") return super().load_data() + class GRBSpecificDateParcelLoader(GeoJsonLoader): def __init__(self, date, aligner, partition=1000): logging.warning("experimental loader; use with care!!!") try: - date = datetime.strptime(date, DATE_FORMAT - ).date() - if date.year>=datetime.now().year: - raise ValueError("The GRBSpecificDateParcelLoader can only be used for dates prior to the current year.") + date = datetime.strptime(date, DATE_FORMAT).date() + if date.year >= datetime.now().year: + raise ValueError( + "The GRBSpecificDateParcelLoader can only be used for dates prior to the current year." 
+ ) except Exception: - raise ValueError("No valid date, please provide a date in the format: " + DATE_FORMAT) + raise ValueError( + "No valid date, please provide a date in the format: " + DATE_FORMAT + ) super().__init__(_input=None, id_property=GRB_PARCEL_ID) self.aligner = aligner self.date = date self.part = partition self.data_dict_source["source"] = "Adp" self.data_dict_source[VERSION_DATE] = date.strftime(DATE_FORMAT) - self.versiondate_info= {"name": GRB_VERSION_DATE,"format": datetime_format_TZ} + self.versiondate_info = {"name": GRB_VERSION_DATE, "format": datetime_format_TZ} def load_data(self): if not self.aligner.dict_thematic: raise ValueError("Thematic data not loaded") geom_union = buffer_pos(self.aligner.get_thematic_union(), MAX_REFERENCE_BUFFER) - collection= get_collection_grb_parcels_by_date( + collection = get_collection_grb_parcels_by_date( date=self.date, geometry=geom_union, partition=self.part, crs=self.aligner.CRS, ) self.input = dict(collection) - self.aligner.logger.feedback_info(f"Parcels downloaded for specific date: {self.date.strftime(DATE_FORMAT)}") + self.aligner.logger.feedback_info( + f"Parcels downloaded for specific date: {self.date.strftime(DATE_FORMAT)}" + ) return super().load_data() - diff --git a/examples/example_131635.py b/examples/example_131635.py index 095d49c..27e9655 100644 --- a/examples/example_131635.py +++ b/examples/example_131635.py @@ -16,6 +16,7 @@ aligner.load_thematic_data(loader) loader = GRBActualLoader(grb_type=GRBType.ADP, partition=1000, aligner=aligner) aligner.load_reference_data(loader) + ref_geojson = aligner.get_input_as_geojson() # RESULTS rel_dist = 2 diff --git a/tests/test_aligner.py b/tests/test_aligner.py index 4fb9578..2dc2f7d 100644 --- a/tests/test_aligner.py +++ b/tests/test_aligner.py @@ -14,7 +14,11 @@ from brdr.enums import OpenbaarDomeinStrategy from brdr.geometry_utils import _grid_bounds from brdr.geometry_utils import buffer_neg_pos -from brdr.grb import GRBActualLoader, GRBFiscalParcelLoader, get_geoms_affected_by_grb_change +from brdr.grb import ( + GRBActualLoader, + GRBFiscalParcelLoader, + get_geoms_affected_by_grb_change, +) from brdr.loader import GeoJsonLoader, DictLoader from brdr.typings import FeatureCollection, ProcessResult from brdr.utils import get_series_geojson_dict @@ -67,7 +71,9 @@ def test_export_results(self): path = "./tmp/" resulttype = AlignerResultType.PROCESSRESULTS aligner.save_results(path=path, resulttype=resulttype) - filenames = [resulttype.value + f"_{k}.geojson" for k in ProcessResult.__annotations__] + filenames = [ + resulttype.value + f"_{k}.geojson" for k in ProcessResult.__annotations__ + ] for file_name in os.listdir(path): os.remove(path + file_name) assert file_name in filenames @@ -274,9 +280,7 @@ def test_process_circle(self): ) ) relevant_distance = 1 - results_dict = self.sample_aligner.process( - relevant_distance=relevant_distance - ) + results_dict = self.sample_aligner.process(relevant_distance=relevant_distance) self.assertEqual(geometry, results_dict["key"][relevant_distance]["result"]) def test__prepare_thematic_data(self): @@ -339,9 +343,7 @@ def test_fully_aligned_input(self): ) self.sample_aligner.load_reference_data(DictLoader({"ref_id_1": aligned_shape})) relevant_distance = 1 - result = self.sample_aligner.process( - relevant_distance=relevant_distance - ) + result = self.sample_aligner.process(relevant_distance=relevant_distance) assert result["theme_id_1"][relevant_distance].get("result") == aligned_shape assert 
result["theme_id_1"][relevant_distance].get("result_diff") == Polygon() assert ( @@ -367,13 +369,19 @@ def test_evaluate(self): base_aligner.load_reference_data( GRBFiscalParcelLoader(aligner=base_aligner, year="2022", partition=1000) ) - relevant_distance=1 + relevant_distance = 1 base_process_result = base_aligner.process(relevant_distance=relevant_distance) thematic_dict_formula = {} thematic_dict_result = {} for key in base_process_result: - thematic_dict_result[key] = base_process_result[key][relevant_distance]["result"] - thematic_dict_formula[key] = {FORMULA_FIELD_NAME: base_aligner.get_brdr_formula(thematic_dict_result[key])} + thematic_dict_result[key] = base_process_result[key][relevant_distance][ + "result" + ] + thematic_dict_formula[key] = { + FORMULA_FIELD_NAME: base_aligner.get_brdr_formula( + thematic_dict_result[key] + ) + } aligner_result = Aligner() aligner_result.load_thematic_data(DictLoader(thematic_dict_result)) dict_affected, dict_unchanged = get_geoms_affected_by_grb_change( @@ -386,7 +394,10 @@ def test_evaluate(self): actual_aligner = Aligner() actual_aligner.load_thematic_data( - DictLoader(data_dict=dict_affected, data_dict_properties=thematic_dict_formula)) + DictLoader( + data_dict=dict_affected, data_dict_properties=thematic_dict_formula + ) + ) loader = GRBActualLoader( grb_type=GRBType.ADP, partition=1000, aligner=actual_aligner ) @@ -394,9 +405,9 @@ def test_evaluate(self): series = np.arange(0, 200, 10, dtype=int) / 100 dict_evaluated, prop_dictionary = actual_aligner.compare( - threshold_area=5, - threshold_percentage=1, - ) + threshold_area=5, + threshold_percentage=1, + ) fc = get_series_geojson_dict( dict_evaluated, crs=actual_aligner.CRS, @@ -404,7 +415,6 @@ def test_evaluate(self): series_prop_dict=prop_dictionary, ) - def test_fully_aligned_geojson_output(self): aligned_shape = from_wkt( "MultiPolygon (((173463.11530961000244133 174423.83310307000647299, " From 188e2215328824a0fad315182baf9d791d0dbaf3 Mon Sep 17 00:00:00 2001 From: dieuska Date: Thu, 19 Sep 2024 17:29:14 +0200 Subject: [PATCH 28/35] updated versionnr to 0.3.0 --- CHANGES.md | 30 +++++++++++++++++++++++++++++- brdr/__init__.py | 2 +- pyproject.toml | 2 +- 3 files changed, 31 insertions(+), 3 deletions(-) diff --git a/CHANGES.md b/CHANGES.md index 83441c6..c8bfd5a 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -16,4 +16,32 @@ - fixed last_version_date in aligner.get_formula() - fixed logic of evaluate() in grb.py -- added function to transform geojson to consistent geometry-type (MultiPolygon) \ No newline at end of file +- added function to transform geojson to consistent geometry-type (MultiPolygon) + +# 0.3.0 + +- Refactoring: + - refactor the structure of the (internal) dicts: dict_series, dict_predicted,.... 
enhancement question [#57] + - refactoring of 'formula-function': generic vs GRB enhancement question [#59] + - remove deprecated loaders from code enhancement [#77] + - simplify the core-functionalities of Aligner [#89] + - cleanup unused functions cleanup [#81] + - +- Functionalities: + - Add brdr-version to formula enhancement [#66] + - predictor: when multiple predictions with the same resulting geometries are found, only keep the smallest enhancement [#70] + - Predictor: add a field/attribute that states how many predictions are found, so it can be used in the output enhancement [#69] + - Add function to grb.py: "update_to_actual_version" brdrQ enhancement [#64] + - GRBSpecificdateLoader: Alignment on GRB (parcels) on specific date [e.g. alignment-date] enhancement [#63] + - Create OnroerendErfgoedLoader; to replace other util function to load OE-data enhancement [#65] +- Bugfixing: + - adding a safe_equals-function to catch GEOSException bug enhancement [#71] + - research: evaluation of case - to check bug enhancement research [#67] + + + + + + + + diff --git a/brdr/__init__.py b/brdr/__init__.py index 3ced358..493f741 100644 --- a/brdr/__init__.py +++ b/brdr/__init__.py @@ -1 +1 @@ -__version__ = "0.2.1" +__version__ = "0.3.0" diff --git a/pyproject.toml b/pyproject.toml index 7360032..56a919b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "brdr" -version = "0.2.1" +version = "0.3.0" description = "BRDR - a Python library to assist in realigning (multi-)polygons (OGC Simple Features) to reference borders " readme = { file = "README.md", content-type = "text/markdown" } license = { file = "LICENSE" } From 87142b41277c38b0e3ef6b378748da4415027b5a Mon Sep 17 00:00:00 2001 From: dieuska Date: Thu, 19 Sep 2024 19:14:05 +0200 Subject: [PATCH 29/35] fix for nr_calculations --- CHANGES.md | 30 ++++++++++++++++++++-------------- brdr/aligner.py | 6 +----- brdr/constants.py | 1 + brdr/utils.py | 5 ++++- examples/example_131635.py | 2 ++ examples/example_predictor.py | 4 ++-- tests/test_examples.py | 2 +- 7 files changed, 27 insertions(+), 23 deletions(-) diff --git a/CHANGES.md b/CHANGES.md index c8bfd5a..9b6e01c 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -21,22 +21,24 @@ # 0.3.0 - Refactoring: - - refactor the structure of the (internal) dicts: dict_series, dict_predicted,.... 
enhancement question [#57] + - refactoring of 'formula-unction': generic vs GRB enhancement question [#59] + - remove deprecated loaders from code enhancement [#77] + - simplify the core-functionalities of Aligner [#89] + - cleanup unused functions cleanup [#81] + - - Functionalities: - - Add brdr-version to formula enhancement [#66] - - predictor: when multiple predictions with the same resulting geometries are found, only keep the smallest enhancement [#70] - - Predictor: add a field/attribute that states how many predictions are found, so it can used in the output enhancement [#69] - - Add function to grb.py: "update_to_actual_version" brdrQ enhancement [#64] - - GRBSpecificdateLoader: Alignment on GRB (parcels) on specific date [f.e alignment-date] enhancement [#63] - - Create OnroerendErfgoedLoader; to replace other utirl function to load OE-data enhancement [#65] + - Add brdr-version to formula enhancement [#66] + - predictor: when multiple predictions with the same resulting geometries are found, only keep the smallest + enhancement [#70] + - Predictor: add a field/attribute that states how many predictions are found, so it can used in the output + enhancement [#69] + - Add function to grb.py: "update_to_actual_version" brdrQ enhancement [#64] + - GRBSpecificdateLoader: Alignment on GRB (parcels) on specific date [f.e alignment-date] enhancement [#63] + - Create OnroerendErfgoedLoader; to replace other utirl function to load OE-data enhancement [#65] - Bugfixing: - - adding a safe_equals-function to catch GEOsException bug enhancement [#71] - - research: evaluation of case - to check bug enhancement research [#67] + - adding a safe_equals-function to catch GEOsException bug enhancement [#71] + - research: evaluation of case - to check bug enhancement research [#67] diff --git a/brdr/aligner.py b/brdr/aligner.py index 2ddc46b..cd1f5f5 100644 --- a/brdr/aligner.py +++ b/brdr/aligner.py @@ -702,15 +702,11 @@ def get_results_as_geojson(self, resulttype= AlignerResultType.PROCESSRESULTS, f prop_dictionary = defaultdict(dict) for theme_id, results_dict in dict_series.items(): - nr_calculations = len(results_dict) for relevant_distance, process_results in results_dict.items(): - prop_dictionary[theme_id][relevant_distance] = { - "nr_calculations": nr_calculations - } if formula: result = process_results["result"] formula = self.get_brdr_formula(result) - prop_dictionary[theme_id][relevant_distance][FORMULA_FIELD_NAME] =json.dumps(formula) + prop_dictionary[theme_id][relevant_distance] ={FORMULA_FIELD_NAME:json.dumps(formula)} return get_series_geojson_dict( dict_series, diff --git a/brdr/constants.py b/brdr/constants.py index 6488304..f43e20f 100644 --- a/brdr/constants.py +++ b/brdr/constants.py @@ -41,6 +41,7 @@ FORMULA_FIELD_NAME = "brdr_formula" EVALUATION_FIELD_NAME = "brdr_evaluation" +NR_CALCULATION_FIELD_NAME = "brdr_nr_calculations" RELEVANT_DISTANCE_FIELD_NAME = "brdr_relevant_distance" LAST_VERSION_DATE = "last_version_date" VERSION_DATE = "version_date" diff --git a/brdr/utils.py b/brdr/utils.py index 980e433..01e1b2b 100644 --- a/brdr/utils.py +++ b/brdr/utils.py @@ -14,7 +14,8 @@ from shapely.geometry import shape from shapely.geometry.base import BaseGeometry -from brdr.constants import MULTI_SINGLE_ID_SEPARATOR, DEFAULT_CRS, DOWNLOAD_LIMIT, RELEVANT_DISTANCE_FIELD_NAME +from brdr.constants import MULTI_SINGLE_ID_SEPARATOR, DEFAULT_CRS, DOWNLOAD_LIMIT, RELEVANT_DISTANCE_FIELD_NAME, \ + NR_CALCULATION_FIELD_NAME from brdr.enums import DiffMetric from brdr.geometry_utils 
import get_partitions, get_bbox from brdr.typings import ProcessResult @@ -33,10 +34,12 @@ def get_series_geojson_dict( features_list_dict = {} for theme_id, results_dict in series_dict.items(): + nr_calculations = len(results_dict) prop_dict = dict(series_prop_dict or {}).get(theme_id, {}) for relative_distance, process_result in results_dict.items(): properties = prop_dict.get(relative_distance, {}) properties[id_field] = theme_id + properties[NR_CALCULATION_FIELD_NAME] = nr_calculations properties[RELEVANT_DISTANCE_FIELD_NAME] = relative_distance for results_type, geom in process_result.items(): diff --git a/examples/example_131635.py b/examples/example_131635.py index 27e9655..7051514 100644 --- a/examples/example_131635.py +++ b/examples/example_131635.py @@ -21,6 +21,8 @@ # RESULTS rel_dist = 2 dict_results = aligner.process(relevant_distance=rel_dist, od_strategy=4) + fcs = aligner.get_results_as_geojson() + print(fcs["result"]) # put resulting tuple in a dictionary aligner.save_results("output/", formula=True) diff --git a/examples/example_predictor.py b/examples/example_predictor.py index ffb7949..4344d30 100644 --- a/examples/example_predictor.py +++ b/examples/example_predictor.py @@ -1,7 +1,7 @@ import numpy as np from brdr.aligner import Aligner -from brdr.enums import GRBType +from brdr.enums import GRBType, AlignerResultType from brdr.grb import GRBActualLoader from brdr.loader import GeoJsonFileLoader @@ -27,7 +27,7 @@ dict_series, dict_predictions, diffs = aligner.predictor( relevant_distances=series, od_strategy=4, threshold_overlap_percentage=50 ) - fcs = aligner.get_results_as_geojson(formula=False) + fcs = aligner.get_results_as_geojson(resulttype=AlignerResultType.PREDICTIONS,formula=False) print(fcs["result"]) # for key in dict_predictions: # show_map( diff --git a/tests/test_examples.py b/tests/test_examples.py index 1f4c720..3bd1f02 100644 --- a/tests/test_examples.py +++ b/tests/test_examples.py @@ -187,7 +187,7 @@ def test_example_wanted_changes(self): # Example how to use the Aligner rel_dist = 2 - aligner.process(relevant_distance=rel_dist,od_strategy= 4) + aligner.process(relevant_distance=rel_dist, od_strategy=4) # Example how to use a series (for histogram) series = np.arange(0, 300, 10, dtype=int) / 100 From bbab825477fc2f79dcab2d4fed6a55df419a9749 Mon Sep 17 00:00:00 2001 From: dieuska Date: Thu, 19 Sep 2024 19:24:08 +0200 Subject: [PATCH 30/35] updated CHANGES --- CHANGES.md | 32 ++++++++++++++++---------------- 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/CHANGES.md b/CHANGES.md index 9b6e01c..7248ff5 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -20,25 +20,25 @@ # 0.3.0 +! Not Backwards compatable ! + - Refactoring: - - refactor the structure of the (internal) dicts: dict_series, dict_predicted,.... enhancement question [#57] - - refactoring of 'formula-unction': generic vs GRB enhancement question [#59] - - remove deprecated loaders from code enhancement [#77] - - simplify the core-functionalities of Aligner [#89] - - cleanup unused functions cleanup [#81] - - + - refactor the structure of the (internal) dicts: dict_series, dict_predicted. 
More logical and faster [#57] + - refactoring of 'formula-function': more generic [#59] + - removed deprecated loaders from codebase [#77] + - simplify the core-functionalities of Aligner: process, predict, compare [#89] + - cleanup unused functions [#81] + - Functionalities: - - Add brdr-version to formula enhancement [#66] - - predictor: when multiple predictions with the same resulting geometries are found, only keep the smallest - enhancement [#70] - - Predictor: add a field/attribute that states how many predictions are found, so it can used in the output - enhancement [#69] - - Add function to grb.py: "update_to_actual_version" brdrQ enhancement [#64] - - GRBSpecificdateLoader: Alignment on GRB (parcels) on specific date [f.e alignment-date] enhancement [#63] - - Create OnroerendErfgoedLoader; to replace other utirl function to load OE-data enhancement [#65] + - Add brdr-version to formula [#66] + - predict: filter duplicate predictions [#70] + - Predictor: add a attribute (nr_calculations) that states how many predictions are found, so it can used in the + output [#69] + - Added GRB-function"update_to_actual_version", to be used in brdrQ [#64] + - Added GRBSpecificDateLoader: Alignment on GRB (parcels) on specific date [#63] + - Added OnroerendErfgoedLoader, to load OnroerendErfgoed-data [#65] - Bugfixing: - - adding a safe_equals-function to catch GEOsException bug enhancement [#71] - - research: evaluation of case - to check bug enhancement research [#67] + - adding a safe_equals-function to catch GEOsException bug [#71] From 719f2874c9ca3392cacdd17d19ba33453aced903 Mon Sep 17 00:00:00 2001 From: dieuska Date: Thu, 19 Sep 2024 19:29:50 +0200 Subject: [PATCH 31/35] black formatted --- brdr/aligner.py | 194 +++++++++++++-------- brdr/oe.py | 78 ++++++--- brdr/utils.py | 27 ++- examples/example_evaluate.py | 24 +-- examples/example_evaluate_ao.py | 20 ++- examples/example_parcel_change_detector.py | 37 ++-- examples/example_predictor.py | 4 +- tests/test_grb.py | 63 +++++-- tests/test_oe.py | 26 +-- tests/test_utils.py | 1 + 10 files changed, 304 insertions(+), 170 deletions(-) diff --git a/brdr/aligner.py b/brdr/aligner.py index cd1f5f5..06c70f6 100644 --- a/brdr/aligner.py +++ b/brdr/aligner.py @@ -18,12 +18,25 @@ from shapely.geometry.base import BaseGeometry from brdr import __version__ -from brdr.constants import BUFFER_MULTIPLICATION_FACTOR, LAST_VERSION_DATE, VERSION_DATE, DATE_FORMAT, \ - THRESHOLD_EXCLUSION_PERCENTAGE, THRESHOLD_EXCLUSION_AREA, FORMULA_FIELD_NAME, EVALUATION_FIELD_NAME +from brdr.constants import ( + BUFFER_MULTIPLICATION_FACTOR, + LAST_VERSION_DATE, + VERSION_DATE, + DATE_FORMAT, + THRESHOLD_EXCLUSION_PERCENTAGE, + THRESHOLD_EXCLUSION_AREA, + FORMULA_FIELD_NAME, + EVALUATION_FIELD_NAME, +) from brdr.constants import CORR_DISTANCE from brdr.constants import DEFAULT_CRS from brdr.constants import THRESHOLD_CIRCLE_RATIO -from brdr.enums import OpenbaarDomeinStrategy, Evaluation, AlignerResultType, AlignerInputType +from brdr.enums import ( + OpenbaarDomeinStrategy, + Evaluation, + AlignerResultType, + AlignerInputType, +) from brdr.geometry_utils import buffer_neg from brdr.geometry_utils import buffer_neg_pos from brdr.geometry_utils import buffer_pos @@ -60,7 +73,7 @@ def __init__( *, feedback=None, relevant_distance=1, - relevant_distances= np.arange(0, 200, 10, dtype=int) / 100, + relevant_distances=np.arange(0, 200, 10, dtype=int) / 100, threshold_overlap_percentage=50, od_strategy=OpenbaarDomeinStrategy.SNAP_SINGLE_SIDE, crs=DEFAULT_CRS, @@ -128,9 +141,9 
@@ def __init__( # results # output-dictionaries (all results of process()), grouped by theme_id and relevant_distance - self.dict_processresults: dict[str, dict[float, ProcessResult]]= {} + self.dict_processresults: dict[str, dict[float, ProcessResult]] = {} # dictionary with the 'predicted' results, grouped by theme_id and relevant_distance - self.dict_predictions : dict[str, dict[float, ProcessResult]] ={} + self.dict_predictions: dict[str, dict[float, ProcessResult]] = {} # Coordinate reference system # thematic geometries and reference geometries are assumed to be in the same CRS @@ -159,7 +172,7 @@ def load_reference_data(self, loader: Loader): ( self.dict_reference, self.dict_reference_properties, - self.dict_reference_source + self.dict_reference_source, ) = loader.load_data() self._prepare_reference_data() @@ -169,7 +182,7 @@ def load_reference_data(self, loader: Loader): def process_geometry( self, input_geometry: BaseGeometry, - relevant_distance:float=1, + relevant_distance: float = 1, od_strategy=OpenbaarDomeinStrategy.SNAP_SINGLE_SIDE, threshold_overlap_percentage=50, ) -> ProcessResult: @@ -271,7 +284,7 @@ def process_geometry( def process( self, - relevant_distances: Iterable[float]=None, + relevant_distances: Iterable[float] = None, relevant_distance=1, od_strategy=OpenbaarDomeinStrategy.SNAP_SINGLE_SIDE, threshold_overlap_percentage=50, @@ -300,8 +313,8 @@ def process( } """ if relevant_distances is None: - relevant_distances=[relevant_distance] - self.relevant_distance=relevant_distance + relevant_distances = [relevant_distance] + self.relevant_distance = relevant_distance self.relevant_distances = relevant_distances self.od_strategy = od_strategy self.threshold_overlap_percentage = threshold_overlap_percentage @@ -312,12 +325,14 @@ def process( if self.multi_as_single_modus: dict_thematic = multipolygons_to_singles(dict_thematic) - for key,geometry in dict_thematic.items(): - self.logger.feedback_info(f"thematic id {str(key)} processed with relevant distances (m) [{str(self.relevant_distances)}]") + for key, geometry in dict_thematic.items(): + self.logger.feedback_info( + f"thematic id {str(key)} processed with relevant distances (m) [{str(self.relevant_distances)}]" + ) dict_series[key] = {} for relevant_distance in self.relevant_distances: try: - self.relevant_distance=relevant_distance + self.relevant_distance = relevant_distance processed_result = self.process_geometry( geometry, self.relevant_distance, @@ -327,7 +342,7 @@ def process( except ValueError as e: self.logger.feedback_warning(str(e)) - dict_series[key][self.relevant_distance] = processed_result + dict_series[key][self.relevant_distance] = processed_result if self.multi_as_single_modus: dict_series = merge_process_results(dict_series) @@ -467,25 +482,29 @@ def predictor( ) logging.debug(str(theme_id)) if len(zero_streaks) == 0: - dict_predictions[theme_id][relevant_distances[0]] = dict_series[theme_id][ - relevant_distances[0] - ] + dict_predictions[theme_id][relevant_distances[0]] = dict_series[ + theme_id + ][relevant_distances[0]] logging.info("No zero-streaks found for: " + str(theme_id)) for zs in zero_streaks: - dict_predictions[theme_id] [zs[0]]= dict_series[theme_id][zs[0]] + dict_predictions[theme_id][zs[0]] = dict_series[theme_id][zs[0]] - #Check if the predicted reldists are unique (and remove duplicated predictions + # Check if the predicted reldists are unique (and remove duplicated predictions dict_predictions_unique = defaultdict(dict) - for theme_id,dist_results in 
dict_predictions.items(): + for theme_id, dist_results in dict_predictions.items(): dict_predictions_unique[theme_id] = {} predicted_geoms_for_theme_id = [] for rel_dist, processresults in dist_results.items(): predicted_geom = processresults["result"] - if not _equal_geom_in_array(predicted_geom,predicted_geoms_for_theme_id): + if not _equal_geom_in_array( + predicted_geom, predicted_geoms_for_theme_id + ): dict_predictions_unique[theme_id][rel_dist] = processresults predicted_geoms_for_theme_id.append(processresults["result"]) else: - self.logger.feedback_info(f"Duplicate prediction found for key {theme_id} at distance {rel_dist}: Prediction excluded") + self.logger.feedback_info( + f"Duplicate prediction found for key {theme_id} at distance {rel_dist}: Prediction excluded" + ) self.dict_predictions = dict_predictions_unique @@ -495,18 +514,17 @@ def predictor( diffs_dict, ) - def compare( - self, - threshold_area=5, - threshold_percentage=1, - dict_unchanged=None, + self, + threshold_area=5, + threshold_percentage=1, + dict_unchanged=None, ): """ Compares input-geometries (with formula) and evaluates these geometries: An attribute is added to evaluate and decide if new proposals can be used """ - dict_series,dict_predictions,diffs = self.predictor(self.relevant_distances) + dict_series, dict_predictions, diffs = self.predictor(self.relevant_distances) if dict_unchanged is None: dict_unchanged = {} theme_ids = list(dict_series.keys()) @@ -528,10 +546,17 @@ def compare( break geomresult = dict_results[dist]["result"] actual_formula = self.get_brdr_formula(geomresult) - prop_dictionary[theme_id][dist][FORMULA_FIELD_NAME] = json.dumps(actual_formula) + prop_dictionary[theme_id][dist][FORMULA_FIELD_NAME] = json.dumps( + actual_formula + ) base_formula = None - if theme_id in self.dict_thematic_properties and FORMULA_FIELD_NAME in self.dict_thematic_properties[theme_id]: - base_formula = self.dict_thematic_properties[theme_id][FORMULA_FIELD_NAME] + if ( + theme_id in self.dict_thematic_properties + and FORMULA_FIELD_NAME in self.dict_thematic_properties[theme_id] + ): + base_formula = self.dict_thematic_properties[theme_id][ + FORMULA_FIELD_NAME + ] equality, prop = _check_equality( base_formula, actual_formula, @@ -539,11 +564,15 @@ def compare( threshold_percentage, ) if equality: - dict_evaluated_result[theme_id][dist] = dict_predictions[theme_id][dist] + dict_evaluated_result[theme_id][dist] = dict_predictions[theme_id][ + dist + ] prop_dictionary[theme_id][dist][EVALUATION_FIELD_NAME] = prop break - evaluated_theme_ids = [theme_id for theme_id, value in dict_evaluated_result.items() if value != {}] + evaluated_theme_ids = [ + theme_id for theme_id, value in dict_evaluated_result.items() if value != {} + ] # fill where no equality is found/ The biggest predicted distance is returned as # proposal @@ -555,7 +584,9 @@ def compare( prop_dictionary[theme_id][0][FORMULA_FIELD_NAME] = json.dumps( self.get_brdr_formula(result["result"]) ) - prop_dictionary[theme_id][0][EVALUATION_FIELD_NAME] = Evaluation.NO_PREDICTION_5 + prop_dictionary[theme_id][0][ + EVALUATION_FIELD_NAME + ] = Evaluation.NO_PREDICTION_5 continue # Add all predicted features so they can be manually checked for dist in dict_predictions[theme_id].keys(): @@ -564,19 +595,20 @@ def compare( prop_dictionary[theme_id][dist][FORMULA_FIELD_NAME] = json.dumps( self.get_brdr_formula(predicted_resultset["result"]) ) - prop_dictionary[theme_id][dist][EVALUATION_FIELD_NAME] = Evaluation.TO_CHECK_4 + prop_dictionary[theme_id][dist][ + 
EVALUATION_FIELD_NAME + ] = Evaluation.TO_CHECK_4 for theme_id, geom in dict_unchanged.items(): - prop_dictionary[theme_id] = {0: - {"result": geom, - EVALUATION_FIELD_NAME: Evaluation.NO_CHANGE_6, - FORMULA_FIELD_NAME: json.dumps(self.get_brdr_formula(geom)) - } - } + prop_dictionary[theme_id] = { + 0: { + "result": geom, + EVALUATION_FIELD_NAME: Evaluation.NO_CHANGE_6, + FORMULA_FIELD_NAME: json.dumps(self.get_brdr_formula(geom)), + } + } return dict_evaluated_result, prop_dictionary - - def get_brdr_formula(self, geometry: BaseGeometry, with_geom=False): """ Calculates formula-related information based on the input geometry. @@ -603,7 +635,7 @@ def get_brdr_formula(self, geometry: BaseGeometry, with_geom=False): "reference_source": self.dict_reference_source, "full": True, "reference_features": {}, - "reference_od": None + "reference_od": None, } full_total = True @@ -637,11 +669,11 @@ def get_brdr_formula(self, geometry: BaseGeometry, with_geom=False): last_version_date = version_date if perc > 99.99: - full = True - area = round(geom_reference.area, 2) - perc = 100 - if with_geom: - geom = geom_reference + full = True + area = round(geom_reference.area, 2) + perc = 100 + if with_geom: + geom = geom_reference else: full = False full_total = False @@ -652,12 +684,16 @@ def get_brdr_formula(self, geometry: BaseGeometry, with_geom=False): dict_formula["reference_features"][key_ref] = { "full": full, "area": area, - "percentage": perc + "percentage": perc, } if version_date is not None: - dict_formula["reference_features"][key_ref][VERSION_DATE] = version_date.strftime(DATE_FORMAT) + dict_formula["reference_features"][key_ref][VERSION_DATE] = ( + version_date.strftime(DATE_FORMAT) + ) if with_geom: - dict_formula["reference_features"][key_ref]["geometry"] = to_geojson(geom) + dict_formula["reference_features"][key_ref]["geometry"] = to_geojson( + geom + ) dict_formula["full"] = full_total if last_version_date is not None: @@ -672,9 +708,7 @@ def get_brdr_formula(self, geometry: BaseGeometry, with_geom=False): if geom_od is not None: area_od = round(geom_od.area, 2) if area_od > 0: - dict_formula["reference_od"] = { - "area": area_od - } + dict_formula["reference_od"] = {"area": area_od} if with_geom: dict_formula["reference_od"]["geometry"] = to_geojson(geom_od) self.logger.feedback_debug(str(dict_formula)) @@ -683,7 +717,9 @@ def get_brdr_formula(self, geometry: BaseGeometry, with_geom=False): ##########EXPORTERS######################## ########################################### - def get_results_as_geojson(self, resulttype= AlignerResultType.PROCESSRESULTS, formula=False): + def get_results_as_geojson( + self, resulttype=AlignerResultType.PROCESSRESULTS, formula=False + ): """ get a geojson of a dictionary containing the resulting geometries for all 'serial' relevant distances. If no dict_series is given, the dict_result returned. @@ -696,7 +732,9 @@ def get_results_as_geojson(self, resulttype= AlignerResultType.PROCESSRESULTS, f else: raise (ValueError, "AlignerResultType unknown") if dict_series is None or dict_series == {}: - self.logger.feedback_warning ("Empty results: No calculated results to export.") + self.logger.feedback_warning( + "Empty results: No calculated results to export." 
+ ) return {} prop_dictionary = defaultdict(dict) @@ -706,7 +744,9 @@ def get_results_as_geojson(self, resulttype= AlignerResultType.PROCESSRESULTS, f if formula: result = process_results["result"] formula = self.get_brdr_formula(result) - prop_dictionary[theme_id][relevant_distance] ={FORMULA_FIELD_NAME:json.dumps(formula)} + prop_dictionary[theme_id][relevant_distance] = { + FORMULA_FIELD_NAME: json.dumps(formula) + } return get_series_geojson_dict( dict_series, @@ -715,7 +755,7 @@ def get_results_as_geojson(self, resulttype= AlignerResultType.PROCESSRESULTS, f series_prop_dict=prop_dictionary, ) - def get_input_as_geojson(self,inputtype=AlignerInputType.REFERENCE): + def get_input_as_geojson(self, inputtype=AlignerInputType.REFERENCE): """ get a geojson of the reference polygons """ @@ -732,12 +772,19 @@ def get_input_as_geojson(self,inputtype=AlignerInputType.REFERENCE): raise (ValueError, "AlignerInputType unknown") dict_properties if dict_to_geojson is None or dict_to_geojson == {}: - self.logger.feedback_warning ("Empty input: No input to export.") + self.logger.feedback_warning("Empty input: No input to export.") return {} return geojson_from_dict( - dict_to_geojson, self.CRS, property_id,prop_dict=dict_properties, geom_attributes=False + dict_to_geojson, + self.CRS, + property_id, + prop_dict=dict_properties, + geom_attributes=False, ) - def save_results(self, path, resulttype=AlignerResultType.PROCESSRESULTS, formula=True): + + def save_results( + self, path, resulttype=AlignerResultType.PROCESSRESULTS, formula=True + ): """ Exports analysis results as GeoJSON files. @@ -762,13 +809,16 @@ def save_results(self, path, resulttype=AlignerResultType.PROCESSRESULTS, formul relevant intersection that has to be included in the result. - result_relevant_difference.geojson: Contains the areas with relevant difference that has to be excluded from the result. 
- """ + """ fcs = self.get_results_as_geojson( - formula=formula,resulttype=resulttype, + formula=formula, + resulttype=resulttype, ) for name, fc in fcs.items(): - write_geojson(os.path.join(path, resulttype.value + "_"+ name +".geojson"), fc) + write_geojson( + os.path.join(path, resulttype.value + "_" + name + ".geojson"), fc + ) def get_thematic_union(self): if self.thematic_union is None: @@ -977,7 +1027,6 @@ def _od_snap_all_side(self, geometry): ) return geom_thematic_od, relevant_difference_array, relevant_intersection_array - def _get_reference_union(self): if self.reference_union is None: self.reference_union = make_valid( @@ -985,7 +1034,6 @@ def _get_reference_union(self): ) return self.reference_union - def _postprocess_preresult(self, preresult, geom_thematic) -> ProcessResult: """ Postprocess the preresult with the following actions to create the final result @@ -1154,6 +1202,7 @@ def _add_multi_polygons_from_geom_to_array(geom: BaseGeometry, array): array.append(g) return array + @staticmethod def _calculate_geom_by_intersection_and_reference( geom_intersection: BaseGeometry, @@ -1261,6 +1310,7 @@ def _calculate_geom_by_intersection_and_reference( geom = geom_relevant_intersection # (=empty geometry) return geom, geom_relevant_intersection, geom_relevant_difference + @staticmethod def _get_relevant_polygons_from_geom(geometry: BaseGeometry, buffer_distance: float): """ @@ -1285,16 +1335,17 @@ def _get_relevant_polygons_from_geom(geometry: BaseGeometry, buffer_distance: fl array.append(g) return make_valid(unary_union(array)) + @staticmethod -def _equal_geom_in_array(geom,geom_array): +def _equal_geom_in_array(geom, geom_array): """ Check if a predicted geometry is equal to other predicted geometries in a list. Equality is defined as there is the symmetrical difference is smaller than the CORRECTION DISTANCE Returns True if one of the elements is equal, otherwise False """ for g in geom_array: - #if safe_equals(geom,g): - if buffer_neg(safe_symmetric_difference(geom, g),CORR_DISTANCE).is_empty: + # if safe_equals(geom,g): + if buffer_neg(safe_symmetric_difference(geom, g), CORR_DISTANCE).is_empty: return True return False @@ -1363,4 +1414,3 @@ def _check_equality( if base_formula["full"] and base_formula["full"] and od_alike: return True, Evaluation.EQUALITY_GEOM_3 return False, Evaluation.NO_PREDICTION_5 - diff --git a/brdr/oe.py b/brdr/oe.py index 04c02c7..4a3b715 100644 --- a/brdr/oe.py +++ b/brdr/oe.py @@ -53,15 +53,17 @@ def get_oe_dict_by_ids(objectids, oetype=OEType.AO): logging.warning("deprecated method, use OnroerendErfgoedLoader instead") # TODO remove function dict_thematic = {} - if oetype==OEType.AO: + if oetype == OEType.AO: typename = "aanduidingsobjecten" - #id_property = "aanduid_id" - elif oetype==OEType.EO: + # id_property = "aanduid_id" + elif oetype == OEType.EO: typename = "erfgoedobjecten" - #id_property = "erfgoed_id" + # id_property = "erfgoed_id" else: - logging.warning("Undefined OE-type: " + str(oetype) + ": Empty collection returned") - return {},None + logging.warning( + "Undefined OE-type: " + str(oetype) + ": Empty collection returned" + ) + return {}, None base_url = "https://inventaris.onroerenderfgoed.be/" + typename + "/" headers = {"Accept": "application/json"} @@ -77,7 +79,14 @@ def get_oe_dict_by_ids(objectids, oetype=OEType.AO): return dict_thematic -def get_collection_oe_objects(oetype=OEType.AO,objectids=None,bbox=None,limit=DOWNLOAD_LIMIT, partition=1000 ,crs=DEFAULT_CRS): +def get_collection_oe_objects( + oetype=OEType.AO, + 
objectids=None, + bbox=None, + limit=DOWNLOAD_LIMIT, + partition=1000, + crs=DEFAULT_CRS, +): """ Fetches GeoJSON data for designated heritage objects (aanduidingsobjecten) within a bounding box. @@ -97,16 +106,17 @@ def get_collection_oe_objects(oetype=OEType.AO,objectids=None,bbox=None,limit=DO collection might be truncated if the total number of features exceeds the specified limit. """ - if oetype==OEType.AO: + if oetype == OEType.AO: typename = "ps:ps_aandobj" id_property = "aanduid_id" - elif oetype==OEType.EO: + elif oetype == OEType.EO: typename = "lu:lu_wet_erfgobj_pub" id_property = "erfgoed_id" else: - logging.warning("Undefined OE-type: " + str(oetype) + ": Empty collection returned") - return {},None - + logging.warning( + "Undefined OE-type: " + str(oetype) + ": Empty collection returned" + ) + return {}, None theme_url = ( "https://www.mercator.vlaanderen.be/raadpleegdienstenmercatorpubliek/wfs?" @@ -114,48 +124,62 @@ def get_collection_oe_objects(oetype=OEType.AO,objectids=None,bbox=None,limit=DO f"TYPENAMES={typename}&" f"SRSNAME={crs}" "&outputFormat=application/json" - ) if objectids is not None: - filter = f"&CQL_FILTER={id_property} IN (" + ', '.join(str(o) for o in objectids) + ")" + filter = ( + f"&CQL_FILTER={id_property} IN (" + + ", ".join(str(o) for o in objectids) + + ")" + ) theme_url = theme_url + filter bbox_polygon = None if bbox is not None: bbox_polygon = box(*tuple(o for o in bbox)) - return get_collection_by_partition( - theme_url, geometry=bbox_polygon, partition=partition, limit=limit, crs=crs - ),id_property + return ( + get_collection_by_partition( + theme_url, geometry=bbox_polygon, partition=partition, limit=limit, crs=crs + ), + id_property, + ) class OnroerendErfgoedLoader(GeoJsonLoader): - def __init__(self, objectids=None, oetype=OEType.AO, bbox = None,limit=DOWNLOAD_LIMIT, partition=1000,crs=DEFAULT_CRS): - if (objectids is None and bbox is None) or (objectids is not None and bbox is not None): + def __init__( + self, + objectids=None, + oetype=OEType.AO, + bbox=None, + limit=DOWNLOAD_LIMIT, + partition=1000, + crs=DEFAULT_CRS, + ): + if (objectids is None and bbox is None) or ( + objectids is not None and bbox is not None + ): raise ValueError("Please provide a ID-filter OR a BBOX-filter, not both") super().__init__() self.objectids = objectids self.oetype = oetype self.bbox = bbox - self.limit= limit + self.limit = limit self.part = partition - self.crs=crs + self.crs = crs self.data_dict_source["source"] = "Onroerend Erfgoed" def load_data(self): - #geom_union = buffer_pos(self.aligner.get_thematic_union(), MAX_REFERENCE_BUFFER) - collection,id_property = get_collection_oe_objects( + # geom_union = buffer_pos(self.aligner.get_thematic_union(), MAX_REFERENCE_BUFFER) + collection, id_property = get_collection_oe_objects( oetype=self.oetype, objectids=self.objectids, bbox=self.bbox, partition=self.part, - limit = self.limit, - crs = self.crs + limit=self.limit, + crs=self.crs, ) self.id_property = id_property self.input = dict(collection) self.data_dict_source[VERSION_DATE] = datetime.now().strftime(DATE_FORMAT) LOGGER.debug(f"OnroerendErfgoed-objects downloaded") return super().load_data() - - diff --git a/brdr/utils.py b/brdr/utils.py index 01e1b2b..05b47a7 100644 --- a/brdr/utils.py +++ b/brdr/utils.py @@ -14,8 +14,13 @@ from shapely.geometry import shape from shapely.geometry.base import BaseGeometry -from brdr.constants import MULTI_SINGLE_ID_SEPARATOR, DEFAULT_CRS, DOWNLOAD_LIMIT, RELEVANT_DISTANCE_FIELD_NAME, \ - 
NR_CALCULATION_FIELD_NAME +from brdr.constants import ( + MULTI_SINGLE_ID_SEPARATOR, + DEFAULT_CRS, + DOWNLOAD_LIMIT, + RELEVANT_DISTANCE_FIELD_NAME, + NR_CALCULATION_FIELD_NAME, +) from brdr.enums import DiffMetric from brdr.geometry_utils import get_partitions, get_bbox from brdr.typings import ProcessResult @@ -200,7 +205,6 @@ def polygonize_reference_data(dict_ref): return dict_ref - def get_breakpoints_zerostreak(x, y): """ Determine the extremes and zero_streaks of a graph based on the derivative, and @@ -305,6 +309,7 @@ def _numerical_derivative(x, y): return derivative + def diffs_from_dict_series( dict_series: dict[str, dict[float, ProcessResult]], dict_thematic: dict[str, BaseGeometry], @@ -467,7 +472,9 @@ def get_collection_by_partition( else: geoms = get_partitions(geometry, partition) for g in geoms: - coll = get_collection(_add_bbox_to_url(url=url, crs=crs, bbox=get_bbox(g)), limit) + coll = get_collection( + _add_bbox_to_url(url=url, crs=crs, bbox=get_bbox(g)), limit + ) if collection == {}: collection = dict(coll) elif "features" in collection and "features" in coll: @@ -502,13 +509,15 @@ def merge_process_results( if id_theme_global not in grouped_results: grouped_results[id_theme_global] = dict_results else: - for rel_dist,process_result in dict_results.items(): + for rel_dist, process_result in dict_results.items(): for key in process_result: geom: BaseGeometry = process_result[key] # noqa if geom.is_empty or geom is None: continue - existing: BaseGeometry = grouped_results[id_theme_global][rel_dist][key] # noqa - grouped_results[id_theme_global][rel_dist][key] = unary_union( # noqa + existing: BaseGeometry = grouped_results[id_theme_global][rel_dist][ + key + ] # noqa + grouped_results[id_theme_global][rel_dist][key] = unary_union( [existing, geom] - ) - return grouped_results \ No newline at end of file + ) # noqa + return grouped_results diff --git a/examples/example_evaluate.py b/examples/example_evaluate.py index bfc991b..5b399c1 100644 --- a/examples/example_evaluate.py +++ b/examples/example_evaluate.py @@ -25,14 +25,14 @@ GRBFiscalParcelLoader(year=base_year, aligner=base_aligner) ) relevant_distance = 2 -base_process_result = base_aligner.process( - relevant_distance=relevant_distance -) +base_process_result = base_aligner.process(relevant_distance=relevant_distance) thematic_dict_formula = {} thematic_dict_result = {} for key in base_process_result: thematic_dict_result[key] = base_process_result[key][relevant_distance]["result"] - thematic_dict_formula[key]= {FORMULA_FIELD_NAME:base_aligner.get_brdr_formula(thematic_dict_result[key])} + thematic_dict_formula[key] = { + FORMULA_FIELD_NAME: base_aligner.get_brdr_formula(thematic_dict_result[key]) + } print(key + ": " + thematic_dict_result[key].wkt) print(key + ": " + str(thematic_dict_formula[key])) base_aligner_result = Aligner() @@ -51,17 +51,19 @@ print(key + ": " + value.wkt) actual_aligner = Aligner() loader = DictLoader(dict_affected) -actual_aligner.load_thematic_data(DictLoader(data_dict=dict_affected,data_dict_properties=thematic_dict_formula)) +actual_aligner.load_thematic_data( + DictLoader(data_dict=dict_affected, data_dict_properties=thematic_dict_formula) +) actual_aligner.load_reference_data( GRBActualLoader(grb_type=GRBType.ADP, partition=1000, aligner=actual_aligner) ) actual_aligner.relevant_distances = np.arange(0, 200, 10, dtype=int) / 100 dict_evaluated, prop_dictionary = actual_aligner.compare( - #thematic_dict_formula=thematic_dict_formula, - threshold_area=5, - threshold_percentage=1, - 
dict_unchanged=dict_unchanged, - ) + # thematic_dict_formula=thematic_dict_formula, + threshold_area=5, + threshold_percentage=1, + dict_unchanged=dict_unchanged, +) fc = get_series_geojson_dict( dict_evaluated, @@ -79,5 +81,3 @@ + ": " + feature["properties"][EVALUATION_FIELD_NAME] ) - - diff --git a/examples/example_evaluate_ao.py b/examples/example_evaluate_ao.py index 00253d9..6025736 100644 --- a/examples/example_evaluate_ao.py +++ b/examples/example_evaluate_ao.py @@ -22,14 +22,14 @@ GRBFiscalParcelLoader(year=base_year, aligner=base_aligner) ) relevant_distance = 3 -base_process_result = base_aligner.process( - relevant_distance=relevant_distance -) +base_process_result = base_aligner.process(relevant_distance=relevant_distance) thematic_dict_formula = {} thematic_dict_result = {} for key in base_process_result: thematic_dict_result[key] = base_process_result[key][relevant_distance]["result"] - thematic_dict_formula[key]= {FORMULA_FIELD_NAME:base_aligner.get_brdr_formula(thematic_dict_result[key])} + thematic_dict_formula[key] = { + FORMULA_FIELD_NAME: base_aligner.get_brdr_formula(thematic_dict_result[key]) + } base_aligner_result = Aligner() base_aligner_result.load_thematic_data(DictLoader(thematic_dict_result)) dict_affected, dict_unchanged = get_geoms_affected_by_grb_change( @@ -44,16 +44,18 @@ exit() actual_aligner = Aligner() -actual_aligner.load_thematic_data(DictLoader(data_dict=dict_affected,data_dict_properties=thematic_dict_formula)) +actual_aligner.load_thematic_data( + DictLoader(data_dict=dict_affected, data_dict_properties=thematic_dict_formula) +) loader = GRBActualLoader(grb_type=GRBType.ADP, partition=1000, aligner=actual_aligner) actual_aligner.load_reference_data(loader) series = np.arange(0, 300, 10, dtype=int) / 100 dict_evaluated, prop_dictionary = actual_aligner.compare( - threshold_area=5, - threshold_percentage=1, - dict_unchanged=dict_unchanged, - ) + threshold_area=5, + threshold_percentage=1, + dict_unchanged=dict_unchanged, +) fc = get_series_geojson_dict( dict_evaluated, diff --git a/examples/example_parcel_change_detector.py b/examples/example_parcel_change_detector.py index 72cd0f9..b419396 100644 --- a/examples/example_parcel_change_detector.py +++ b/examples/example_parcel_change_detector.py @@ -19,7 +19,7 @@ # ========= crs = "EPSG:31370" limit = 10000 -bbox = [172800,170900,173000,171100] +bbox = [172800, 170900, 173000, 171100] bbox = [172000, 172000, 174000, 174000] # bbox = "170000,170000,175000,174900" # bbox = "100000,195000,105000,195900" @@ -40,7 +40,7 @@ # base_year base_aligner = Aligner() # Load the thematic data to evaluate -loader = OnroerendErfgoedLoader(bbox=bbox,partition=0) +loader = OnroerendErfgoedLoader(bbox=bbox, partition=0) base_aligner.load_thematic_data(loader) logging.info( @@ -65,34 +65,35 @@ del base_aligner.dict_thematic[x] # # Align the features to the base-GRB -base_process_result = base_aligner.process( - relevant_distance=base_correction - ) -#get resulting aligned features on Adpfxxxx, with formula -processresults=base_aligner.get_results_as_geojson(formula=True) -if len(processresults)==0: +base_process_result = base_aligner.process(relevant_distance=base_correction) +# get resulting aligned features on Adpfxxxx, with formula +processresults = base_aligner.get_results_as_geojson(formula=True) +if len(processresults) == 0: print("empty processresults") exit() featurecollection_base_result = processresults["result"] # Update Featurecollection to actual version fcs = update_to_actual_grb( - 
featurecollection_base_result, base_aligner.name_thematic_id,max_distance_for_actualisation=max_distance_for_actualisation) + featurecollection_base_result, + base_aligner.name_thematic_id, + max_distance_for_actualisation=max_distance_for_actualisation, +) counter_equality = 0 counter_equality_by_alignment = 0 counter_difference = 0 for feature in fcs["result"]["features"]: - if EVALUATION_FIELD_NAME in feature["properties"].keys(): - ev = feature["properties"][EVALUATION_FIELD_NAME] - rd =feature["properties"][RELEVANT_DISTANCE_FIELD_NAME] - if ev.startswith("equal") and rd == 0: - counter_equality = counter_equality + 1 - elif ev.startswith("equal") and rd > 0: - counter_equality_by_alignment = counter_equality_by_alignment + 1 - else: - counter_difference = counter_difference + 1 + if EVALUATION_FIELD_NAME in feature["properties"].keys(): + ev = feature["properties"][EVALUATION_FIELD_NAME] + rd = feature["properties"][RELEVANT_DISTANCE_FIELD_NAME] + if ev.startswith("equal") and rd == 0: + counter_equality = counter_equality + 1 + elif ev.startswith("equal") and rd > 0: + counter_equality_by_alignment = counter_equality_by_alignment + 1 + else: + counter_difference = counter_difference + 1 print( "Features: " diff --git a/examples/example_predictor.py b/examples/example_predictor.py index 4344d30..e7d60ac 100644 --- a/examples/example_predictor.py +++ b/examples/example_predictor.py @@ -27,7 +27,9 @@ dict_series, dict_predictions, diffs = aligner.predictor( relevant_distances=series, od_strategy=4, threshold_overlap_percentage=50 ) - fcs = aligner.get_results_as_geojson(resulttype=AlignerResultType.PREDICTIONS,formula=False) + fcs = aligner.get_results_as_geojson( + resulttype=AlignerResultType.PREDICTIONS, formula=False + ) print(fcs["result"]) # for key in dict_predictions: # show_map( diff --git a/tests/test_grb.py b/tests/test_grb.py index 7e02dce..adc156f 100644 --- a/tests/test_grb.py +++ b/tests/test_grb.py @@ -10,7 +10,8 @@ get_last_version_date, is_grb_changed, get_geoms_affected_by_grb_change, - GRBSpecificDateParcelLoader, update_to_actual_grb, + GRBSpecificDateParcelLoader, + update_to_actual_grb, ) from brdr.loader import DictLoader @@ -169,8 +170,6 @@ def test_get_geoms_affected_by_grb_change_bulk(self): ) assert len(dict_affected.keys()) > 0 - - def test_grbspecificdateparcelloader(self): thematic_dict = { "theme_id_1": from_wkt( @@ -182,20 +181,60 @@ def test_grbspecificdateparcelloader(self): aligner.load_thematic_data(loader) loader = GRBSpecificDateParcelLoader(date="2023-01-03", aligner=aligner) aligner.load_reference_data(loader) - assert len (aligner.dict_reference.keys())==53 + assert len(aligner.dict_reference.keys()) == 53 loader = GRBSpecificDateParcelLoader(date="2023-08-03", aligner=aligner) aligner.load_reference_data(loader) - assert len (aligner.dict_reference.keys())==52 + assert len(aligner.dict_reference.keys()) == 52 def test_update_to_actual_grb(self): - #Create a featurecollection (aligned on 2022), to use for the 'update_to_actual_grb' + # Create a featurecollection (aligned on 2022), to use for the 'update_to_actual_grb' name_thematic_id = "theme_identifier" - featurecollection_base_result = {"crs": {"properties": {"name": "EPSG:31370"}, "type": "name"}, "features": [{"geometry": {"coordinates": [[[138541.4173, 194007.7292], [138539.3263, 193994.1382], [138529.3663, 193995.5664], [138522.0997, 193996.6084], [138514.9844, 193997.6287], [138505.8261, 193996.615], [138498.8406, 193996.4314], [138492.9442, 193996.2895], [138491.2246, 193996.2481], 
[138491.4111, 194004.8147], [138514.3685, 194005.1297], [138520.2585, 194004.5753], [138520.3946, 194005.5833], [138520.5426, 194009.732], [138541.4173, 194007.7292]]], "type": "Polygon"}, "properties": {"area": 503.67736346047064, "brdr_formula": "{\"alignment_date\": \"2024-09-19\", \"brdr_version\": \"0.2.1\", \"reference_source\": {\"source\": \"Adpf\", \"version_date\": \"2022-01-01\"}, \"full\": true, \"reference_features\": {\"12034A0181/00K000\": {\"full\": true, \"area\": 503.68, \"percentage\": 100, \"version_date\": \"2019-08-30\"}}, \"reference_od\": null, \"last_version_date\": \"2019-08-30\"}", "nr_calculations": 1, "perimeter": 125.74541473322422, "relevant_distance": 2, "shape_index": 0.24965468741597102, "theme_identifier": "206285"}, "type": "Feature"}], "type": "FeatureCollection"} - + featurecollection_base_result = { + "crs": {"properties": {"name": "EPSG:31370"}, "type": "name"}, + "features": [ + { + "geometry": { + "coordinates": [ + [ + [138541.4173, 194007.7292], + [138539.3263, 193994.1382], + [138529.3663, 193995.5664], + [138522.0997, 193996.6084], + [138514.9844, 193997.6287], + [138505.8261, 193996.615], + [138498.8406, 193996.4314], + [138492.9442, 193996.2895], + [138491.2246, 193996.2481], + [138491.4111, 194004.8147], + [138514.3685, 194005.1297], + [138520.2585, 194004.5753], + [138520.3946, 194005.5833], + [138520.5426, 194009.732], + [138541.4173, 194007.7292], + ] + ], + "type": "Polygon", + }, + "properties": { + "area": 503.67736346047064, + "brdr_formula": '{"alignment_date": "2024-09-19", "brdr_version": "0.2.1", "reference_source": {"source": "Adpf", "version_date": "2022-01-01"}, "full": true, "reference_features": {"12034A0181/00K000": {"full": true, "area": 503.68, "percentage": 100, "version_date": "2019-08-30"}}, "reference_od": null, "last_version_date": "2019-08-30"}', + "nr_calculations": 1, + "perimeter": 125.74541473322422, + "relevant_distance": 2, + "shape_index": 0.24965468741597102, + "theme_identifier": "206285", + }, + "type": "Feature", + } + ], + "type": "FeatureCollection", + } - #Update Featurecollection to actual version - featurecollection = update_to_actual_grb(featurecollection_base_result,name_thematic_id) - #Print results + # Update Featurecollection to actual version + featurecollection = update_to_actual_grb( + featurecollection_base_result, name_thematic_id + ) + # Print results for feature in featurecollection["result"]["features"]: - assert isinstance(feature["properties"][EVALUATION_FIELD_NAME],Evaluation) + assert isinstance(feature["properties"][EVALUATION_FIELD_NAME], Evaluation) diff --git a/tests/test_oe.py b/tests/test_oe.py index 82943fe..ed15706 100644 --- a/tests/test_oe.py +++ b/tests/test_oe.py @@ -6,27 +6,33 @@ class TestOE(unittest.TestCase): def test_onroerenderfgoedloader_by_aanduidid(self): - loader = OnroerendErfgoedLoader(objectids = [120288,10275],oetype=OEType.AO) - aligner=Aligner() + loader = OnroerendErfgoedLoader(objectids=[120288, 10275], oetype=OEType.AO) + aligner = Aligner() aligner.load_thematic_data(loader) - assert len (aligner.dict_thematic.keys())==2 + assert len(aligner.dict_thematic.keys()) == 2 def test_onroerenderfgoedloader_by_erfgoedid(self): - loader = OnroerendErfgoedLoader(objectids = [42549],oetype=OEType.EO) - aligner=Aligner() + loader = OnroerendErfgoedLoader(objectids=[42549], oetype=OEType.EO) + aligner = Aligner() aligner.load_thematic_data(loader) - assert len (aligner.dict_thematic.keys())==1 + assert len(aligner.dict_thematic.keys()) == 1 def 
test_onroerenderfgoedloader_by_bbox(self): - loader = OnroerendErfgoedLoader(bbox=[172000,172000,174000,174000], oetype=OEType.EO) + loader = OnroerendErfgoedLoader( + bbox=[172000, 172000, 174000, 174000], oetype=OEType.EO + ) aligner = Aligner() aligner.load_thematic_data(loader) - assert len(aligner.dict_thematic.keys()) >0 + assert len(aligner.dict_thematic.keys()) > 0 def test_onroerenderfgoedloader_by_bbox_and_objectid(self): with self.assertRaises(Exception) as context: - loader = OnroerendErfgoedLoader(objectids=[42549],bbox=[172000,172000,174000,174000], oetype=OEType.EO) + loader = OnroerendErfgoedLoader( + objectids=[42549], + bbox=[172000, 172000, 174000, 174000], + oetype=OEType.EO, + ) with self.assertRaises(Exception) as context: - loader = OnroerendErfgoedLoader(objectids=None,bbox=None, oetype=OEType.EO) + loader = OnroerendErfgoedLoader(objectids=None, bbox=None, oetype=OEType.EO) diff --git a/tests/test_utils.py b/tests/test_utils.py index 8c56abb..59a06da 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -8,6 +8,7 @@ from brdr.oe import get_oe_dict_by_ids from brdr.typings import ProcessResult from brdr.utils import diffs_from_dict_series + # from brdr.utils import filter_dict_by_key from brdr.utils import get_breakpoints_zerostreak from brdr.utils import get_collection From 1d6faf00a907f5297f960bc6ffc603227bc78f95 Mon Sep 17 00:00:00 2001 From: dieuska Date: Thu, 19 Sep 2024 20:11:43 +0200 Subject: [PATCH 32/35] updated requirement for python>=3.10 --- .github/workflows/python-package.yml | 2 +- CHANGES.md | 11 +++-------- pyproject.toml | 5 ++--- 3 files changed, 6 insertions(+), 12 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 34a7698..c882a90 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -17,7 +17,7 @@ jobs: strategy: matrix: - python-version: [ "3.9", "3.10", "3.11", "3.12" ] + python-version: [ "3.10", "3.11", "3.12" ] steps: - uses: actions/checkout@v4 diff --git a/CHANGES.md b/CHANGES.md index 7248ff5..a2abe2a 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -20,7 +20,7 @@ # 0.3.0 -! Not Backwards compatable ! +! Not Backwards compatable! - Refactoring: - refactor the structure of the (internal) dicts: dict_series, dict_predicted. 
More logical and faster [#57] @@ -28,6 +28,7 @@ - removed deprecated loaders from codebase [#77] - simplify the core-functionalities of Aligner: process, predict, compare [#89] - cleanup unused functions [#81] + - requires python>=3.10 - Functionalities: - Add brdr-version to formula [#66] @@ -40,10 +41,4 @@ - Bugfixing: - adding a safe_equals-function to catch GEOsException bug [#71] - - - - - - - + diff --git a/pyproject.toml b/pyproject.toml index 56a919b..a6052cf 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -13,7 +13,7 @@ maintainers = [ { name = "Koen Van Daele", email = "koen.vandaele@vlaanderen.be" }, { name = "Vermeyen Maerten", email = "maarten.vermeyen@vlaanderen.be" } ] -requires-python = ">=3.9" +requires-python = ">=3.10" classifiers = [ "Development Status :: 3 - Alpha", "License :: OSI Approved :: MIT License", @@ -21,7 +21,6 @@ classifiers = [ "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", - "Programming Language :: Python :: 3.9", "Topic :: Scientific/Engineering :: GIS", ] dependencies = [ @@ -58,4 +57,4 @@ build-backend = "hatchling.build" packages = ["brdr"] [tool.black] -target-version = ['py39', 'py310', 'py311', 'py312'] +target-version = ['py310', 'py311', 'py312'] From cc844f62d276d440c22eb652d9c8bc87deccd83e Mon Sep 17 00:00:00 2001 From: dieuska Date: Thu, 19 Sep 2024 20:19:01 +0200 Subject: [PATCH 33/35] Revert "updated requirement for python>=3.10" This reverts commit 1d6faf00a907f5297f960bc6ffc603227bc78f95. --- .github/workflows/python-package.yml | 2 +- CHANGES.md | 11 ++++++++--- pyproject.toml | 5 +++-- 3 files changed, 12 insertions(+), 6 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index c882a90..34a7698 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -17,7 +17,7 @@ jobs: strategy: matrix: - python-version: [ "3.10", "3.11", "3.12" ] + python-version: [ "3.9", "3.10", "3.11", "3.12" ] steps: - uses: actions/checkout@v4 diff --git a/CHANGES.md b/CHANGES.md index a2abe2a..7248ff5 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -20,7 +20,7 @@ # 0.3.0 -! Not Backwards compatable! +! Not Backwards compatable ! - Refactoring: - refactor the structure of the (internal) dicts: dict_series, dict_predicted. 
More logical and faster [#57] @@ -28,7 +28,6 @@ - removed deprecated loaders from codebase [#77] - simplify the core-functionalities of Aligner: process, predict, compare [#89] - cleanup unused functions [#81] - - requires python>=3.10 - Functionalities: - Add brdr-version to formula [#66] @@ -41,4 +40,10 @@ - Bugfixing: - adding a safe_equals-function to catch GEOsException bug [#71] - + + + + + + + diff --git a/pyproject.toml b/pyproject.toml index a6052cf..56a919b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -13,7 +13,7 @@ maintainers = [ { name = "Koen Van Daele", email = "koen.vandaele@vlaanderen.be" }, { name = "Vermeyen Maerten", email = "maarten.vermeyen@vlaanderen.be" } ] -requires-python = ">=3.10" +requires-python = ">=3.9" classifiers = [ "Development Status :: 3 - Alpha", "License :: OSI Approved :: MIT License", @@ -21,6 +21,7 @@ classifiers = [ "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.9", "Topic :: Scientific/Engineering :: GIS", ] dependencies = [ @@ -57,4 +58,4 @@ build-backend = "hatchling.build" packages = ["brdr"] [tool.black] -target-version = ['py310', 'py311', 'py312'] +target-version = ['py39', 'py310', 'py311', 'py312'] From 7bad951efce37c6dae96ab03853b75418c71d5be Mon Sep 17 00:00:00 2001 From: dieuska Date: Thu, 19 Sep 2024 20:21:08 +0200 Subject: [PATCH 34/35] removed static methods --- brdr/aligner.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/brdr/aligner.py b/brdr/aligner.py index 06c70f6..6e79801 100644 --- a/brdr/aligner.py +++ b/brdr/aligner.py @@ -1203,7 +1203,7 @@ def _add_multi_polygons_from_geom_to_array(geom: BaseGeometry, array): return array -@staticmethod + def _calculate_geom_by_intersection_and_reference( geom_intersection: BaseGeometry, geom_reference: BaseGeometry, @@ -1311,7 +1311,7 @@ def _calculate_geom_by_intersection_and_reference( return geom, geom_relevant_intersection, geom_relevant_difference -@staticmethod + def _get_relevant_polygons_from_geom(geometry: BaseGeometry, buffer_distance: float): """ Get only the relevant parts (polygon) from a geometry. @@ -1336,7 +1336,7 @@ def _get_relevant_polygons_from_geom(geometry: BaseGeometry, buffer_distance: fl return make_valid(unary_union(array)) -@staticmethod + def _equal_geom_in_array(geom, geom_array): """ Check if a predicted geometry is equal to other predicted geometries in a list. @@ -1350,7 +1350,6 @@ def _equal_geom_in_array(geom, geom_array): return False -@staticmethod def _check_equality( base_formula, actual_formula, threshold_area=5, threshold_percentage=1 ): From 9d3b2dec2643c7e05732997e4acab5153e775516 Mon Sep 17 00:00:00 2001 From: dieuska Date: Thu, 19 Sep 2024 20:22:43 +0200 Subject: [PATCH 35/35] black reformatted --- brdr/aligner.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/brdr/aligner.py b/brdr/aligner.py index 6e79801..5ff3cfe 100644 --- a/brdr/aligner.py +++ b/brdr/aligner.py @@ -1203,7 +1203,6 @@ def _add_multi_polygons_from_geom_to_array(geom: BaseGeometry, array): return array - def _calculate_geom_by_intersection_and_reference( geom_intersection: BaseGeometry, geom_reference: BaseGeometry, @@ -1311,7 +1310,6 @@ def _calculate_geom_by_intersection_and_reference( return geom, geom_relevant_intersection, geom_relevant_difference - def _get_relevant_polygons_from_geom(geometry: BaseGeometry, buffer_distance: float): """ Get only the relevant parts (polygon) from a geometry. 
@@ -1336,7 +1334,6 @@ def _get_relevant_polygons_from_geom(geometry: BaseGeometry, buffer_distance: fl return make_valid(unary_union(array)) - def _equal_geom_in_array(geom, geom_array): """ Check if a predicted geometry is equal to other predicted geometries in a list.