Commit

Merge branch 'main' into widgetfactory-mulibuffer-point
hennie-k committed Sep 23, 2024
2 parents 30c94d8 + 7e79a8a commit b872130
Showing 17 changed files with 937 additions and 186 deletions.
1 change: 0 additions & 1 deletion .gitignore
@@ -64,7 +64,6 @@ nix-result/
 .direnv/
 
 data
-core
 app.py
 /geest.zip
 .~lock.*

3 changes: 2 additions & 1 deletion geest/core/__init__.py
@@ -6,4 +6,5 @@
 from .settings import setting, set_setting
 
 from .default_settings import default_settings
-#from .json_validator import JSONValidator
+
+# from .json_validator import JSONValidator

36 changes: 27 additions & 9 deletions geest/core/generate_model.py
@@ -58,7 +58,7 @@ def load_spreadsheet(self):
                 "Use Polyline per Cell",
                 "Use Point per Cell",
                 "Analysis Mode",  # New column
-                "Layer Required"  # New column
+                "Layer Required",  # New column
             ]
         ]
 
@@ -84,8 +84,16 @@ def parse_to_json(self):
 
             # Prepare dimension data
             dimension_id = self.create_id(dimension)
-            dimension_required = row["Dimension Required"] if not pd.isna(row["Dimension Required"]) else ""
-            default_dimension_analysis_weighting = row["Default Dimension Analysis Weighting"] if not pd.isna(row["Default Dimension Analysis Weighting"]) else ""
+            dimension_required = (
+                row["Dimension Required"]
+                if not pd.isna(row["Dimension Required"])
+                else ""
+            )
+            default_dimension_analysis_weighting = (
+                row["Default Dimension Analysis Weighting"]
+                if not pd.isna(row["Default Dimension Analysis Weighting"])
+                else ""
+            )
 
             # If the Dimension doesn't exist yet, create it
             if dimension not in dimension_map:
@@ -94,15 +102,21 @@
                     "name": dimension,
                     "required": dimension_required,
                     "default_analysis_weighting": default_dimension_analysis_weighting,
-                    "factors": []
+                    "factors": [],
                 }
                 self.result["dimensions"].append(new_dimension)
                 dimension_map[dimension] = new_dimension
 
             # Prepare factor data
             factor_id = self.create_id(factor)
-            factor_required = row["Factor Required"] if not pd.isna(row["Factor Required"]) else ""
-            default_factor_dimension_weighting = row["Default Factor Dimension Weighting"] if not pd.isna(row["Default Factor Dimension Weighting"]) else ""
+            factor_required = (
+                row["Factor Required"] if not pd.isna(row["Factor Required"]) else ""
+            )
+            default_factor_dimension_weighting = (
+                row["Default Factor Dimension Weighting"]
+                if not pd.isna(row["Default Factor Dimension Weighting"])
+                else ""
+            )
 
             # If the Factor doesn't exist in the current dimension, add it
             factor_map = {f["name"]: f for f in dimension_map[dimension]["factors"]}
@@ -112,7 +126,7 @@ def parse_to_json(self):
                     "name": factor,
                     "required": factor_required,
                     "default_dimension_weighting": default_factor_dimension_weighting,
-                    "layers": []
+                    "layers": [],
                 }
                 dimension_map[dimension]["factors"].append(new_factor)
                 factor_map[factor] = new_factor
@@ -229,8 +243,12 @@ def parse_to_json(self):
                     if not pd.isna(row["Use Point per Cell"])
                     else ""
                 ),
-                "Analysis Mode": row["Analysis Mode"] if not pd.isna(row["Analysis Mode"]) else "",  # New column
-                "Layer Required": row["Layer Required"] if not pd.isna(row["Layer Required"]) else ""  # New column
+                "Analysis Mode": (
+                    row["Analysis Mode"] if not pd.isna(row["Analysis Mode"]) else ""
+                ),  # New column
+                "Layer Required": (
+                    row["Layer Required"] if not pd.isna(row["Layer Required"]) else ""
+                ),  # New column
             }
 
             factor_map[factor]["layers"].append(layer_data)
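
Most of this file's diff is Black-style reformatting of one recurring pattern: read a spreadsheet cell and fall back to an empty string when pandas flags it as missing. A minimal standalone sketch of that guard, using the two new columns (the cell values are invented for illustration):

import pandas as pd

# Hypothetical row standing in for one row of the real spreadsheet
row = pd.Series({"Analysis Mode": "default", "Layer Required": float("nan")})

# The guard used throughout parse_to_json(): NaN cells become empty strings
analysis_mode = row["Analysis Mode"] if not pd.isna(row["Analysis Mode"]) else ""
layer_required = row["Layer Required"] if not pd.isna(row["Layer Required"]) else ""

print(repr(analysis_mode))   # 'default'
print(repr(layer_required))  # ''
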
76 changes: 76 additions & 0 deletions geest/core/generate_schema.py
@@ -0,0 +1,76 @@
#!/usr/bin/env python

import json
import os


def infer_schema(data):
"""Infers the JSON schema from the given JSON data."""
if isinstance(data, dict):
properties = {}
for key, value in data.items():
properties[key] = infer_schema(value)
return {
"type": "object",
"properties": properties,
"required": list(data.keys()), # Mark all keys as required
}
elif isinstance(data, list):
if len(data) > 0:
# Assume the schema of the first element for list items
return {"type": "array", "items": infer_schema(data[0])}
else:
return {"type": "array", "items": {}}
elif isinstance(data, str):
return {"type": "string"}
elif isinstance(data, int):
return {"type": "integer"}
elif isinstance(data, float):
return {"type": "number"}
elif isinstance(data, bool):
return {"type": "boolean"}
elif data is None:
return {"type": "null"}
else:
return {"type": "string"}


def generate_schema_from_json(json_file, schema_file):
"""Generates a schema from a JSON file and writes it to a schema file."""
# Load the JSON file
with open(json_file, "r") as f:
data = json.load(f)

# Infer the schema
schema = {
"$schema": "http://json-schema.org/draft-07/schema#",
"type": "object",
"properties": {"dimensions": infer_schema(data["dimensions"])},
"required": ["dimensions"],
}

# Save the schema to the schema file
with open(schema_file, "w") as f:
json.dump(schema, f, indent=4)

print(f"Schema has been generated and saved to {schema_file}")


# Main function to generate the schema
def main():
# Set default paths
cwd = os.getcwd()
model_json_path = os.path.join(cwd, "geest", "resources", "model.json")
schema_json_path = os.path.join(cwd, "geest", "resources", "schema.json")

# Check if model.json exists
if not os.path.exists(model_json_path):
print(f"Error: {model_json_path} not found.")
return

# Generate schema from model.json
generate_schema_from_json(model_json_path, schema_json_path)


if __name__ == "__main__":
main()
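
Since generate_schema.py is new in this commit, a quick look at what infer_schema() produces may help. A minimal sketch with a toy input (the dimension fields are illustrative, and the import path assumes the script is importable as a module):

from geest.core.generate_schema import infer_schema

sample = [{"name": "Contextual", "required": "", "factors": []}]

print(infer_schema(sample))
# {'type': 'array',
#  'items': {'type': 'object',
#            'properties': {'name': {'type': 'string'},
#                           'required': {'type': 'string'},
#                           'factors': {'type': 'array', 'items': {}}},
#            'required': ['name', 'required', 'factors']}}

Note that infer_schema() marks every key it sees as required and infers list item types from the first element only, so the generated schema is only as representative as the sample document it is derived from.
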
6 changes: 4 additions & 2 deletions geest/core/json_validator.py
@@ -3,6 +3,7 @@
 import jsonschema
 from jsonschema import validate
 
+
 class JSONValidator:
     def __init__(self, json_schema_path, json_data_path):
         """
@@ -13,13 +14,13 @@ def __init__(self, json_schema_path, json_data_path):
         self.json_data_path = json_data_path
         self.json_schema = self.load_json(json_schema_path)
         self.json_data = self.load_json(json_data_path)
 
     def load_json(self, file_path):
         """
         Load JSON from the given file path.
         """
         try:
-            with open(file_path, 'r') as file:
+            with open(file_path, "r") as file:
                 return json.load(file)
         except Exception as e:
             print(f"Error loading JSON file: {file_path}")
@@ -38,6 +39,7 @@ def validate_json(self):
             print("Validation error: The JSON document is invalid.")
             print(f"Error details: {err.message}")
 
+
 # Example usage:
 # validator = JSONValidator('schema.json', 'model.json')
 # validator.validate_json()
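
The commented-out example above hints at the intended workflow. One plausible end-to-end sketch pairing this class with the new generate_schema.py (the paths mirror the defaults in that script's main(), and the import paths assume both modules are importable):

import os

from geest.core.generate_schema import generate_schema_from_json
from geest.core.json_validator import JSONValidator

cwd = os.getcwd()
model_path = os.path.join(cwd, "geest", "resources", "model.json")
schema_path = os.path.join(cwd, "geest", "resources", "schema.json")

# Regenerate the schema from the current model, then validate the model against it
generate_schema_from_json(model_path, schema_path)
validator = JSONValidator(schema_path, model_path)
validator.validate_json()
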
45 changes: 30 additions & 15 deletions geest/core/osm_data_downloader.py
@@ -1,11 +1,22 @@
 import xml.etree.ElementTree as ET
 from qgis.core import (
-    QgsProject, QgsVectorLayer, QgsFeature, QgsGeometry,
-    QgsPointXY, QgsPolygon, QgsFields, QgsField, QgsCoordinateReferenceSystem,
-    QgsVectorFileWriter, QgsApplication, QgsBlockingNetworkRequest, QgsNetworkRequest
+    QgsProject,
+    QgsVectorLayer,
+    QgsFeature,
+    QgsGeometry,
+    QgsPointXY,
+    QgsPolygon,
+    QgsFields,
+    QgsField,
+    QgsCoordinateReferenceSystem,
+    QgsVectorFileWriter,
+    QgsApplication,
+    QgsBlockingNetworkRequest,
+    QgsNetworkRequest,
 )
 from qgis.PyQt.QtCore import QByteArray, QUrl, QObject, QVariant
 
+
 # Please see https://gis.stackexchange.com/questions/343126/performing-sync-or-async-network-request-in-pyqgis
 # for the QgsBlockingNetworkRequest class and QgsNetworkRequest class
 # notes on when to use them
@@ -30,15 +41,15 @@ def send_query(self):
 
         # Send the POST request using QgsBlockingNetworkRequest
         blocking_request = QgsBlockingNetworkRequest()
-        reply = blocking_request.fetch(request, QByteArray(self.query.encode('utf-8')))
+        reply = blocking_request.fetch(request, QByteArray(self.query.encode("utf-8")))
 
         # Check for errors in the reply
         if reply.error():
             print(f"Network Error: {reply.errorMessage()}")
             return None
         else:
             # Return the response data
-            return reply.content().data().decode('utf-8')
+            return reply.content().data().decode("utf-8")
 
     def download_line_data(self):
         """
@@ -62,13 +73,13 @@ def download_line_data(self):
 
         # Iterate over the ways and extract coordinates
         for way in root.findall(".//way"):
-            osm_id = way.get('id')
+            osm_id = way.get("id")
             coords = []
             for nd in way.findall("nd"):
-                ref = nd.get('ref')
+                ref = nd.get("ref")
                 node = root.find(f".//node[@id='{ref}']")
-                lat = float(node.get('lat'))
-                lon = float(node.get('lon'))
+                lat = float(node.get("lat"))
+                lon = float(node.get("lon"))
                 coords.append(QgsPointXY(lon, lat))
 
             # Create a feature
@@ -81,7 +92,9 @@ def download_line_data(self):
         QgsProject.instance().addMapLayer(layer)
 
         # Save to a shapefile
-        QgsVectorFileWriter.writeAsVectorFormat(layer, self.output_path, "UTF-8", crs, "ESRI Shapefile")
+        QgsVectorFileWriter.writeAsVectorFormat(
+            layer, self.output_path, "UTF-8", crs, "ESRI Shapefile"
+        )
         print(f"Line-based shapefile saved to {self.output_path}")
 
     def download_polygon_data(self):
@@ -106,13 +119,13 @@ def download_polygon_data(self):
 
         # Iterate over the ways and extract coordinates (forming polygons)
         for way in root.findall(".//way"):
-            osm_id = way.get('id')
+            osm_id = way.get("id")
             coords = []
             for nd in way.findall("nd"):
-                ref = nd.get('ref')
+                ref = nd.get("ref")
                 node = root.find(f".//node[@id='{ref}']")
-                lat = float(node.get('lat'))
-                lon = float(node.get('lon'))
+                lat = float(node.get("lat"))
+                lon = float(node.get("lon"))
                 coords.append(QgsPointXY(lon, lat))
 
             # Close the polygon (by connecting the first and last points)
@@ -129,5 +142,7 @@ def download_polygon_data(self):
         QgsProject.instance().addMapLayer(layer)
 
         # Save to a shapefile
-        QgsVectorFileWriter.writeAsVectorFormat(layer, self.output_path, "UTF-8", crs, "ESRI Shapefile")
+        QgsVectorFileWriter.writeAsVectorFormat(
+            layer, self.output_path, "UTF-8", crs, "ESRI Shapefile"
+        )
         print(f"Polygon-based shapefile saved to {self.output_path}")