diff --git a/pygeoapi/Dockerfile b/pygeoapi/Dockerfile index 3622a4b..b4bfddd 100644 --- a/pygeoapi/Dockerfile +++ b/pygeoapi/Dockerfile @@ -1,25 +1,6 @@ -FROM geopython/pygeoapi:latest - -#Add data directory -RUN mkdir /data - -#last updated 2021-09-14 00:09:28 UTC-5 -ADD https://www.hydroshare.org/resource/4a22e88e689949afa1cf71ae009eaf1b/data/contents/merit_plus_simplify.gpkg /data/ - -#last updated 2021-09-14 00:019:50 UTC-5 -ADD https://www.hydroshare.org/resource/4a22e88e689949afa1cf71ae009eaf1b/data/contents/e_merit_cats.gpkg /data/ - -#last updated 2021-09-14 00:019:50 UTC-5 -ADD https://www.hydroshare.org/resource/4a22e88e689949afa1cf71ae009eaf1b/data/contents/w_merit_cats.gpkg /data/ +FROM webbben/pygeoapi-river-runner:latest #add requirements and mods COPY ./pygeoapi.config.yml /pygeoapi/local.config.yml COPY ./schemas.opengis.net /opt/schemas.opengis.net COPY ./pygeoapi-skin-dashboard /skin-dashboard - -#Add river runner plugin -COPY ./plugin.py /pygeoapi/pygeoapi/plugin.py -COPY ./flask_app.py /pygeoapi/pygeoapi/flask_app.py -COPY ./river_runner.py /pygeoapi/pygeoapi/process/river_runner.py -COPY ./sqlite.py /pygeoapi/pygeoapi/provider/sqlite.py -COPY ./map.html /pygeoapi/pygeoapi/templates/processes/map.html diff --git a/pygeoapi/Dockerfile_db b/pygeoapi/Dockerfile_db new file mode 100644 index 0000000..0002fa8 --- /dev/null +++ b/pygeoapi/Dockerfile_db @@ -0,0 +1,23 @@ +FROM ubuntu:20.04 AS ZIP + +#last updated 2021-09-22 00:09:28 UTC-5 +ADD https://prod-is-usgs-sb-prod-publish.s3.amazonaws.com/614a8864d34e0df5fb97572d/merit_plus_simplify.zip / + +#last updated 2021-09-14 00:019:50 UTC-5 +# ADD https://www.hydroshare.org/resource/4a22e88e689949afa1cf71ae009eaf1b/data/contents/e_merit_cats.gpkg /data/ + +#last updated 2021-09-14 00:019:50 UTC-5 +# ADD https://www.hydroshare.org/resource/4a22e88e689949afa1cf71ae009eaf1b/data/contents/w_merit_cats.gpkg /data/ + +RUN apt-get update \ + && apt-get install unzip \ + && unzip '/*.zip' + +FROM kartoza/postgis:latest + +# Add data directory +RUN mkdir /data/ + +COPY --from=ZIP /merit_plus_simplify.gpkg /data/merit_plus_simplify.gpkg + +COPY ./build.sh /docker-entrypoint-initdb.d/build.sh \ No newline at end of file diff --git a/pygeoapi/Dockerfile_gcp b/pygeoapi/Dockerfile_gcp index 6123c34..5eb13d2 100644 --- a/pygeoapi/Dockerfile_gcp +++ b/pygeoapi/Dockerfile_gcp @@ -1,25 +1,6 @@ -FROM geopython/pygeoapi:latest - -#Add data directory -RUN mkdir /data - -#last updated 2021-09-14 00:09:28 UTC-5 -ADD https://www.hydroshare.org/resource/4a22e88e689949afa1cf71ae009eaf1b/data/contents/merit_plus_simplify.gpkg /data/ - -#last updated 2021-09-14 00:019:50 UTC-5 -ADD https://www.hydroshare.org/resource/4a22e88e689949afa1cf71ae009eaf1b/data/contents/e_merit_cats.gpkg /data/ - -#last updated 2021-09-14 00:019:50 UTC-5 -ADD https://www.hydroshare.org/resource/4a22e88e689949afa1cf71ae009eaf1b/data/contents/w_merit_cats.gpkg /data/ +FROM webbben/pygeoapi-river-runner:latest #add requirements and mods COPY ./pygeoapi.config.gcp.yml /pygeoapi/local.config.yml COPY ./schemas.opengis.net /opt/schemas.opengis.net COPY ./pygeoapi-skin-dashboard /skin-dashboard - -#Add river runner plugin -COPY ./plugin.py /pygeoapi/pygeoapi/plugin.py -COPY ./flask_app.py /pygeoapi/pygeoapi/flask_app.py -COPY ./river_runner.py /pygeoapi/pygeoapi/process/river_runner.py -COPY ./sqlite.py /pygeoapi/pygeoapi/provider/sqlite.py -COPY ./map.html /pygeoapi/pygeoapi/templates/processes/map.html diff --git a/pygeoapi/build.sh b/pygeoapi/build.sh new file mode 100644 index 
0000000..391b03a --- /dev/null +++ b/pygeoapi/build.sh @@ -0,0 +1,63 @@ +#!/bin/bash +# ================================================================= +# +# Authors: Just van den Broecke +# +# Copyright (c) 2019 Just van den Broecke +# +# Permission is hereby granted, free of charge, to any person +# obtaining a copy of this software and associated documentation +# files (the "Software"), to deal in the Software without +# restriction, including without limitation the rights to use, +# copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the +# Software is furnished to do so, subject to the following +# conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +# OTHER DEALINGS IN THE SOFTWARE. +# +# ================================================================= +echo "START /build.sh" + +set +e +echo "Begining build" +# python3 /scripts/ogr2ogr.py \ +# -f PGDump -lco LAUNDER=NO -lco DROP_TABLE=OFF | gzip > /data/merit_.sql.gz \ +# /data/merit_plus_simplify.gpkg +while [ ! -f /data/merit_plus_simplify.gpkg ]; do echo 1; sleep 1; done +ogr2ogr \ + -f PostgreSQL \ + PG:"host='localhost' \ + user='${POSTGRES_USER}' \ + password='${POSTGRES_PASSWORD}' \ + dbname='${POSTGRES_DB}'" \ + /data/merit_plus_simplify.gpkg + +echo "Done" +# ogr2ogr \ +# -f PostgreSQL \ +# PG:"host='localhost' \ +# user='${POSTGRES_USER}' \ +# password='${POSTGRES_PASSWORD}' \ +# dbname='${POSTGRES_DB}'" \ +# /data/e_merit_cats.gpkg + +# ogr2ogr \ +# -f PostgreSQL \ +# PG:"host='localhost' \ +# user='${POSTGRES_USER}' \ +# password='${POSTGRES_PASSWORD}' \ +# dbname='${POSTGRES_DB}'" \ +# /data/w_merit_cats.gpkg + +# rm -rf /data/ diff --git a/pygeoapi/docker-compose.yml b/pygeoapi/docker-compose.yml index bd4c239..496b990 100644 --- a/pygeoapi/docker-compose.yml +++ b/pygeoapi/docker-compose.yml @@ -32,17 +32,33 @@ version: "3" services: pygeoapi: - image: geopython/pygeoapi:latest - ports: + build: + context: . + dockerfile: Dockerfile + restart: always + ports: - 5050:80 volumes: - ./pygeoapi.config.yml:/pygeoapi/local.config.yml - - ./merit_plus_simplify.gpkg:/data/merit_plus_simplify.gpkg - - ./e_merit_cats.gpkg:/data/e_merit_cats.gpkg - - ./w_merit_cats.gpkg:/data/w_merit_cats.gpkg - - ./plugin.py:/pygeoapi/pygeoapi/plugin.py - - ./flask_app.py:/pygeoapi/pygeoapi/flask_app.py - - ./river_runner.py:/pygeoapi/pygeoapi/process/river_runner.py - - ./sqlite.py:/pygeoapi/pygeoapi/provider/sqlite.py - - ./map.html:/pygeoapi/pygeoapi/templates/processes/map.html - ./schemas.opengis.net:/opt/schemas.opengis.net + + db: + build: + context: . 
+ dockerfile: Dockerfile_db + restart: always + ports: + - 5432:5432 + environment: + POSTGRES_USER: root + POSTGRES_PASSWORD: password + POSTGRES_DB: merit + volumes: + - ./merit_plus_simplify.gpkg:/data/merit_plus_simplify.gpkg + - ./build.sh:/docker-entrypoint-initdb.d/build.sh + + adminer: + image: adminer + restart: always + ports: + - 8080:8080 diff --git a/pygeoapi/flask_app.py b/pygeoapi/flask_app.py deleted file mode 100644 index 12503d2..0000000 --- a/pygeoapi/flask_app.py +++ /dev/null @@ -1,450 +0,0 @@ -# ================================================================= -# -# Authors: Tom Kralidis -# Norman Barker -# -# Copyright (c) 2020 Tom Kralidis -# -# Permission is hereby granted, free of charge, to any person -# obtaining a copy of this software and associated documentation -# files (the "Software"), to deal in the Software without -# restriction, including without limitation the rights to use, -# copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the -# Software is furnished to do so, subject to the following -# conditions: -# -# The above copyright notice and this permission notice shall be -# included in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -# OTHER DEALINGS IN THE SOFTWARE. -# -# ================================================================= - -""" Flask module providing the route paths to the api""" - -import os - -import click - -from flask import Flask, Blueprint, make_response, request, send_from_directory - -from pygeoapi.api import API -from pygeoapi.util import get_mimetype, render_j2_template, yaml_load - - -CONFIG = None - -if 'PYGEOAPI_CONFIG' not in os.environ: - raise RuntimeError('PYGEOAPI_CONFIG environment variable not set') - -with open(os.environ.get('PYGEOAPI_CONFIG'), encoding='utf8') as fh: - CONFIG = yaml_load(fh) - -STATIC_FOLDER = 'static' -if 'templates' in CONFIG['server']: - STATIC_FOLDER = CONFIG['server']['templates'].get('static', 'static') - -APP = Flask(__name__, static_folder=STATIC_FOLDER, static_url_path='/static') -APP.url_map.strict_slashes = False - -BLUEPRINT = Blueprint('pygeoapi', __name__, static_folder=STATIC_FOLDER) - -# CORS: optionally enable from config. 
-if CONFIG['server'].get('cors', False): - from flask_cors import CORS - CORS(APP) - -APP.config['JSONIFY_PRETTYPRINT_REGULAR'] = CONFIG['server'].get( - 'pretty_print', True) - -api_ = API(CONFIG) - -OGC_SCHEMAS_LOCATION = CONFIG['server'].get('ogc_schemas_location', None) - -if (OGC_SCHEMAS_LOCATION is not None and - not OGC_SCHEMAS_LOCATION.startswith('http')): - # serve the OGC schemas locally - - if not os.path.exists(OGC_SCHEMAS_LOCATION): - raise RuntimeError('OGC schemas misconfigured') - - @BLUEPRINT.route('/schemas/', methods=['GET']) - def schemas(path): - """ - Serve OGC schemas locally - - :param path: path of the OGC schema document - - :returns: HTTP response - """ - - full_filepath = os.path.join(OGC_SCHEMAS_LOCATION, path) - dirname_ = os.path.dirname(full_filepath) - basename_ = os.path.basename(full_filepath) - - # TODO: better sanitization? - path_ = dirname_.replace('..', '').replace('//', '') - return send_from_directory(path_, basename_, - mimetype=get_mimetype(basename_)) - - -def get_response(result: tuple): - """ - Creates a Flask Response object and updates matching headers. - - :param result: The result of the API call. - This should be a tuple of (headers, status, content). - - :returns: A Response instance. - """ - - headers, status, content = result - response = make_response(content, status) - - if headers: - response.headers = headers - return response - - -@BLUEPRINT.route('/') -def landing_page(): - """ - OGC API landing page endpoint - - :returns: HTTP response - """ - return get_response(api_.landing_page(request)) - - -@BLUEPRINT.route('/openapi') -def openapi(): - """ - OpenAPI endpoint - - :returns: HTTP response - """ - with open(os.environ.get('PYGEOAPI_OPENAPI'), encoding='utf8') as ff: - if os.environ.get('PYGEOAPI_OPENAPI').endswith(('.yaml', '.yml')): - openapi_ = yaml_load(ff) - else: # JSON file, do not transform - openapi_ = ff - - return get_response(api_.openapi(request, openapi_)) - - -@BLUEPRINT.route('/conformance') -def conformance(): - """ - OGC API conformance endpoint - - :returns: HTTP response - """ - return get_response(api_.conformance(request)) - - -@BLUEPRINT.route('/collections') -@BLUEPRINT.route('/collections/') -def collections(collection_id=None): - """ - OGC API collections endpoint - - :param collection_id: collection identifier - - :returns: HTTP response - """ - return get_response(api_.describe_collections(request, collection_id)) - - -@BLUEPRINT.route('/collections//queryables') -def collection_queryables(collection_id=None): - """ - OGC API collections querybles endpoint - - :param collection_id: collection identifier - - :returns: HTTP response - """ - return get_response(api_.get_collection_queryables(request, collection_id)) - - -@BLUEPRINT.route('/collections//items', methods=['GET', 'POST']) -@BLUEPRINT.route('/collections//items/') -def collection_items(collection_id, item_id=None): - """ - OGC API collections items endpoint - - :param collection_id: collection identifier - :param item_id: item identifier - - :returns: HTTP response - """ - if item_id is None: - if request.method == 'GET': # list items - return get_response( - api_.get_collection_items(request, collection_id)) - elif request.method == 'POST': # filter items - return get_response( - api_.post_collection_items(request, collection_id)) - - else: - return get_response( - api_.get_collection_item(request, collection_id, item_id)) - - -@BLUEPRINT.route('/collections//coverage') -def collection_coverage(collection_id): - """ - OGC API - Coverages 
coverage endpoint - - :param collection_id: collection identifier - - :returns: HTTP response - """ - return get_response(api_.get_collection_coverage(request, collection_id)) - - -@BLUEPRINT.route('/collections//coverage/domainset') -def collection_coverage_domainset(collection_id): - """ - OGC API - Coverages coverage domainset endpoint - - :param collection_id: collection identifier - - :returns: HTTP response - """ - return get_response(api_.get_collection_coverage_domainset( - request, collection_id)) - - -@BLUEPRINT.route('/collections//coverage/rangetype') -def collection_coverage_rangetype(collection_id): - """ - OGC API - Coverages coverage rangetype endpoint - - :param collection_id: collection identifier - - :returns: HTTP response - """ - return get_response(api_.get_collection_coverage_rangetype( - request, collection_id)) - - -@BLUEPRINT.route('/collections//tiles') -def get_collection_tiles(collection_id=None): - """ - OGC open api collections tiles access point - - :param collection_id: collection identifier - - :returns: HTTP response - """ - return get_response(api_.get_collection_tiles( - request, collection_id)) - - -@BLUEPRINT.route('/collections//tiles//metadata') # noqa -def get_collection_tiles_metadata(collection_id=None, tileMatrixSetId=None): - """ - OGC open api collection tiles service metadata - - :param collection_id: collection identifier - :param tileMatrixSetId: identifier of tile matrix set - - :returns: HTTP response - """ - return get_response(api_.get_collection_tiles_metadata( - request, collection_id, tileMatrixSetId)) - - -@BLUEPRINT.route('/collections//tiles/\ -///') -def get_collection_tiles_data(collection_id=None, tileMatrixSetId=None, - tileMatrix=None, tileRow=None, tileCol=None): - """ - OGC open api collection tiles service data - - :param collection_id: collection identifier - :param tileMatrixSetId: identifier of tile matrix set - :param tileMatrix: identifier of {z} matrix index - :param tileRow: identifier of {y} matrix index - :param tileCol: identifier of {x} matrix index - - :returns: HTTP response - """ - return get_response(api_.get_collection_tiles_data( - request, collection_id, tileMatrixSetId, tileMatrix, tileRow, tileCol)) - - -@BLUEPRINT.route('/processes') -@BLUEPRINT.route('/processes/') -def get_processes(process_id=None): - """ - OGC API - Processes description endpoint - - :param process_id: process identifier - - :returns: HTTP response - """ - return get_response(api_.describe_processes(request, process_id)) - - -@BLUEPRINT.route('/processes//map') -def get_processes_map(process_id=None): - """ - OGC API - Processes map endpoint - - :param process_id: process identifier - - :returns: HTTP response - """ - return render_j2_template(CONFIG, 'processes/map.html', {}, 'en-US') - - -@BLUEPRINT.route('/processes//jobs') -@BLUEPRINT.route('/processes//jobs/', - methods=['GET', 'DELETE']) -def get_process_jobs(process_id=None, job_id=None): - """ - OGC API - Processes jobs endpoint - - :param process_id: process identifier - :param job_id: job identifier - - :returns: HTTP response - """ - - if job_id is None: - return get_response(api_.get_process_jobs(request, process_id)) - else: - if request.method == 'DELETE': # dismiss job - return get_response(api_.delete_process_job(process_id, job_id)) - else: # Return status of a specific job - return get_response(api_.get_process_jobs( - request, process_id, job_id)) - - -@BLUEPRINT.route('/processes//execution', methods=['GET', 'POST']) -def execute_process_jobs(process_id): - """ 
- OGC API - Processes execution endpoint - - :param process_id: process identifier - - :returns: HTTP response - """ - if request.method == 'GET': - from json import dumps - request.data = dumps({'inputs': request.args}).encode('UTF-8') - - return get_response(api_.execute_process(request, process_id)) - - -@BLUEPRINT.route('/processes//jobs//results', - methods=['GET']) -def get_process_job_result(process_id=None, job_id=None): - """ - OGC API - Processes job result endpoint - - :param process_id: process identifier - :param job_id: job identifier - - :returns: HTTP response - """ - return get_response(api_.get_process_job_result( - request, process_id, job_id)) - - -@BLUEPRINT.route('/processes//jobs//results/', - methods=['GET']) -def get_process_job_result_resource(process_id, job_id, resource): - """ - OGC API - Processes job result resource endpoint - - :param process_id: process identifier - :param job_id: job identifier - :param resource: job resource - - :returns: HTTP response - """ - return get_response(api_.get_process_job_result_resource( - request, process_id, job_id, resource)) - - -@BLUEPRINT.route('/collections//position') -@BLUEPRINT.route('/collections//area') -@BLUEPRINT.route('/collections//cube') -@BLUEPRINT.route('/collections//trajectory') -@BLUEPRINT.route('/collections//corridor') -@BLUEPRINT.route('/collections//instances//position') # noqa -@BLUEPRINT.route('/collections//instances//area') -@BLUEPRINT.route('/collections//instances//cube') -@BLUEPRINT.route('/collections//instances//trajectory') # noqa -@BLUEPRINT.route('/collections//instances//corridor') # noqa -def get_collection_edr_query(collection_id, instance_id=None): - """ - OGC EDR API endpoints - - :param collection_id: collection identifier - :param instance_id: instance identifier - - :returns: HTTP response - """ - query_type = request.path.split('/')[-1] - return get_response(api_.get_collection_edr_query(request, collection_id, - instance_id, query_type)) - - -@BLUEPRINT.route('/stac') -def stac_catalog_root(): - """ - STAC root endpoint - - :returns: HTTP response - """ - return get_response(api_.get_stac_root(request)) - - -@BLUEPRINT.route('/stac/') -def stac_catalog_path(path): - """ - STAC path endpoint - - :param path: path - - :returns: HTTP response - """ - return get_response(api_.get_stac_path(request, path)) - - -APP.register_blueprint(BLUEPRINT) - - -@click.command() -@click.pass_context -@click.option('--debug', '-d', default=False, is_flag=True, help='debug') -def serve(ctx, server=None, debug=False): - """ - Serve pygeoapi via Flask. Runs pygeoapi - as a flask server. Not recommend for production. - - :param server: `string` of server type - :param debug: `bool` of whether to run in debug mode - - :returns: void - """ - - # setup_logger(CONFIG['logging']) - APP.run(debug=True, host=api_.config['server']['bind']['host'], - port=api_.config['server']['bind']['port']) - - -if __name__ == '__main__': # run locally, for testing - serve() diff --git a/pygeoapi/map.html b/pygeoapi/map.html deleted file mode 100644 index bb75bdf..0000000 --- a/pygeoapi/map.html +++ /dev/null @@ -1,83 +0,0 @@ -{% extends "_base.html" %} -{% block title %}{{ super() }} {{ data['title'] }} {% endblock %} - -{% block extrahead %} - - - - -{% endblock %} - -{% block body %} -
-{% endblock %} - -{% block extrafoot %} - - -{% endblock %} \ No newline at end of file diff --git a/pygeoapi/plugin.py b/pygeoapi/plugin.py deleted file mode 100644 index 9db51ab..0000000 --- a/pygeoapi/plugin.py +++ /dev/null @@ -1,113 +0,0 @@ -# ================================================================= -# -# Authors: Tom Kralidis -# -# Copyright (c) 2021 Tom Kralidis -# -# Permission is hereby granted, free of charge, to any person -# obtaining a copy of this software and associated documentation -# files (the "Software"), to deal in the Software without -# restriction, including without limitation the rights to use, -# copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the -# Software is furnished to do so, subject to the following -# conditions: -# -# The above copyright notice and this permission notice shall be -# included in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -# OTHER DEALINGS IN THE SOFTWARE. -# -# ================================================================= -"""Plugin loader""" - -import importlib -import logging - -LOGGER = logging.getLogger(__name__) - -#: Loads provider plugins to be used by pygeoapi,\ -#: formatters and processes available -PLUGINS = { - 'provider': { - 'CSV': 'pygeoapi.provider.csv_.CSVProvider', - 'Elasticsearch': 'pygeoapi.provider.elasticsearch_.ElasticsearchProvider', # noqa - 'ElasticsearchCatalogue': 'pygeoapi.provider.elasticsearch_.ElasticsearchCatalogueProvider', # noqa - 'GeoJSON': 'pygeoapi.provider.geojson.GeoJSONProvider', - 'OGR': 'pygeoapi.provider.ogr.OGRProvider', - 'PostgreSQL': 'pygeoapi.provider.postgresql.PostgreSQLProvider', - 'SQLiteGPKG': 'pygeoapi.provider.sqlite.SQLiteGPKGProvider', - 'MongoDB': 'pygeoapi.provider.mongo.MongoProvider', - 'FileSystem': 'pygeoapi.provider.filesystem.FileSystemProvider', - 'rasterio': 'pygeoapi.provider.rasterio_.RasterioProvider', - 'xarray': 'pygeoapi.provider.xarray_.XarrayProvider', - 'MVT': 'pygeoapi.provider.mvt.MVTProvider', - 'TinyDBCatalogue': 'pygeoapi.provider.tinydb_.TinyDBCatalogueProvider', - 'SensorThings': 'pygeoapi.provider.sensorthings.SensorThingsProvider', - 'xarray-edr': 'pygeoapi.provider.xarray_edr.XarrayEDRProvider' - }, - 'formatter': { - 'CSV': 'pygeoapi.formatter.csv_.CSVFormatter' - }, - 'process': { - 'RiverRunner': 'pygeoapi.process.river_runner.RiverRunnerProcessor', - 'HelloWorld': 'pygeoapi.process.hello_world.HelloWorldProcessor' - }, - 'process_manager': { - 'Dummy': 'pygeoapi.process.manager.dummy.DummyManager', - 'TinyDB': 'pygeoapi.process.manager.tinydb_.TinyDBManager' - } -} - - -def load_plugin(plugin_type, plugin_def): - """ - loads plugin by name - - :param plugin_type: type of plugin (provider, formatter) - :param plugin_def: plugin definition - - :returns: plugin object - """ - - name = plugin_def['name'] - - if plugin_type not in PLUGINS.keys(): - msg = 'Plugin type {} not found'.format(plugin_type) - LOGGER.exception(msg) - raise InvalidPluginError(msg) - - plugin_list = PLUGINS[plugin_type] - - 
LOGGER.debug('Plugins: {}'.format(plugin_list)) - - if '.' not in name and name not in plugin_list.keys(): - msg = 'Plugin {} not found'.format(name) - LOGGER.exception(msg) - raise InvalidPluginError(msg) - - if '.' in name: # dotted path - packagename, classname = name.rsplit('.', 1) - else: # core formatter - packagename, classname = plugin_list[name].rsplit('.', 1) - - LOGGER.debug('package name: {}'.format(packagename)) - LOGGER.debug('class name: {}'.format(classname)) - - module = importlib.import_module(packagename) - class_ = getattr(module, classname) - plugin = class_(plugin_def) - - return plugin - - -class InvalidPluginError(Exception): - """Invalid plugin""" - pass diff --git a/pygeoapi/pygeoapi.config.gcp.yml b/pygeoapi/pygeoapi.config.gcp.yml index 3ff809b..c4b435e 100644 --- a/pygeoapi/pygeoapi.config.gcp.yml +++ b/pygeoapi/pygeoapi.config.gcp.yml @@ -100,12 +100,22 @@ resources: - rivers context: - schema: https://schema.org/ - nameID: schema:name + nameid: schema:name links: - type: application/html rel: canonical - title: data source - href: https://www.hydroshare.org/resource/4a22e88e689949afa1cf71ae009eaf1b/data/contents/merit_plus_simplify.gpkg + title: source + href: https://www.sciencebase.gov/catalog/item/614a8864d34e0df5fb97572d + hreflang: en-US + - type: application/html + rel: canonical + title: download + href: https://prod-is-usgs-sb-prod-publish.s3.amazonaws.com/614a8864d34e0df5fb97572d/merit_plus_simplify.zip + hreflang: en-US + - type: application/html + rel: canonical + title: information + href: https://www.usgs.gov/core-science-systems/ngp/national-hydrography/value-added-attributes-vaas hreflang: en-US extents: spatial: @@ -116,76 +126,136 @@ resources: end: null providers: - type: feature - name: SQLiteGPKG - data: /data/merit_plus_simplify.gpkg + name: PostgreSQL + data: + host: ${POSTGRES_HOST} + dbname: ${POSTGRES_DB} + user: ${POSTGRES_USER} + password: ${POSTGRES_PASSWORD} + search_path: [public] + geom_field: wkb_geometry id_field: comid table: merit_plus - eastcatchments: - type: collection - title: - en: Eastern Hemisphere Merit Catchments - description: - en: Simplified Eastern Hemisphere Merit Catchments + river-runner: + type: process + version: 0.1.0 + id: river-runner + title: + en: River Runner + description: + en: A process that takes a set of coordinates in the world, and returns the largest flowpath from it to its terminal flowpoint. 
keywords: en: - - catchments - context: - - schema: https://schema.org/ - COMID: schema:identifier + - rivers + - river-runner links: - type: application/html rel: canonical - title: data source - href: https://www.hydroshare.org/resource/4a22e88e689949afa1cf71ae009eaf1b/data/contents/e_merit_cats.gpkg + title: source + href: https://www.sciencebase.gov/catalog/item/614a8864d34e0df5fb97572d hreflang: en-US - extents: - spatial: - bbox: [-180,-90,180,90] - crs: http://www.opengis.net/def/crs/OGC/1.3/CRS84 - temporal: - begin: null - end: null - providers: - - type: feature - name: SQLiteGPKG - data: /data/e_merit_cats.gpkg - id_field: COMID - table: merit_cats - - westcatchments: - type: collection - title: - en: Western Hemisphere Merit Catchments - description: - en: Simplified Western Hemisphere Merit Catchments - keywords: - en: - - catchments - context: - - schema: https://schema.org/ - COMID: schema:identifier - links: - type: application/html - rel: canonical - title: data source - href: https://www.hydroshare.org/resource/4a22e88e689949afa1cf71ae009eaf1b/data/contents/w_merit_cats.gpkg + rel: cannonical + title: github + href: https://github.com/ksonda/global-river-runner + hreflang: en-US + - type: application/html + rel: cannonical + title: application + href: https://river-runner.samlearner.com/ hreflang: en-US - extents: - spatial: - bbox: [-180,-90,180,90] - crs: http://www.opengis.net/def/crs/OGC/1.3/CRS84 - temporal: - begin: null - end: null - providers: - - type: feature - name: SQLiteGPKG - data: /data/w_merit_cats.gpkg - id_field: COMID - table: merit_cats - - river-runner: - type: process processor: name: RiverRunner + inputs: + bbox: + title: + en: Bounding Box + description: + en: Boundary box to begin a river runner query from + keywords: + en: [box, coordinates] + schema: + type: object + default: [] + minOccurs: 0 + maxOccurs: 1 + metadata: null + lat: + title: + en: Latitude + description: + en: Latitude of a point + keywords: + en: [latitude, coordinate, eastwest] + schema: + type: number + default: null + minOccurs: 0 + maxOccurs: 1 + metadata: null + lng: + title: + en: Longitude + description: + en: Longitude of a point + keywords: + en: [longitude, coordinate, northsouth] + schema: + type: number + default: null + minOccurs: 0 + maxOccurs: 1 + metadata: null + latlng: + title: + en: Latitude and Longitude + description: + en: Coordinates in order [Long, Lat] + keywords: + en: [coordinates, world, point] + schema: + type: object + default: [] + minOccurs: 0 + maxOccurs: 1 + metadata: null + sorted: + title: + en: Sorted + description: + en: 'Sort features by flow direction' + keywords: + en: [downstream, upstream, unset] + schema: + type: string + default: downstream + minOccurs: 0 + maxOccurs: 1 + metadata: null + sortby: + title: + en: Sort By + description: + en: 'Property to sort featurs with' + keywords: + en: [sort, hydroseq, nameid, comid] + schema: + type: string + default: hydroseq + minOccurs: 0 + maxOccurs: 1 + metadata: null + outputs: + echo: + title: + en: Feature Collection + description: + en: A geoJSON Feature Collection of the River Runner process + schema: + type: object + contentMediaType: application/json + example: + inputs: + bbox: [-86.2, 39.7, -86.15, 39.75] + sorted: downstream diff --git a/pygeoapi/pygeoapi.config.yml b/pygeoapi/pygeoapi.config.yml index 72ac6ee..f5567d7 100644 --- a/pygeoapi/pygeoapi.config.yml +++ b/pygeoapi/pygeoapi.config.yml @@ -100,12 +100,22 @@ resources: - rivers context: - schema: https://schema.org/ - 
nameID: schema:name + nameid: schema:name links: - type: application/html rel: canonical - title: data source - href: https://www.hydroshare.org/resource/4a22e88e689949afa1cf71ae009eaf1b/data/contents/merit_plus_simplify.gpkg + title: source + href: https://www.sciencebase.gov/catalog/item/614a8864d34e0df5fb97572d + hreflang: en-US + - type: application/html + rel: canonical + title: download + href: https://prod-is-usgs-sb-prod-publish.s3.amazonaws.com/614a8864d34e0df5fb97572d/merit_plus_simplify.zip + hreflang: en-US + - type: application/html + rel: canonical + title: information + href: https://www.usgs.gov/core-science-systems/ngp/national-hydrography/value-added-attributes-vaas hreflang: en-US extents: spatial: @@ -116,77 +126,136 @@ resources: end: null providers: - type: feature - name: SQLiteGPKG - data: /data/merit_plus_simplify.gpkg + name: PostgreSQL + data: + host: ${POSTGRES_HOST} + dbname: ${POSTGRES_DB} + user: ${POSTGRES_USER} + password: ${POSTGRES_PASSWORD} + search_path: [public] + geom_field: wkb_geometry id_field: comid table: merit_plus - eastcatchments: - type: collection - title: - en: Eastern Hemisphere Merit Catchments - description: - en: Simplified Eastern Hemisphere Merit Catchments + river-runner: + type: process + version: 0.1.0 + id: river-runner + title: + en: River Runner + description: + en: A process that takes a set of coordinates in the world, and returns the largest flowpath from it to its terminal flowpoint. keywords: en: - - catchments - context: - - schema: https://schema.org/ - COMID: schema:identifier + - rivers + - river-runner links: - type: application/html rel: canonical - title: data source - href: https://www.hydroshare.org/resource/4a22e88e689949afa1cf71ae009eaf1b/data/contents/e_merit_cats.gpkg + title: source + href: https://www.sciencebase.gov/catalog/item/614a8864d34e0df5fb97572d hreflang: en-US - extents: - spatial: - bbox: [-180,-90,180,90] - crs: http://www.opengis.net/def/crs/OGC/1.3/CRS84 - temporal: - begin: null - end: null - providers: - - type: feature - name: SQLiteGPKG - data: /data/e_merit_cats.gpkg - id_field: COMID - table: merit_cats - - westcatchments: - type: collection - title: - en: Western Hemisphere Merit Catchments - description: - en: Simplified Western Hemisphere Merit Catchments - keywords: - en: - - catchments - context: - - schema: https://schema.org/ - COMID: schema:identifier - links: - type: application/html - rel: canonical - title: data source - href: https://www.hydroshare.org/resource/4a22e88e689949afa1cf71ae009eaf1b/data/contents/w_merit_cats.gpkg + rel: cannonical + title: github + href: https://github.com/ksonda/global-river-runner + hreflang: en-US + - type: application/html + rel: cannonical + title: application + href: https://river-runner.samlearner.com/ hreflang: en-US - extents: - spatial: - bbox: [-180,-90,180,90] - crs: http://www.opengis.net/def/crs/OGC/1.3/CRS84 - temporal: - begin: null - end: null - providers: - - type: feature - name: SQLiteGPKG - data: /data/w_merit_cats.gpkg - id_field: COMID - table: merit_cats - - river-runner: - type: process processor: name: RiverRunner - + inputs: + bbox: + title: + en: Bounding Box + description: + en: Boundary box to begin a river runner query from + keywords: + en: [box, coordinates] + schema: + type: object + default: [] + minOccurs: 0 + maxOccurs: 1 + metadata: null + lat: + title: + en: Latitude + description: + en: Latitude of a point + keywords: + en: [latitude, coordinate, eastwest] + schema: + type: number + default: null + minOccurs: 
0 + maxOccurs: 1 + metadata: null + lng: + title: + en: Longitude + description: + en: Longitude of a point + keywords: + en: [longitude, coordinate, northsouth] + schema: + type: number + default: null + minOccurs: 0 + maxOccurs: 1 + metadata: null + latlng: + title: + en: Latitude and Longitude + description: + en: Coordinates in order [Long, Lat] + keywords: + en: [coordinates, world, point] + schema: + type: object + default: [] + minOccurs: 0 + maxOccurs: 1 + metadata: null + sorted: + title: + en: Sorted + description: + en: 'Sort features by flow direction' + keywords: + en: [downstream, upstream, unset] + schema: + type: string + default: downstream + minOccurs: 0 + maxOccurs: 1 + metadata: null + sortby: + title: + en: Sort By + description: + en: 'Property to sort featurs with' + keywords: + en: [sort, hydroseq, nameid, comid] + schema: + type: string + default: hydroseq + minOccurs: 0 + maxOccurs: 1 + metadata: null + outputs: + echo: + title: + en: Feature Collection + description: + en: A geoJSON Feature Collection of the River Runner process + schema: + type: object + contentMediaType: application/json + example: + inputs: + bbox: [-86.2, 39.7, -86.15, 39.75] + sorted: downstream diff --git a/pygeoapi/river_runner.py b/pygeoapi/river_runner.py deleted file mode 100644 index ac60c99..0000000 --- a/pygeoapi/river_runner.py +++ /dev/null @@ -1,235 +0,0 @@ -# ================================================================= -# -# Authors: Benjamin Webb -# -# Copyright (c) 2021 Benjamin Webb -# -# Permission is hereby granted, free of charge, to any person -# obtaining a copy of this software and associated documentation -# files (the "Software"), to deal in the Software without -# restriction, including without limitation the rights to use, -# copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the -# Software is furnished to do so, subject to the following -# conditions: -# -# The above copyright notice and this permission notice shall be -# included in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -# OTHER DEALINGS IN THE SOFTWARE. -# -# ================================================================= - -import os -import logging - -from pygeoapi.util import yaml_load -from pygeoapi.plugin import load_plugin -from pygeoapi.process.base import BaseProcessor, ProcessorExecuteError - - -LOGGER = logging.getLogger(__name__) -CONFIG_ = '' - -with open(os.getenv('PYGEOAPI_CONFIG'), encoding='utf8') as fh: - CONFIG_ = yaml_load(fh) - -PROVIDER_DEF = CONFIG_['resources']['merit']['providers'][0] -P = 'properties' -#: Process metadata and description -PROCESS_METADATA = { - 'version': '0.1.0', - 'id': 'river-runner', - 'title': { - 'en': 'River Runner' - }, - 'description': { - 'en': 'A simple process that takes a lat/lng or bbox as input, ' - 'and returns the largest flowpath.' 
- }, - 'keywords': ['river runner', 'rivers'], - 'links': [{ - 'type': 'text/html', - 'rel': 'canonical', - 'title': 'information', - 'href': 'https://example.org/process', - 'hreflang': 'en-US' - }], - 'inputs': { - 'bbox': { - 'title': 'Boundary Box', - 'description': 'A set of four coordinates', - 'schema': { - 'type': 'object', - }, - 'minOccurs': 0, - 'maxOccurs': 1, - 'metadata': None, # TODO how to use? - 'keywords': ['coordinates', 'geography'] - }, - 'latlng': { - 'title': 'Latitude & Longitude', - 'description': 'A set of two coordinates', - 'schema': { - 'type': 'object', - }, - 'minOccurs': 0, - 'maxOccurs': 1, - 'metadata': None, # TODO how to use? - 'keywords': ['coordinates', 'latitude', 'longitude'] - }, - 'lat': { - 'title': 'Latitude', - 'description': 'Latitude of a point', - 'schema': { - 'type': 'number', - }, - 'minOccurs': 0, - 'maxOccurs': 1, - 'metadata': None, # TODO how to use? - 'keywords': ['coordinates', 'latitude'] - }, - 'lng': { - 'title': 'Longitude', - 'description': 'Longitude of a point', - 'schema': { - 'type': 'number', - }, - 'minOccurs': 0, - 'maxOccurs': 1, - 'metadata': None, # TODO how to use? - 'keywords': ['coordinates', 'longitude'] - } - }, - 'outputs': { - 'echo': { - 'title': 'Feature Collection', - 'description': 'A geoJSON Feature Collection of River Runner', - 'schema': { - 'type': 'Object', - 'contentMediaType': 'application/json' - } - } - }, - 'example': { - 'inputs': { - 'bbox': [-86.2, 39.7, -86.15, 39.75] - } - } -} - - -class RiverRunnerProcessor(BaseProcessor): - """River Runner Processor example""" - - def __init__(self, processor_def): - """ - Initialize object - - :param processor_def: provider definition - - :returns: pygeoapi.process.river_runner.RiverRunnerProcessor - """ - super().__init__(processor_def, PROCESS_METADATA) - - def execute(self, data): - mimetype = 'application/json' - outputs = { - 'id': 'echo', - 'code': 'success', - 'value': { - 'type': 'FeatureCollection', - 'features': [] - } - } - - if not data.get('bbox') and not data.get('latlng') and \ - (not data.get('lat') and not data.get('lng')): - raise ProcessorExecuteError(f'Invalid input: { {{data.items()}} }') - - for k, v in data.items(): - if isinstance(v, str): - data[k] = ','.join(v.split(',')).strip('()[]') - if k in ['latlng', 'bbox']: - data[k] = data[k].split(',') - - if data.get('bbox', data.get('latlng')): - bbox = data.get('bbox', data.get('latlng')) - else: - bbox = (data.get('lng'), data.get('lat')) - - bbox = bbox * 2 if len(bbox) == 2 else bbox - bbox = self._expand_bbox(bbox) - - p = load_plugin('provider', PROVIDER_DEF) - value = p.query(bbox=bbox) - if len(value['features']) < 1: - LOGGER.debug(f'No features in bbox {bbox}, expanding') - bbox = self._expand_bbox(bbox, e=0.5) - value = p.query(bbox=bbox) - - if len(value['features']) < 1: - LOGGER.debug('No features found') - return mimetype, outputs - - LOGGER.debug('fetching downstream features') - mh = self._compare(value, 'hydroseq', min) - levelpaths = [] - for i in (mh[P]['levelpathi'], - *mh[P]['down_levelpaths'].split(',')): - try: - i = int(float(i)) - levelpaths.append(str(i)) - except ValueError: - LOGGER.debug(f'No Downstem Rivers found {i}') - - d = p.query( - properties=[('levelpathi', i) for i in levelpaths], - limit=100000, comp='OR' - ) - - mins = {level: {} for level in levelpaths} - for f in d['features']: - key = str(f[P]['levelpathi']) - prev = mins[key].get(P, {}).get('hydroseq', None) - - if prev is None or \ - min(prev, f[P]['hydroseq']) != prev: - mins[key] = f - - 
trim = [(mh[P]['levelpathi'], mh[P]['hydroseq'])] - for k, v in mins.items(): - trim.append((v[P]['dnlevelpat'], v[P]['dnhydroseq'])) - - LOGGER.debug('keeping only mainstem flowpath') - outm = [] - for f in d['features']: - for t in trim: - if f[P]['levelpathi'] == t[0] and \ - f[P]['hydroseq'] <= t[1]: - outm.append(f) - - value.update({'features': outm}) - outputs.update({'value': value}) - return mimetype, outputs - - def _compare(self, fc, prop, dir): - val = fc['features'][0] - for f in fc['features']: - if dir(f[P][prop], val[P][prop]) != val[P][prop]: - val = f - return val - - def _expand_bbox(self, bbox, e=0.25): - return [float(b) + e if i < 2 else float(b) - e - for (i, b) in enumerate(bbox)] - - def __repr__(self): - return ' {}'.format(self.name) diff --git a/pygeoapi/sqlite.py b/pygeoapi/sqlite.py deleted file mode 100644 index e1334fa..0000000 --- a/pygeoapi/sqlite.py +++ /dev/null @@ -1,344 +0,0 @@ -# ================================================================= -# -# Authors: Jorge Samuel Mendes de Jesus -# Tom Kralidis -# Francesco Bartoli -# -# Copyright (c) 2018 Jorge Samuel Mendes de Jesus -# Copyright (c) 2021 Tom Kralidis -# Copyright (c) 2020 Francesco Bartoli -# -# Permission is hereby granted, free of charge, to any person -# obtaining a copy of this software and associated documentation -# files (the "Software"), to deal in the Software without -# restriction, including without limitation the rights to use, -# copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the -# Software is furnished to do so, subject to the following -# conditions: -# -# The above copyright notice and this permission notice shall be -# included in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -# OTHER DEALINGS IN THE SOFTWARE. -# -# ================================================================= - -import sqlite3 -import logging -import os -import json -from pygeoapi.plugin import InvalidPluginError -from pygeoapi.provider.base import (BaseProvider, ProviderConnectionError, - ProviderItemNotFoundError) - -LOGGER = logging.getLogger(__name__) - - -SPATIALITE_EXTENSION = os.getenv('SPATIALITE_LIBRARY_PATH', - 'mod_spatialite.so') - - -class SQLiteGPKGProvider(BaseProvider): - """Generic provider for SQLITE and GPKG using sqlite3 module. - This module requires install of libsqlite3-mod-spatialite - TODO: DELETE, UPDATE, CREATE - """ - - def __init__(self, provider_def): - """ - SQLiteGPKGProvider Class constructor - - :param provider_def: provider definitions from yml pygeoapi-config. 
- data,id_field, name set in parent class - - :returns: pygeoapi.provider.base.SQLiteProvider - """ - super().__init__(provider_def) - - self.table = provider_def['table'] - self.application_id = None - self.geom_col = None - - LOGGER.debug('Setting SQLite properties:') - LOGGER.debug('Data source: {}'.format(self.data)) - LOGGER.debug('Name: {}'.format(self.name)) - LOGGER.debug('ID_field: {}'.format(self.id_field)) - LOGGER.debug('Table: {}'.format(self.table)) - - self.cursor = self.__load() - - LOGGER.debug('Got cursor from DB') - LOGGER.debug('Get available fields/properties') - - self.get_fields() - - def get_fields(self): - """ - Get fields from sqlite table (columns are field) - - :returns: dict of fields - """ - - if not self.fields: - results = self.cursor.execute( - 'PRAGMA table_info({})'.format(self.table)).fetchall() - for item in results: - self.fields[item['name']] = {'type': item['type']} - - return self.fields - - def __get_where_clauses(self, properties=[], bbox=[], - comp='AND', **kwargs): - """ - Generarates WHERE conditions to be implemented in query. - Private method mainly associated with query method. - - Method returns part of the SQL query, plus tupple to be used - in the sqlite query method - - :param properties: list of tuples (name, value) - :param bbox: bounding box [minx,miny,maxx,maxy] - - :returns: str, tuple - """ - - where_values = tuple() - where_clause = " WHERE " if (properties or bbox) else "" - if not where_clause: - return where_clause, where_values - - if properties: - where_clause += f" {comp} ".join( - ["{}=?".format(k) for k, v in properties]) - where_values += where_values + tuple((v for k, v in properties)) - - if bbox: - if properties: - where_clause += " AND " - where_clause += " Intersects({}, \ - BuildMbr(?,?,?,?)) ".format(self.geom_col) - where_values += tuple(bbox) - # WHERE continent=? 
: ('Europe',) - return where_clause, where_values - - def __response_feature(self, row_data): - """ - Assembles GeoJSON output from DB query - - :param row_data: DB row result - - :returns: `dict` of GeoJSON Feature - """ - - if row_data: - rd = dict(row_data) # sqlite3.Row is doesnt support pop - feature = { - 'type': 'Feature' - } - feature["geometry"] = json.loads( - rd.pop('AsGeoJSON({})'.format(self.geom_col)) - ) - feature['properties'] = rd - feature['id'] = feature['properties'].pop(self.id_field) - - return feature - else: - return None - - def __response_feature_hits(self, hits): - """Assembles GeoJSON/Feature number - - :returns: GeoJSON FeaturesCollection - """ - - feature_collection = {"features": [], - "type": "FeatureCollection"} - feature_collection['numberMatched'] = hits - - return feature_collection - - def __load(self): - """ - Private method for loading spatiallite, - get the table structure and dump geometry - - :returns: sqlite3.Cursor - """ - - if (os.path.exists(self.data)): - conn = sqlite3.connect(self.data) - else: - LOGGER.error('Path to sqlite does not exist') - raise InvalidPluginError() - - try: - conn.enable_load_extension(True) - except AttributeError as err: - LOGGER.error('Extension loading not enabled: {}'.format(err)) - raise ProviderConnectionError() - - conn.row_factory = sqlite3.Row - conn.enable_load_extension(True) - # conn.set_trace_callback(LOGGER.debug) - cursor = conn.cursor() - try: - cursor.execute("SELECT load_extension('{}')".format( - SPATIALITE_EXTENSION)) - except sqlite3.OperationalError as err: - LOGGER.error('Extension loading error: {}'.format(err)) - raise ProviderConnectionError() - result = cursor.fetchall() - - # Checking for geopackage - cursor.execute("PRAGMA application_id") - result = cursor.fetchone() - - self.application_id = result["application_id"] - if self.application_id == 1196444487: - LOGGER.info("Detected GPKG 1.2 and greater") - elif self.application_id == 1196437808: - LOGGER.info("Detected GPKG 1.0 or 1.1") - else: - LOGGER.info("No GPKG detected assuming spatial sqlite3") - self.application_id = 0 - - if self.application_id: - cursor.execute("SELECT AutoGPKGStart()") - result = cursor.fetchall() - if result[0][0] >= 1: - LOGGER.info("Loaded Geopackage support") - else: - LOGGER.info("SELECT AutoGPKGStart() returned 0." + - "Detected GPKG but couldn't load support") - raise InvalidPluginError - - if self.application_id: - self.geom_col = "geom" - else: - self.geom_col = "geometry" - - try: - cursor.execute('PRAGMA table_info({})'.format(self.table)) - result = cursor.fetchall() - except sqlite3.OperationalError: - LOGGER.error('Couldnt find table: {}'.format(self.table)) - raise ProviderConnectionError() - - try: - assert len(result), 'Table not found' - assert len([item for item in result - if self.id_field in item]), 'id_field not present' - - except AssertionError: - raise InvalidPluginError - - self.columns = [item[1] for item in result if item[1] - not in [self.geom_col, self.geom_col.upper()]] - self.columns = ','.join(self.columns)+',AsGeoJSON({})'.format( - self.geom_col) - - if self.application_id: - self.table = "vgpkg_{}".format(self.table) - - return cursor - - def query(self, startindex=0, limit=10, resulttype='results', - bbox=[], datetime_=None, properties=[], sortby=[], - select_properties=[], skip_geometry=False, q=None, **kwargs): - """ - Query SQLite/GPKG for all the content. - e,g: http://localhost:5000/collections/countries/items? 
- limit=5&startindex=2&resulttype=results&continent=Europe&admin=Albania&bbox=29.3373,-3.4099,29.3761,-3.3924 - http://localhost:5000/collections/countries/items?continent=Africa&bbox=29.3373,-3.4099,29.3761,-3.3924 - - :param startindex: starting record to return (default 0) - :param limit: number of records to return (default 10) - :param resulttype: return results or hit limit (default results) - :param bbox: bounding box [minx,miny,maxx,maxy] - :param datetime_: temporal (datestamp or extent) - :param properties: list of tuples (name, value) - :param sortby: list of dicts (property, order) - :param select_properties: list of property names - :param skip_geometry: bool of whether to skip geometry (default False) - :param q: full-text search term(s) - - :returns: GeoJSON FeaturesCollection - """ - LOGGER.debug('Querying SQLite/GPKG') - - where_clause, where_values = self.__get_where_clauses( - properties=properties, bbox=bbox, **kwargs) - - if resulttype == 'hits': - - sql_query = "SELECT COUNT(*) as hits FROM {} {} ".format( - self.table, where_clause) - - res = self.cursor.execute(sql_query, where_values) - - hits = res.fetchone()["hits"] - return self.__response_feature_hits(hits) - - sql_query = "SELECT DISTINCT {} from \ - {} {} limit ? offset ?".format( - self.columns, self.table, where_clause) - - end_index = startindex + limit - - LOGGER.debug('SQL Query: {}'.format(sql_query)) - LOGGER.debug('Start Index: {}'.format(startindex)) - LOGGER.debug('End Index: {}'.format(end_index)) - - row_data = self.cursor.execute( - sql_query, where_values + (limit, startindex)) - - feature_collection = { - 'type': 'FeatureCollection', - 'features': [] - } - - for rd in row_data: - feature_collection['features'].append( - self.__response_feature(rd)) - - return feature_collection - - def get(self, identifier, **kwargs): - """ - Query the provider for a specific - feature id e.g: /collections/countries/items/1 - - :param identifier: feature id - - :returns: GeoJSON FeaturesCollection - """ - - LOGGER.debug('Get item from SQLite/GPKG') - - sql_query = 'SELECT {} FROM \ - {} WHERE {}==?;'.format( - self.columns, self.table, self.id_field) - - LOGGER.debug('SQL Query: {}'.format(sql_query)) - LOGGER.debug('Identifier: {}'.format(identifier)) - - row_data = self.cursor.execute(sql_query, (identifier, )).fetchone() - - feature = self.__response_feature(row_data) - if feature: - return feature - else: - err = 'item {} not found'.format(identifier) - LOGGER.error(err) - raise ProviderItemNotFoundError(err) - - def __repr__(self): - return ' {}, {}'.format(self.data, self.table)
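
A minimal smoke test of the reworked stack, as a sketch only: it assumes the docker-compose.yml above is used unchanged (pygeoapi published on host port 5050, the PostGIS db and adminer services alongside it) and that the POSTGRES_* variables referenced by the PostgreSQL provider config are available to the pygeoapi container. The request body mirrors the example inputs declared for the river-runner process in pygeoapi.config.yml.

    # build and start the pygeoapi, db and adminer services defined in docker-compose.yml
    docker-compose up -d --build

    # execute the river-runner process with the example inputs from the config
    curl -X POST http://localhost:5050/processes/river-runner/execution \
      -H 'Content-Type: application/json' \
      -d '{"inputs": {"bbox": [-86.2, 39.7, -86.15, 39.75], "sorted": "downstream"}}'

On success the response is a GeoJSON FeatureCollection of the mainstem flowpath, matching the "echo" output described in the process metadata.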