diff --git a/examples/open_tiff_https.py b/examples/open_tiff_https.py
new file mode 100644
index 0000000..889e08e
--- /dev/null
+++ b/examples/open_tiff_https.py
@@ -0,0 +1,12 @@
+from xcube.core.store.store import new_data_store
+
+
+url = "https://s3.eu-central-1.wasabisys.com/stac/odse/catalog.json"
+store = new_data_store("stac", url=url)
+
+# open data, selecting the asset to read via asset_names
+ds = store.open_data(
+    "lcv_blue_landsat.glad.ard/lcv_blue_landsat.glad.ard_1999.12.02..2000.03.20/lcv_blue_landsat.glad.ard_1999.12.02..2000.03.20.json",
+    asset_names=["blue_p50"],
+)
+print(ds)
diff --git a/xcube_stac/store.py b/xcube_stac/store.py
index 61225a8..e89b5a2 100644
--- a/xcube_stac/store.py
+++ b/xcube_stac/store.py
@@ -155,7 +155,7 @@ def get_open_data_params_schema(
         )
         if opener_id is not None:
             if len(opener_ids) != 1 and opener_ids[0] != opener_id:
-                DataStoreError(
+                raise DataStoreError(
                     f"The data ID {data_id} can be opened by the "
                     f"data opener {opener_ids}, but 'opener_id' "
                     f"is set to {opener_id}."
@@ -210,7 +210,7 @@ def describe_data(
                 None,
             )
         else:
-            DataStoreError(
+            raise DataStoreError(
                 "Either 'start_datetime' and 'end_datetime' or 'datetime' "
-                "needs to be determine in the STAC item."
+                "needs to be determined in the STAC item."
             )
@@ -403,7 +403,7 @@ def _is_datetime_in_range(self, item: pystac.Item, **open_params) -> bool:
             dt_data = self._convert_str2datetime(item.properties["datetime"])
             return dt_start <= dt_data <= dt_end
         else:
-            DataStoreError(
+            raise DataStoreError(
                 "Either 'start_datetime' and 'end_datetime' or 'datetime' "
                 "needs to be determined in the STAC item."
             )
@@ -444,7 +444,7 @@ def _access_item(self, data_id: str) -> pystac.Item:
                 preserve_dict=False,
             )
         else:
-            DataStoreError(response.raise_for_status())
+            raise DataStoreError(response.raise_for_status())
 
     def _get_assets_from_item(
         self,
@@ -585,13 +585,13 @@ def _decode_href(self, href: str) -> Tuple[str, str, str, str]:
 
         if root is not None:
             if re.search(AWS_REGEX_BUCKET_NAME, root) is None:
-                DataStoreError(
+                raise DataStoreError(
                     f"Bucket name '{root}' extracted from the href {href} "
                     "does not follow the AWS S3 bucket naming rules."
                 )
         if region_name is not None:
             if region_name not in AWS_REGION_NAMES:
-                DataStoreError(
+                raise DataStoreError(
                     f"Region name '{region_name}' extracted from the "
-                    "href {href} is not supported by AWS S3"
+                    f"href {href} is not supported by AWS S3."
                 )
@@ -641,7 +641,7 @@ def _build_dataset(
             ds_asset = self._store_https.open_data(
                 fs_path, opener_id=opener_id_asset[0], **open_params
             )
-        if protocol == "s3":
+        elif protocol == "s3":
             if self._store_s3 is None:
                 self._initialize_new_s3_data_store(root, region_name)
             else:
@@ -662,6 +662,8 @@
             ds_asset = self._store_s3.open_data(
                 fs_path, opener_id=opener_id_asset[1], **open_params
             )
+        else:
+            raise DataStoreError("Only 's3' and 'https' protocols are supported.")
 
         for varname, da in ds_asset.data_vars.items():
             if len(ds_asset) == 1:
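
The recurring fix in xcube_stac/store.py above adds the missing `raise` keyword in front of `DataStoreError(...)`. A minimal, self-contained sketch of why this matters; the `check_opener` helper and its arguments are hypothetical, only `DataStoreError` comes from the patch:

class DataStoreError(Exception):
    """Stand-in for xcube's DataStoreError, so the sketch runs standalone."""


def check_opener(opener_ids, opener_id):
    if len(opener_ids) != 1 and opener_ids[0] != opener_id:
        # Before the patch: the exception object is constructed and
        # immediately discarded -- execution silently continues.
        DataStoreError(f"'opener_id' is set to {opener_id}.")
        # After the patch: the error actually surfaces to the caller.
        raise DataStoreError(f"'opener_id' is set to {opener_id}.")


try:
    check_opener(["opener_a", "opener_b"], "opener_c")
except DataStoreError as e:
    print("raised:", e)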
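
The second message in `_decode_href` was also missing the `f` prefix on its continuation string ("href {href} is not supported by AWS S3"), so the placeholder would have appeared literally in the error text; the hunk above adds it. A quick illustration, with a made-up `href` value:

href = "s3://some-bucket/item.json"  # hypothetical value for illustration
print("href {href} is not supported")   # -> href {href} is not supported
print(f"href {href} is not supported")  # -> href s3://some-bucket/item.json is not supported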
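
Finally, the `_build_dataset` hunks turn two independent `if` statements into an `if`/`elif`/`else` chain. In the old form, an unsupported protocol matched neither branch, leaving `ds_asset` unbound and failing much later with a `NameError`; the new `else` fails fast with a clear `DataStoreError`. A reduced sketch with the store lookups stubbed out (`open_asset` is a simplified, hypothetical dispatcher):

class DataStoreError(Exception):  # same stand-in as in the first sketch
    pass


def open_asset(protocol: str) -> str:
    if protocol == "https":
        return "dataset from https store"  # stands in for self._store_https.open_data(...)
    elif protocol == "s3":
        return "dataset from s3 store"     # stands in for self._store_s3.open_data(...)
    else:
        # Without this branch, callers would only see a NameError much later.
        raise DataStoreError("Only 's3' and 'https' protocols are supported.")


print(open_asset("https"))  # -> dataset from https store
try:
    open_asset("ftp")
except DataStoreError as e:
    print("raised:", e)     # -> raised: Only 's3' and 'https' protocols are supported.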