diff --git a/CHANGES.md b/CHANGES.md
index fc456f13d..9d9cd99e9 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -1,4 +1,11 @@
-## Version 2.0.0.dev24 (in development)
+## Version 2.0.0.dev25 (in development)
+
+* Increased default time-out for data downloads from 10 to 90 seconds. Addresses (but does not fix)
+  [#835](https://github.com/CCI-Tools/cate/issues/835)
+* Fixed installation problem with latest Miniconda 4.5.12
+  [#831](https://github.com/CCI-Tools/cate/issues/831)
+
+## Version 2.0.0.dev24
 
 * Loading SeaIce data throws a ValueError: The truth value of an array with more than one element is ambiguous.
   [#832](https://github.com/CCI-Tools/cate/issues/832)
diff --git a/cate/core/ds.py b/cate/core/ds.py
index e8e97e571..6ed8a6ea4 100644
--- a/cate/core/ds.py
+++ b/cate/core/ds.py
@@ -81,10 +81,11 @@
 import datetime
 import glob
 import itertools
+import logging
 import re
 from abc import ABCMeta, abstractmethod
 from enum import Enum
-from typing import Sequence, Optional, Union, Any, Dict, Set
+from typing import Sequence, Optional, Union, Any, Dict, Set, List
 
 import xarray as xr
 
@@ -105,6 +106,8 @@
     r'(?::\d+)?'  # optional port
     r'(?:/?|[/?]\S+)$', re.IGNORECASE)
 
+_LOG = logging.getLogger('cate')
+
 
 class DataAccessWarning(UserWarning):
     """
@@ -314,23 +317,25 @@ def _repr_html_(self):
     def _cannot_access_error(self, time_range=None, region=None, var_names=None, verb="open",
                              cause: BaseException = None, error_cls=DataAccessError):
         error_message = f'Failed to {verb} data source "{self.id}"'
-        contraints = []
+        constraints = []
         if time_range is not None and time_range != "":
-            contraints.append("time range")
+            constraints.append("time range")
         if region is not None and region != "":
-            contraints.append("region")
+            constraints.append("region")
         if var_names is not None and var_names != "":
-            contraints.append("variable names")
-        if contraints:
-            error_message += " for given " + ", ".join(contraints)
+            constraints.append("variable names")
+        if constraints:
+            error_message += " for given " + ", ".join(constraints)
         if cause is not None:
             error_message += f": {cause}"
+        _LOG.info(error_message)
         return error_cls(error_message)
 
     def _empty_error(self, time_range=None):
         error_message = f'Data source "{self.id}" does not seem to have any datasets'
         if time_range is not None:
             error_message += f' in given time range {TimeRangeLike.format(time_range)}'
+        _LOG.info(error_message)
         return DataAccessError(error_message)
 
@@ -348,6 +353,56 @@ class DataSourceStatus(Enum):
     CANCELLED = "CANCELLED"
 
 
+class DataStoreNotice:
+    """
+    A short notice that can be exposed to users by data stores.
+    """
+
+    def __init__(self, id: str, title: str, content: str, intent: str = None, icon: str = None):
+        """
+        A short notice that can be exposed to users by data stores.
+
+        :param id: Notice ID.
+        :param title: A human-readable, plain text title.
+        :param content: Human-readable, plain text content that may be formatted using Markdown.
+        :param intent: Notice intent, may be one of "default", "primary", "success", "warning", "danger".
+        :param icon: An optional icon name.
+            See https://blueprintjs.com/docs/versions/1/#core/icons
+        """
+        if id is None or id == "":
+            raise ValueError("invalid id")
+        if title is None or title == "":
+            raise ValueError("invalid title")
+        if content is None or content == "":
+            raise ValueError("invalid content")
+        if intent not in {None, "default", "primary", "success", "warning", "danger"}:
+            raise ValueError("invalid intent")
+
+        self._dict = dict(id=id, title=title, content=content, icon=icon, intent=intent)
+
+    @property
+    def id(self):
+        return self._dict["id"]
+
+    @property
+    def title(self):
+        return self._dict["title"]
+
+    @property
+    def content(self):
+        return self._dict["content"]
+
+    @property
+    def intent(self):
+        return self._dict["intent"]
+
+    @property
+    def icon(self):
+        return self._dict["icon"]
+
+    def to_dict(self):
+        return dict(self._dict)
+
+
 class DataStore(metaclass=ABCMeta):
     """
     Represents a data store of data sources.
@@ -375,6 +430,23 @@ def title(self) -> str:
         """
         return self._title
 
+    @property
+    def description(self) -> Optional[str]:
+        """
+        Return an optional, human-readable description for this data store.
+
+        The text may use Markdown formatting.
+        """
+        return None
+
+    @property
+    def notices(self) -> List[DataStoreNotice]:
+        """
+        Return an optional list of notices for this data store that can be used to inform users about the
+        conventions, standards, and data extent used in this data store, or about upcoming service outages.
+        """
+        return []
+
     @property
     def is_local(self) -> bool:
         """
@@ -612,7 +684,7 @@ def open_xarray_dataset(paths,
                         var_names: VarNamesLike.TYPE = None,
                         monitor: Monitor = Monitor.NONE,
                         **kwargs) -> xr.Dataset:
-    """
+    r"""
     Open multiple files as a single dataset. This uses dask. If each individual file of the
     dataset is small, one Dask chunk will coincide with one temporal slice,
     e.g. the whole array in the file. Otherwise smaller dask chunks will be used
diff --git a/cate/core/op.py b/cate/core/op.py
index afb292b10..46e373d23 100644
--- a/cate/core/op.py
+++ b/cate/core/op.py
@@ -681,7 +681,7 @@ def new_subprocess_op(op_meta_info: OpMetaInfo,
                       started: Union[str, Callable] = None,
                       progress: Union[str, Callable] = None,
                       done: Union[str, Callable] = None) -> Operation:
-    """
+    r"""
     Create an operation for a child program run in a new process.
 
     :param op_meta_info: Meta-information about the resulting operation and the operation's inputs and outputs.
diff --git a/cate/core/workflow.py b/cate/core/workflow.py
index 3fdfcbfab..1db42e171 100644
--- a/cate/core/workflow.py
+++ b/cate/core/workflow.py
@@ -1092,7 +1092,7 @@ def __repr__(self):
 
 
 class SubProcessStep(OpStepBase):
-    """
+    r"""
     A ``SubProcessStep`` is a step node that computes its output
     by a sub-process created from the given *program*.
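For reference, a minimal sketch of how the new `DataStoreNotice` API introduced above is meant to be consumed; the notice ID, title, and content used here are illustrative and not part of this change:

```python
from cate.core.ds import DataStoreNotice

# __init__ rejects empty id/title/content and any intent outside
# {None, "default", "primary", "success", "warning", "danger"}.
notice = DataStoreNotice("serviceOutage",  # illustrative values
                         "Scheduled Maintenance",
                         "The portal will be unavailable on Sunday.",
                         intent="warning",
                         icon="warning-sign")

# to_dict() returns a copy of the internal dict, so the notice itself
# cannot be mutated through the returned mapping.
d = notice.to_dict()
d["intent"] = "danger"
assert notice.intent == "warning"
```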
diff --git a/cate/ds/esa_cci_odp.py b/cate/ds/esa_cci_odp.py
index 4b1ad46ff..4fefb80e4 100644
--- a/cate/ds/esa_cci_odp.py
+++ b/cate/ds/esa_cci_odp.py
@@ -38,6 +38,7 @@
 ==========
 """
 import json
+import logging
 import os
 import re
 import socket
@@ -47,7 +48,7 @@
 from collections import OrderedDict
 from datetime import datetime, timedelta
 from math import ceil
-from typing import Sequence, Tuple, Optional, Any, Dict
+from typing import Sequence, Tuple, Optional, Any, Dict, List
 from urllib.error import URLError, HTTPError
 
 import pandas as pd
@@ -58,7 +59,7 @@
 from cate.conf import get_config_value, get_data_stores_path
 from cate.conf.defaults import NETCDF_COMPRESSION_LEVEL
 from cate.core.ds import DATA_STORE_REGISTRY, DataAccessError, NetworkError, DataStore, DataSource, Schema, \
-    open_xarray_dataset
+    open_xarray_dataset, DataStoreNotice
 from cate.core.opimpl import subset_spatial_impl, normalize_impl, adjust_spatial_attrs_impl
 from cate.core.types import PolygonLike, TimeLike, TimeRange, TimeRangeLike, VarNamesLike
 from cate.ds.local import add_to_data_store_registry, LocalDataSource, LocalDataStore
@@ -105,8 +106,10 @@
 _CSW_METADATA_CACHE_FILE = 'catalogue_metadata.xml'
 _CSW_CACHE_FILE = 'catalogue.xml'
 
+_LOG = logging.getLogger('cate')
+
 # by default there is no timeout
-socket.setdefaulttimeout(10)
+socket.setdefaulttimeout(90)
 
 
 def get_data_store_path():
@@ -332,6 +335,68 @@ def __init__(self,
 
         self._csw_data = None
 
+    @property
+    def description(self) -> Optional[str]:
+        """
+        Return a human-readable description for this data store.
+
+        The text may use Markdown formatting.
+        """
+        return ("This data store represents the [ESA CCI Open Data Portal](http://cci.esa.int/data)"
+                " in the CCI Toolbox.\n"
+                "It currently provides all CCI data that are published through the "
+                "[ESGF-CEDA services](https://esgf-index1.ceda.ac.uk/search/esacci-ceda/) "
+                "(gridded data stored as NetCDF files). "
+                "The store will shortly be extended to also provide TIFF and Shapefile data; see the usage "
+                "notes.\n"
+                "Remote data downloaded to your computer is made available through the *Local Data Store*.")
+
+    @property
+    def notices(self) -> Optional[List[DataStoreNotice]]:
+        """
+        Return an optional list of notices for this data store that can be used to inform users about the
+        conventions, standards, and data extent used in this data store, or about upcoming service outages.
+        """
+        return [
+            DataStoreNotice("terminologyClarification",
+                            "Terminology Clarification",
+                            "The ESA CCI Open Data Portal (ODP) utilises an "
+                            "[ontology](http://vocab-test.ceda.ac.uk/ontology/cci/cci-content/index.html) whose terms "
+                            "might slightly differ from the ones used in this software."
+                            "\n"
+                            "For example, a *Dataset* in the CCI terminology may refer to all data products "
+                            "generated by a certain CCI project using a specific configuration of algorithms "
+                            "and auxiliary data."
+                            "\n"
+                            "In this software, a *Data Source* refers to a subset (a file set) "
+                            "of a given ODP dataset whose data share a common spatio-temporal grid and/or share "
+                            "other common properties, e.g. the instrument used for the original measurements."
+ "\n" + "In addition, Cate uses the term *Dataset* to represent in-memory " + "instances of gridded data sources or subsets of them.", + intent="primary", + icon="info-sign"), + DataStoreNotice("dataCompleteness", + "Data Completeness", + "This data store currently provides **only a subset of all datasets** provided by the " + "ESA CCI Open Data Portal (ODP), namely gridded datasets originally stored in NetCDF " + "format." + "\n" + "In upcoming versions of Cate, the ODP data store will also allow for browsing " + "and accessing the remaining ODP datasets. This includes gridded data in TIFF format and " + "also vector data using ESRI Shapefile format." + "\n" + "For time being users can download the missing vector data from the " + "[ODP FTP server](http://cci.esa.int/data#ftp) `ftp://anon-ftp.ceda.ac.uk/neodc/esacci/` " + "and then use operation `read_geo_data_frame()` in Cate to read the " + "downloaded Shapefiles:" + "\n" + "* CCI Glaciers in FTP directory `glaciers`\n" + "* CCI Ice Sheets in FTP directories `ice_sheets_antarctica` and `ice_sheets_greenland`\n", + intent="warning", + icon="warning-sign"), + ] + @property def index_cache_used(self): return self._index_cache_used @@ -857,8 +922,10 @@ def _make_local(self, child_monitor.progress(work=20) if var_names: - remote_dataset = remote_dataset.drop([var_name for var_name in remote_dataset.data_vars.keys() - if var_name not in var_names]) + remote_dataset = remote_dataset.drop( + [var_name for var_name in remote_dataset.data_vars.keys() + if var_name not in var_names] + ) if region: remote_dataset = normalize_impl(remote_dataset) remote_dataset = subset_spatial_impl(remote_dataset, region) @@ -927,7 +994,9 @@ def reporthook(block_number, read_size, total_file_size): sub_monitor_msg = "file %d of %d" % (file_number, len(outdated_file_list)) with child_monitor.starting(sub_monitor_msg, file_size): - urllib.request.urlretrieve(url[protocol], filename=dataset_file, reporthook=reporthook) + actual_url = url[protocol] + _LOG.info(f"Downloading {actual_url} to {dataset_file}") + urllib.request.urlretrieve(actual_url, filename=dataset_file, reporthook=reporthook) file_number += 1 local_ds.add_dataset(os.path.join(local_id, filename), (coverage_from, coverage_to)) diff --git a/cate/ds/local.py b/cate/ds/local.py index 4aaa04e8c..02c94748a 100644 --- a/cate/ds/local.py +++ b/cate/ds/local.py @@ -47,7 +47,7 @@ from collections import OrderedDict from datetime import datetime from glob import glob -from typing import Optional, Sequence, Union, Any, Tuple +from typing import Optional, Sequence, Union, Any, Tuple, List from urllib.error import URLError, HTTPError import psutil @@ -55,10 +55,10 @@ import xarray as xr from dateutil import parser -from cate.conf import get_config_value, get_data_stores_path +from cate.conf import get_config_value, get_data_stores_path, GLOBAL_CONF_FILE from cate.conf.defaults import NETCDF_COMPRESSION_LEVEL from cate.core.ds import DATA_STORE_REGISTRY, DataAccessError, NetworkError, DataAccessWarning, DataSourceStatus, \ - DataStore, DataSource, open_xarray_dataset + DataStore, DataSource, open_xarray_dataset, DataStoreNotice from cate.core.opimpl import subset_spatial_impl, normalize_impl, adjust_spatial_attrs_impl from cate.core.types import PolygonLike, TimeRange, TimeRangeLike, VarNames, VarNamesLike, ValidationError from cate.util.monitor import Monitor @@ -572,6 +572,38 @@ def __init__(self, ds_id: str, store_dir: str): self._store_dir = store_dir self._data_sources = None + @property + def description(self) 
+        """
+        Return a human-readable description for this data store.
+
+        The text may use Markdown formatting.
+        """
+        return ("The local data store represents "
+                "all the data sources in your local file system known by Cate. "
+                "It contains any remote data sources that have been downloaded as well as "
+                "files from your file system that you have added manually.")
+
+    @property
+    def notices(self) -> Optional[List[DataStoreNotice]]:
+        """
+        Return an optional list of notices for this data store that can be used to inform users about the
+        conventions, standards, and data extent used in this data store, or about upcoming service outages.
+        """
+        return [
+            DataStoreNotice("localDataStorage",
+                            "Local Data Storage",
+                            "The local data store is currently configured to synchronize remote data in the "
+                            f"directory `{get_data_stores_path()}`.\n"
+                            "You can change this location either "
+                            f"in Cate's configuration file `{GLOBAL_CONF_FILE}` "
+                            "or in the user preference settings of Cate Desktop.\n"
+                            "To keep your data, move your old directory to the new location "
+                            "before changing the location.",
+                            intent="primary",
+                            icon="info-sign"),
+        ]
+
     def add_pattern(self, data_source_id: str, files: Union[str, Sequence[str]] = None) -> 'DataSource':
         data_source = self.create_data_source(data_source_id)
         if isinstance(files, str) and len(files) > 0:
diff --git a/cate/ops/animate.py b/cate/ops/animate.py
index 0558858dc..70e579b0d 100644
--- a/cate/ops/animate.py
+++ b/cate/ops/animate.py
@@ -44,11 +44,13 @@
 
 Supported formats: html
 """
+
 import os
 
 # import matplotlib
 # noinspection PyBroadException
 # try:
+#     import matplotlib
 #     matplotlib.use('Qt5Agg')
 #     has_qt5agg = True
 # except Exception:
diff --git a/cate/util/process.py b/cate/util/process.py
index 16bc2fc78..292e49bfb 100644
--- a/cate/util/process.py
+++ b/cate/util/process.py
@@ -138,7 +138,7 @@ def _cancel(process: subprocess.Popen, kill_on_cancel: bool):
 
 
 class ProcessOutputMonitor:
-    """
+    r"""
     A stdout handler for :py:func:`execute` that delegates extracted progress information to a monitor.
 
     Information is extracted using regular expressions or a callable that extracts the information.
diff --git a/cate/version.py b/cate/version.py
index 5e61a2b4f..22c387860 100644
--- a/cate/version.py
+++ b/cate/version.py
@@ -21,7 +21,7 @@
 # SOFTWARE.
 
 # Cate version string (PEP440-compatible), e.g. "0.8.0", "0.8.0.dev1", "0.8.0rc1", "0.8.0rc1.dev1"
-__version__ = '2.0.0.dev24'
+__version__ = '2.0.0.dev25'
 
 # Other package metainfo
 __title__ = 'cate'
diff --git a/cate/webapi/websocket.py b/cate/webapi/websocket.py
index 82841c23c..39b120a5f 100644
--- a/cate/webapi/websocket.py
+++ b/cate/webapi/websocket.py
@@ -30,8 +30,8 @@
 from cate.core.op import OP_REGISTRY
 from cate.core.workspace import OpKwArgs
 from cate.core.wsmanag import WorkspaceManager
-from cate.util.monitor import Monitor
 from cate.util.misc import cwd, filter_fileset
+from cate.util.monitor import Monitor
 from cate.util.sround import sround_range
 
 __author__ = "Norman Fomferra (Brockmann Consult GmbH), " \
@@ -104,7 +104,7 @@ def set_config(self, config: dict) -> None:
         with open(GLOBAL_CONF_FILE, 'w') as fp:
             fp.write(conf_text)
 
-    def get_data_stores(self) -> list:
+    def get_data_stores(self) -> List[Dict[str, Any]]:
         """
         Get registered data stores.
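One detail of the `LocalDataStore.notices` property added in `cate/ds/local.py` above is worth spelling out: the notice content is built from f-strings, so `get_data_stores_path()` and `GLOBAL_CONF_FILE` are interpolated each time the property is read, not at import time. A minimal sketch under that assumption; the store ID and directory are illustrative, while the `LocalDataStore(ds_id, store_dir)` signature comes from the context lines above:

```python
from cate.ds.local import LocalDataStore

# Reading `notices` does not touch the file system; it only formats the
# currently configured paths into the notice text.
store = LocalDataStore('local', '/tmp/cate-data-store')  # illustrative
notice = store.notices[0]
assert notice.id == "localDataStorage"
assert notice.intent == "primary"
print(notice.content)  # mentions the current data stores path and config file
```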
@@ -113,9 +113,11 @@ def get_data_stores(self) -> list:
         data_stores = sorted(DATA_STORE_REGISTRY.get_data_stores(), key=lambda ds: ds.title or ds.id)
         return [dict(id=data_store.id,
                      title=data_store.title,
-                     isLocal=data_store.is_local) for data_store in data_stores]
+                     isLocal=data_store.is_local,
+                     description=data_store.description,
+                     notices=[notice.to_dict() for notice in data_store.notices]) for data_store in data_stores]
 
-    def get_data_sources(self, data_store_id: str, monitor: Monitor) -> list:
+    def get_data_sources(self, data_store_id: str, monitor: Monitor) -> List[Dict[str, Any]]:
         """
         Get data sources for a given data store.
 
@@ -143,7 +145,8 @@ def get_data_sources(self, data_store_id: str, monitor: Monitor) -> list:
                      title=data_source.title,
                      meta_info=data_source.meta_info) for data_source in data_sources]
 
-    def get_data_source_temporal_coverage(self, data_store_id: str, data_source_id: str, monitor: Monitor) -> dict:
+    def get_data_source_temporal_coverage(self, data_store_id: str, data_source_id: str, monitor: Monitor) \
+            -> Dict[str, Any]:
         """
         Get the temporal coverage of the data source.
diff --git a/environment.yml b/environment.yml
index a5dd5b65e..766948467 100644
--- a/environment.yml
+++ b/environment.yml
@@ -3,26 +3,25 @@ channels:
   - conda-forge
   - defaults
 dependencies:
-  - python=3.7.1
+  - python=3.7
+  - conda=4.5
   # Runtime libs
   - bokeh=1.0.2
   - boto3=1.9.65
   - botocore=1.12.66
   - cartopy=0.17.0
-  - conda=4.5.11
   - cython=0.29.2
   - dask=1.0.0
   - fiona=1.8.4
   - gdal=2.3.2
   - geopandas=0.4.0
-  - geos=3.6.2
+  - geos=3.7.0
   - geotiff=1.4.2
   - h5netcdf=0.6.2
   - h5py=2.8.0
   - hdf4=4.2.13
-  - hdf5=1.10.3
+  - hdf5=1.10.4
   - jdcal=1.4
-  - kealib=1.4.10
   - matplotlib=3.0.2
   - numba=0.41.0
   - numpy=1.15.4
@@ -31,7 +30,7 @@ dependencies:
   - pandas=0.23.4
   - pillow=5.3.0
   - pip=18.1
-  - proj4=4.9.3
+  - proj4=5.2.0
   - psutil=5.4.8
   - pyepsg=0.4.0
   - pyproj=1.9.5
diff --git a/test/core/test_workspace.py b/test/core/test_workspace.py
index 52eee6d0d..abf0a6c94 100644
--- a/test/core/test_workspace.py
+++ b/test/core/test_workspace.py
@@ -28,7 +28,7 @@ def test_utilities(self):
         self.assertEqual(mk_op_args(), [])
         self.assertEqual(mk_op_args(1, '2', 'a', '@b'),
-                          [{'value': 1}, {'value': '2'}, {'value': 'a'}, {'source': 'b'}])
+                         [{'value': 1}, {'value': '2'}, {'value': 'a'}, {'source': 'b'}])
         self.assertEqual(mk_op_kwargs(a=1), OrderedDict([('a', {'value': 1})]))
         self.assertEqual(mk_op_kwargs(a=1, b='@c'), OrderedDict([('a', {'value': 1}), ('b', {'source': 'c'})]))
@@ -314,9 +314,9 @@ def str_op() -> str:
                          res_scalar_gdf_vars[2].get('name'): res_scalar_gdf_vars[2].get('value'),
                          res_scalar_gdf_vars[3].get('name'): res_scalar_gdf_vars[3].get('value')}
         self.assertEqual(scalar_values, {'name': (1000 * 'A') + '...',
-                                          'lat': 45,
-                                          'lon': -120,
-                                          'geometry': 'POINT (-120 45)'})
+                                         'lat': 45,
+                                         'lon': -120,
+                                         'geometry': 'POINT (-120 45)'})
 
         res_empty_ds = l_res[6]
         res_empty_ds_vars = res_empty_ds.get('variables')
@@ -358,6 +358,7 @@ def str_op() -> str:
         OP_REGISTRY.remove_op(int_op)
         OP_REGISTRY.remove_op(str_op)
 
+    # noinspection PyMethodMayBeStatic
     def test_execute_empty_workflow(self):
         ws = Workspace('/path', Workflow(OpMetaInfo('workspace_workflow', header=dict(description='Test!'))))
         ws.execute_workflow()
@@ -577,9 +578,9 @@ def test_example(self):
                                 mk_op_kwargs(ds="@p", point="iih!", var="precipitation"),
                                 res_name='ts2', validate_args=True)
         self.assertEqual(str(e.exception),
-                          "Input 'point' for operation 'cate.ops.timeseries.tseries_point': "
-                          "Value cannot be converted into a 'PointLike': "
-                          "Invalid geometry WKT format.")
+                         "Input 'point' for operation 'cate.ops.timeseries.tseries_point': "
+                         "Value cannot be converted into a 'PointLike': "
+                         "Invalid geometry WKT format.")
 
         ws2 = Workspace.from_json_dict(ws.to_json_dict())
         self.assertEqual(ws2.base_dir, ws.base_dir)
diff --git a/test/ds/test_esa_cci_odp.py b/test/ds/test_esa_cci_odp.py
index ab4026610..a9d17924e 100644
--- a/test/ds/test_esa_cci_odp.py
+++ b/test/ds/test_esa_cci_odp.py
@@ -8,7 +8,7 @@
 import unittest.mock
 import urllib.request
 
-from cate.core.ds import DATA_STORE_REGISTRY, DataAccessError, format_variables_info_string
+from cate.core.ds import DATA_STORE_REGISTRY, DataAccessError, format_variables_info_string, DataStoreNotice
 from cate.core.types import PolygonLike, TimeRangeLike, VarNamesLike
 from cate.ds.esa_cci_odp import EsaCciOdpDataStore, find_datetime_format, _DownloadStatistics
 from cate.ds.local import LocalDataStore
@@ -50,6 +50,30 @@ def test_id_title_and_is_local(self):
         self.assertEqual(self.data_store.title, 'ESA CCI Open Data Portal')
         self.assertEqual(self.data_store.is_local, False)
 
+    def test_description(self):
+        self.assertIsNotNone(self.data_store.description)
+        self.assertTrue(len(self.data_store.description) > 40)
+
+    def test_notices(self):
+        self.assertIsInstance(self.data_store.notices, list)
+        self.assertEqual(2, len(self.data_store.notices))
+
+        notice0 = self.data_store.notices[0]
+        self.assertIsInstance(notice0, DataStoreNotice)
+        self.assertEqual(notice0.id, "terminologyClarification")
+        self.assertEqual(notice0.title, "Terminology Clarification")
+        self.assertEqual(notice0.icon, "info-sign")
+        self.assertEqual(notice0.intent, "primary")
+        self.assertTrue(len(notice0.content) > 20)
+
+        notice1 = self.data_store.notices[1]
+        self.assertIsInstance(notice1, DataStoreNotice)
+        self.assertEqual(notice1.id, "dataCompleteness")
+        self.assertEqual(notice1.title, "Data Completeness")
+        self.assertEqual(notice1.icon, "warning-sign")
+        self.assertEqual(notice1.intent, "warning")
+        self.assertTrue(len(notice1.content) > 20)
+
     def test_query(self):
         data_sources = self.data_store.query()
         self.assertIsNotNone(data_sources)
diff --git a/test/ds/test_local.py b/test/ds/test_local.py
index 06b52bef8..5e9d55fa5 100644
--- a/test/ds/test_local.py
+++ b/test/ds/test_local.py
@@ -6,7 +6,7 @@
 import datetime
 import shutil
 import json
-from cate.core.ds import DATA_STORE_REGISTRY, DataAccessError
+from cate.core.ds import DATA_STORE_REGISTRY, DataAccessError, DataStoreNotice
 from cate.core.types import PolygonLike, TimeRangeLike, VarNamesLike
 from cate.ds.local import LocalDataStore, LocalDataSource
 from cate.ds.esa_cci_odp import EsaCciOdpDataStore
@@ -36,6 +36,22 @@ def test_name_title_and_is_local(self):
         self.assertEqual(self.data_store.title, 'Local Data Sources')
         self.assertEqual(self.data_store.is_local, True)
 
+    def test_description(self):
+        self.assertIsNotNone(self.data_store.description)
+        self.assertTrue(len(self.data_store.description) > 40)
+
+    def test_notices(self):
+        self.assertIsInstance(self.data_store.notices, list)
+        self.assertEqual(1, len(self.data_store.notices))
+
+        notice0 = self.data_store.notices[0]
+        self.assertIsInstance(notice0, DataStoreNotice)
+        self.assertEqual(notice0.id, "localDataStorage")
+        self.assertEqual(notice0.title, "Local Data Storage")
+        self.assertEqual(notice0.icon, "info-sign")
+        self.assertEqual(notice0.intent, "primary")
+        self.assertTrue(len(notice0.content) > 20)
+
     def test_create_data_source(self):
         new_ds_id = 'test_name.2008'
         new_ds = self.data_store.create_data_source(new_ds_id)
diff --git a/test/util/test_process.py b/test/util/test_process.py
index 8aeaf1440..ff63227c3 100644
--- a/test/util/test_process.py
+++ b/test/util/test_process.py
@@ -90,8 +90,8 @@ def test_execute_with_handler_and_kill_cancellation(self):
 
     def test_execute_with_monitor(self):
        handler = ProcessOutputMonitor(self.monitor,
-                                       started='mkentropy: Running (?P<total_work>\d+)',
-                                       progress='mkentropy: Did (?P<work>\d+)',
+                                       started=r'mkentropy: Running (?P<total_work>\d+)',
+                                       progress=r'mkentropy: Did (?P<work>\d+)',
                                        done=lambda line: 'Done' in line)
         exit_code = run_subprocess([sys.executable, MAKE_ENTROPY, '5', '0.1'], stdout_handler=handler)
         self.assertEqual(exit_code, 0)
diff --git a/test/util/test_sround.py b/test/util/test_sround.py
index c3983455c..5f0967be0 100644
--- a/test/util/test_sround.py
+++ b/test/util/test_sround.py
@@ -33,7 +33,7 @@ def test_sround_samples(self):
         for i in range(len(test_data)):
             value, ndigits, int_part, expected_result = test_data[i]
             self.assertEqual(sround(value, ndigits=ndigits, int_part=int_part),
-                              expected_result, f"at index #{i}")
+                             expected_result, f"at index #{i}")
 
     def test_sround_has_limits(self):
         self.assertEqual(sround(1.4825723452345623455e-324, ndigits=10), 0.0)
@@ -43,32 +43,32 @@ def test_sround_range(self):
         self.assertEqual(sround_range((-0.000067128731732, 6.362984893743),
-                                       ndigits=1),
-                          (0.0, 6.4))
+                                      ndigits=1),
+                         (0.0, 6.4))
         self.assertEqual(sround_range((-0.000067128731732, 6362.984893743),
-                                       ndigits=3),
-                          (0.0, 6362.985))
+                                      ndigits=3),
+                         (0.0, 6362.985))
         self.assertEqual(sround_range((6361.239852345, 6362.68923),
-                                       ndigits=0),
-                          (6361.0, 6363.0))
+                                      ndigits=0),
+                         (6361.0, 6363.0))
         self.assertEqual(sround_range((6361.239852345, 6362.68923),
-                                       ndigits=2),
-                          (6361.24, 6362.69))
+                                      ndigits=2),
+                         (6361.24, 6362.69))
         self.assertEqual(sround_range((-6362.68923, +6361.239852345),
-                                       ndigits=2),
-                          (-6362.69, 6361.24))
+                                      ndigits=2),
+                         (-6362.69, 6361.24))
         self.assertEqual(sround_range((-0.000067128731732, +0.0027635092345),
-                                       ndigits=2),
-                          (-0.00007,
-                           +0.00276))
+                                      ndigits=2),
+                         (-0.00007,
+                          +0.00276))
         self.assertEqual(sround_range((-0.000067128731732, +0.0027635092345),
-                                       ndigits=1),
-                          (-0.0001,
-                           +0.0028))
+                                      ndigits=1),
+                         (-0.0001,
+                          +0.0028))
         self.assertEqual(sround_range((-0.000067128731732, +0.0027635092345),
-                                       ndigits=0),
-                          (0.0,
-                           +0.003))
+                                      ndigits=0),
+                         (0.0,
+                          +0.003))
diff --git a/test/util/web/test_webapi.py b/test/util/web/test_webapi.py
index 851df1328..1cc31085d 100644
--- a/test/util/web/test_webapi.py
+++ b/test/util/web/test_webapi.py
@@ -24,14 +24,15 @@ def test_url_pattern_works(self):
         self.assertEqual(matcher.groupdict(), {'base_dir': x, 'res_name': 'SST'})
 
     def test_url_pattern_ok(self):
+        name_chars_pattern = "[^\\;\\/\\?\\:\\@\\&\\=\\+\\$\\,]+"
         self.assertEqual(webapi.url_pattern('/version'),
                          '/version')
         self.assertEqual(webapi.url_pattern('{{num}}/get'),
-                         '(?P<num>[^\;\/\?\:\@\&\=\+\$\,]+)/get')
+                         f'(?P<num>{name_chars_pattern})/get')
         self.assertEqual(webapi.url_pattern('/open/{{ws_name}}'),
-                         '/open/(?P<ws_name>[^\;\/\?\:\@\&\=\+\$\,]+)')
+                         f'/open/(?P<ws_name>{name_chars_pattern})')
         self.assertEqual(webapi.url_pattern('/open/ws{{id1}}/wf{{id2}}'),
-                         '/open/ws(?P<id1>[^\;\/\?\:\@\&\=\+\$\,]+)/wf(?P<id2>[^\;\/\?\:\@\&\=\+\$\,]+)')
+                         f'/open/ws(?P<id1>{name_chars_pattern})/wf(?P<id2>{name_chars_pattern})')
 
     def test_url_pattern_fail(self):
         with self.assertRaises(ValueError) as cm:
diff --git a/test/webapi/test_websocket.py b/test/webapi/test_websocket.py
index 8015a7fa1..dbf7e84f1 100644
--- a/test/webapi/test_websocket.py
+++ b/test/webapi/test_websocket.py
@@ -24,6 +24,14 @@ def test_get_data_stores(self):
         data_stores = self.service.get_data_stores()
         self.assertIsInstance(data_stores, list)
         self.assertGreater(len(data_stores), 1)
+        for ds in data_stores:
+            self.assertIn('id', ds)
+            self.assertIsInstance(ds['id'], str)
+            self.assertIn('isLocal', ds)
+            self.assertIsInstance(ds['isLocal'], bool)
+            self.assertIn('description', ds)
+            self.assertIn('notices', ds)
+            self.assertIsInstance(ds['notices'], list)
         self.assertIn('local', [ds['id'] for ds in data_stores])
 
     @unittest.skipIf(os.environ.get('CATE_DISABLE_WEB_TESTS', None) == '1', 'CATE_DISABLE_WEB_TESTS = 1')
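Taken together, the `websocket.py` change and the test above pin down the extended wire format. A sketch of one element returned by `WebSocketService.get_data_stores()` after this change, with illustrative values (`description` may be `None` for stores that keep the base-class default):

```python
# One element of the list returned by WebSocketService.get_data_stores();
# 'notices' holds the plain dicts produced by DataStoreNotice.to_dict().
example_store_entry = {
    "id": "local",
    "title": "Local Data Sources",
    "isLocal": True,
    "description": "The local data store represents ...",
    "notices": [
        {
            "id": "localDataStorage",
            "title": "Local Data Storage",
            "content": "The local data store is currently configured to ...",
            "intent": "primary",
            "icon": "info-sign",
        },
    ],
}
```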