
Commit

Merge branch 'master' of github.com:ivargr/climate_health
ivargr committed Sep 25, 2024
2 parents 66dc35b + 7c1b7a5 commit 8e35179
Showing 11 changed files with 46 additions and 30 deletions.
2 changes: 1 addition & 1 deletion README.md
@@ -24,7 +24,7 @@ The following shows basic usage of the platform. Follow the link to the document
CHAP supports evaluating models that are defined using the MLflow specification for machine learning models (link coming). Such models can, for example, be hosted in GitHub repositories. CHAP also has some built-in example data that can be used to evaluate models. The following example shows how to evaluate an Ewars model located on GitHub ([https://github.com/sandvelab/chap_auto_ewars](https://github.com/sandvelab/chap_auto_ewars)) using the ISIMIP dataset:

```bash
chap-cli evaluate --model-name https://github.com/sandvelab/chap_auto_ewars --dataset-name ISIMIP_dengue_harmonized --dataset-country brazil
chap evaluate --model-name https://github.com/sandvelab/chap_auto_ewars --dataset-name ISIMIP_dengue_harmonized --dataset-country brazil
```

The above example requires that you have installed chap with pip and that Docker is available on your system.
2 changes: 1 addition & 1 deletion chap_core/__init__.py
@@ -2,7 +2,7 @@

__author__ = """Sandvelab"""
__email__ = "[email protected]"
__version__ = "0.0.5"
__version__ = "0.0.7"

from . import fetch
from . import data
2 changes: 1 addition & 1 deletion chap_core/chap_cli.py
@@ -48,7 +48,7 @@ def harmonize(input_filename: Path, output_filename: Path):
with open(input_filename, "r") as f:
text = f.read()
request_data = RequestV1.model_validate_json(text)
dataset = dataset_from_request_v1(request_data, target_name="disease")
dataset = dataset_from_request_v1(request_data, target_name="disease", usecwd_for_credentials=True)
dataset.to_csv(output_filename)


6 changes: 4 additions & 2 deletions chap_core/google_earth_engine/gee_era5.py
@@ -167,13 +167,15 @@ def gee_properties_to_fields(property_dicts: list[dict]) -> dict[str, DataSet]:


class Era5LandGoogleEarthEngine:
def __init__(self):
def __init__(self, usecwd=False):
self.gee_helper = Era5LandGoogleEarthEngineHelperFunctions()
self.is_initialized = False
self._usecwd = usecwd
self._initialize_client()


def _initialize_client(self):
load_dotenv(find_dotenv())
load_dotenv(find_dotenv(usecwd=self._usecwd))
# read environment variables
account = os.environ.get("GOOGLE_SERVICE_ACCOUNT_EMAIL")
private_key = os.environ.get("GOOGLE_SERVICE_ACCOUNT_PRIVATE_KEY")
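
The new usecwd flag is passed straight through to python-dotenv. A minimal sketch of what it changes, assuming python-dotenv's documented find_dotenv behaviour (not part of the diff):

```python
from dotenv import find_dotenv, load_dotenv

# Default (usecwd=False): the search for ".env" starts from the file that calls
# find_dotenv, i.e. from inside the installed chap_core package.
load_dotenv(find_dotenv())

# usecwd=True: the search starts from the current working directory, so a ".env"
# placed in the folder you run CHAP from is found, and its GOOGLE_SERVICE_ACCOUNT_*
# variables become available to the os.environ.get calls below.
load_dotenv(find_dotenv(usecwd=True))
```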
6 changes: 2 additions & 4 deletions chap_core/rest_api.py
@@ -99,7 +99,8 @@ async def favicon() -> FileResponse:
@app.post('/predict')
async def predict(data: PredictionRequest) -> dict:
"""
Start a prediction task using the given data as training data
Start a prediction task using the given data as training data.
Results can be retrieved using the get-results endpoint.
"""
json_data = data.model_dump()
str_data = json.dumps(json_data)
@@ -132,11 +133,9 @@ async def get_results() -> FullPredictionResponse:
Retrieve results made by the model
"""
cur_job = internal_state.current_job
print(cur_job)
if not (cur_job and cur_job.is_finished):
raise HTTPException(status_code=400, detail="No response available")
result = cur_job.result
print(result)
return result


@@ -167,5 +166,4 @@ async def get_status() -> State:

def main_backend():
import uvicorn

uvicorn.run(app, host="0.0.0.0", port=8000)
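
As a rough client-side sketch of the flow described in these docstrings: POST the training data to /predict, then poll for results. Only the /predict path and port 8000 appear in this diff; the /get-results path, the request file name, and the polling loop below are assumptions (not part of the diff):

```python
import json
import time

import requests

BASE_URL = "http://localhost:8000"  # uvicorn serves the app on port 8000

# "prediction_request.json" is a placeholder for data matching PredictionRequest.
with open("prediction_request.json") as f:
    payload = json.load(f)

# Start the prediction task using the given data as training data.
requests.post(f"{BASE_URL}/predict", json=payload).raise_for_status()

# get_results answers HTTP 400 ("No response available") until the job has finished,
# so poll until it returns 200.
while True:
    response = requests.get(f"{BASE_URL}/get-results")
    if response.status_code == 200:
        break
    time.sleep(5)

print(response.json())
```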
8 changes: 4 additions & 4 deletions chap_core/rest_api_src/worker_functions.py
@@ -23,8 +23,8 @@
}


def initialize_gee_client():
gee_client = Era5LandGoogleEarthEngine()
def initialize_gee_client(usecwd=False):
gee_client = Era5LandGoogleEarthEngine(usecwd=usecwd)
return gee_client


@@ -98,7 +98,7 @@ def get_target_id(json_data, target_names):


def dataset_from_request_v1(
json_data: RequestV1, target_name="diseases"
json_data: RequestV1, target_name="diseases", usecwd_for_credentials=False
) -> DataSet[FullData]:
translations = {target_name: "disease_cases"}
data = {
@@ -107,7 +107,7 @@
)
for feature in json_data.features
}
gee_client = initialize_gee_client()
gee_client = initialize_gee_client(usecwd=usecwd_for_credentials)
period_range = data["disease_cases"].period_range
locations = list(data["disease_cases"].keys())
climate_data = gee_client.get_historical_era5(
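
Tying the pieces together, a sketch of how the new usecwd_for_credentials flag is meant to be used, mirroring the chap_cli.harmonize change earlier in this commit; the RequestV1 import path and the file names are assumptions (not part of the diff):

```python
from chap_core.api_types import RequestV1  # assumed import path for RequestV1
from chap_core.rest_api_src.worker_functions import dataset_from_request_v1

# "request.json" is a placeholder for JSON data downloaded from the CHAP-app.
with open("request.json") as f:
    request_data = RequestV1.model_validate_json(f.read())

# usecwd_for_credentials=True makes the Google Earth Engine client load ".env" from
# the directory the command is run in, rather than from the installed package.
dataset = dataset_from_request_v1(
    request_data, target_name="disease", usecwd_for_credentials=True
)
dataset.to_csv("harmonized.csv")
```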
3 changes: 0 additions & 3 deletions chap_core/spatio_temporal_data/temporal_dataclass.py
@@ -200,9 +200,6 @@ def locations(self) -> Iterable[Location]:
def data(self) -> Iterable[FeaturesT]:
return self._data_dict.values()

# def items(self) -> Iterable[Tuple[Location, FeaturesT]]:
# return self._data_dict.items()

def _add_location_to_dataframe(self, df, location):
df["location"] = location
return df
7 changes: 4 additions & 3 deletions chap_core/worker/rq_worker.py
@@ -18,6 +18,8 @@


class RedisJob(Generic[ReturnType]):
'''Wrapper for a Redis Job'''

def __init__(self, job: Job):
self._job = job

@@ -38,17 +40,16 @@ def cancel(self):

@property
def is_finished(self) -> bool:
print(self._job.is_finished)
if self._job.get_status() == "queued":
logger.warning(
"Job is queued, maybe no worker is set up? Run `$ rq worker`"
)
print(self._job.get_status())

return self._job.is_finished


class RedisQueue:
'''Simple abstraction for a Redis Queue'''

def __init__(self):
host, port = self.read_environment_variables()
self.q = Queue(connection=Redis(host=host, port=int(port)))
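
For context, a sketch of the rq primitives these wrappers sit on top of; the Redis host and port are hard-coded here, whereas RedisQueue reads them from environment variables via read_environment_variables() (variable names not shown in this diff):

```python
from redis import Redis
from rq import Queue


def double(x):
    # Any importable function can be enqueued as a job.
    return x * 2


queue = Queue(connection=Redis(host="localhost", port=6379))
job = queue.enqueue(double, 21)

# The job stays "queued" until a worker process is running (`$ rq worker`), which is
# exactly what the warning in RedisJob.is_finished points out.
print(job.get_status())
print(job.is_finished)
```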
36 changes: 27 additions & 9 deletions docs_source/tutorials/downloaded_json_data.rst
@@ -1,23 +1,41 @@
Running on JSON data downloaded from the CHAP-app
Running on JSON data downloaded from the CHAP-app (when CHAP Core is not installed on your DHIS2-instance)
=================================================
For this example you will need to have the CHAP-app installed on DHIS2 and Chap Core installed on your local computer, in addition to Docker installed and credentials for Google Earth Engine.
Chap Core can be installed by running the following command:

Requirements:
- Docker is installed on your computer (installation instructions can be found at https://docs.docker.com/get-started/get-docker/).
- The CHAP-app is installed on your DHIS2 instance (instructions for installing the CHAP-app can be found at https://github.com/dhis2/chap-app).
- You have access to credentials for Google Earth Engine (a private key and a service account email).

Install CHAP Core
-----------------
We recommend running CHAP Core with Conda. If you don't have Conda, you can install Miniconda
(a minimal installer for Conda) from https://docs.anaconda.com/miniconda/#latest-miniconda-installer-links

Windows: After installation, open the Anaconda Prompt by searching for "Anaconda Prompt" in the Windows Start menu.
Linux: Conda should work in your default terminal after installation.

We recommend creating a new conda environment by running the following commands:

$ conda create -n chap-core python=3.11
$ conda activate chap-core

In the same shell, install CHAP Core by running the following command (this takes 5-20 minutes):

$ pip install git+https://github.com/dhis2/chap-core.git

After installation, the chap command line interface (CLI) should be available in your terminal.

Credentials for Google Earth Engine
------------------------------------------
Credentials for Google Earth Engine needs to be put as environment variables on your local computer.
The easiest way to do this is to create a file called ".env" in the root of the chap-core repository with the two environment variables:
"GOOGLE_SERVICE_ACCOUNT_EMAIL" and "GOOGLE_SERVICE_ACCOUNT_PRIVATE_KEY". The file should look similar to the following content:
Credentials for Google Earth Engine need to be set as environment variables on your local computer. We recommend you create a new folder where you will later run CHAP Core. Inside
this folder, create a new file named ".env" with the two environment variables: "GOOGLE_SERVICE_ACCOUNT_EMAIL" and "GOOGLE_SERVICE_ACCOUNT_PRIVATE_KEY".
The file should look similar to the following content:

.. code-block:: bash

    GOOGLE_SERVICE_ACCOUNT_EMAIL="[email protected]"
    GOOGLE_SERVICE_ACCOUNT_PRIVATE_KEY="-----BEGIN PRIVATE KEY----- <your-private-key> -----END PRIVATE KEY-----"

Convert the JSON data into a CHAP-DataSet
------------------------------------------

2 changes: 1 addition & 1 deletion setup.cfg
@@ -1,5 +1,5 @@
[bumpversion]
current_version = 0.0.5
current_version = 0.0.7
commit = True
tag = True

2 changes: 1 addition & 1 deletion setup.py
@@ -80,6 +80,6 @@
test_suite="tests",
tests_require=test_requirements,
url="https://github.com/dhis2/chap-core",
version="0.0.5",
version="0.0.7",
zip_safe=False,
)
