Skip to content

Commit

Permalink
Merge remote-tracking branch 'origin/main-dev' into 46-implement-actr…
Browse files Browse the repository at this point in the history
…is-ebas-reader

# Conflicts:
#	setup.cfg
#	src/pyaro_readers/aeronetsdareader/AeronetSdaTimeseriesReader.py
#	src/pyaro_readers/eeareader/EEATimeseriesReader.py
#	src/pyaro_readers/harpreader/harpreader.py
#	src/pyaro_readers/nilupmfabsorptionreader/NILUPMFAbsorptionReader.py
#	src/pyaro_readers/nilupmfebas/EbasPmfReader.py
  • Loading branch information
Jan Griesfeller committed Nov 7, 2024
2 parents a381901 + 002af79 commit 4e07711
Show file tree
Hide file tree
Showing 8 changed files with 2,095 additions and 1,555 deletions.
59 changes: 38 additions & 21 deletions src/pyaro_readers/eeareader/EEATimeseriesReader.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,6 @@
import logging
from os import path

from tqdm import tqdm
from datetime import datetime, timedelta

Expand All @@ -11,15 +14,21 @@
from pyaro.timeseries import (
AutoFilterReaderEngine,
Data,
Filter,
Flag,
NpStructuredData,
Station,
)

try:
import tomllib
except ImportError: # python <3.11
import tomli as tomllib


logger = logging.getLogger(__name__)

FLAGS_VALID = {-99: False, -1: False, 1: True, 2: False, 3: False, 4: True}
VERIFIED_LVL = [1, 2, 3]
DATA_TOML = Path(__file__).parent / "data.toml"
DATA_TOML = path.join(path.dirname(__file__), "data.toml")
FILL_COUNTRY_FLAG = False

TIME_FORMAT = "%Y-%m-%d %H:%M:%S"
Expand Down Expand Up @@ -65,22 +74,16 @@ def __init__(
self,
filename,
filters={},
fill_country_flag: bool = FILL_COUNTRY_FLAG,
):
self._filename = filename
self._stations = {}
self._data = {} # var -> {data-array}
self._set_filters(filters)

self._metadata = self._read_metadata(filename)
self._filters = filters
self.metadata = self._read_metadata(filename)
self.data_cfg = self._read_cfg()

def read(self):
"""reading method quick and dirty"""
self._read_polars(self._filters, self._filename)

def metadata(self) -> dict[str, str]:
return self._metadata
self._read_polars(filters, filename)

def _read_polars(self, filters, filename) -> None:
try:
Expand Down Expand Up @@ -141,6 +144,7 @@ def _read_polars(self, filters, filename) -> None:
polars.read_parquet(file), (start_date, end_date)
)
if lf.is_empty():
logger.info(f"Data for file {file} is empty. Skipping")
continue
else:
lf = polars.read_parquet(file)
Expand Down Expand Up @@ -168,10 +172,15 @@ def _read_polars(self, filters, filename) -> None:
if file_datapoints == 0:
continue
df = lf
try:
station_metadata = self.metadata[df.row(0)[0].split("/")[-1]]
except:
logger.info(
f'Could not extract the metadata for {df.row(0)[0].split("/")[-1]}'
)
continue

station_metadata = self._metadata[df.row(0)[0].split("/")[-1]]

file_unit = df.row(0)[df.get_column_index("Unit")]
file_unit = self._convert_unit(df.row(0)[df.get_column_index("Unit")])

for key in PARQUET_FIELDS:
array[key][
Expand Down Expand Up @@ -229,7 +238,7 @@ def _filter_dates(
)

def _read_metadata(self, folder: str) -> dict:
_metadata = {}
metadata = {}
filename = Path(folder) / "metadata.csv"
if not filename.exists():
raise FileExistsError(f"Metadata file could not be found in {folder}")
Expand All @@ -242,16 +251,27 @@ def _read_metadata(self, folder: str) -> dict:
lat = float(words[4])
alt = float(words[5])
except:
logger.info(
f"Could not interpret lat, lon, alt for line {line} in metadata. Skipping"
)
continue
_metadata[words[0]] = {
metadata[words[0]] = {
"lon": lon,
"lat": lat,
"alt": alt,
"stationcode": words[2],
"country": words[1],
}

return _metadata
return metadata

def _read_cfg(self) -> dict:
    """Load and return the reader configuration parsed from the bundled data.toml."""
    with open(DATA_TOML, "rb") as toml_file:
        return tomllib.load(toml_file)

def _convert_unit(self, unit: str) -> str:
return self.data_cfg["units"][unit]

def _unfiltered_data(self, varname) -> Data:
    """Return the raw (pre-filter) data array stored for *varname*."""
    per_variable = self._data
    return per_variable[varname]
Expand All @@ -278,6 +298,3 @@ def description(self):

def url(self):
    """Return the homepage URL of the pyaro-readers project."""
    homepage = "https://github.com/metno/pyaro-readers"
    return homepage

def read(self):
    """Instantiate the configured reader class and delegate reading to it."""
    reader = self.reader_class()
    return reader.read()
Loading

0 comments on commit 4e07711

Please sign in to comment.