Skip to content

Commit

Permalink
✨ Improve behavior upon restart
Browse files Browse the repository at this point in the history
This patch avoids getting an error from the API upon HA restart by
caching the API data and reusing it after the restart.
  • Loading branch information
kamaradclimber committed Nov 19, 2023
1 parent c932c8e commit c5af1b1
Showing 1 changed file with 42 additions and 8 deletions.
50 changes: 42 additions & 8 deletions custom_components/rte_ecowatt/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
import aiohttp


from homeassistant.helpers.storage import Store
from homeassistant.const import Platform, STATE_ON
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.typing import ConfigType
Expand Down Expand Up @@ -124,6 +125,8 @@ def __init__(self, hass, config: ConfigType):
self.oauth_client = AsyncOauthClient(config)
self.api_version = "v5"

self._custom_store = Store(hass, 1, "rte_ecowatt")

async def async_oauth_client(self):
client = await self.oauth_client.client()
self.token = client.token
Expand Down Expand Up @@ -161,6 +164,43 @@ async def update_method(self):
This could be the place to pre-process the data to lookup tables
so entities can quickly look up their data.
"""
body = None
try:
previous_data = await self._custom_store.async_load()
if previous_data is not None:
last_update = datetime.strptime(
previous_data["last_update"], "%Y-%m-%dT%H:%M:%S.%f"
)
if (datetime.now() - last_update) < timedelta(minutes=15):
_LOGGER.info(
"Loading RTE ecowatt data from storage instead of querying api"
)
body = previous_data["body"]
except Exception as e:
_LOGGER.warn(f"Impossible to load previous data: {e}")
if body is None:
body = await self._real_update_method()
try:
await self._custom_store.async_save(
{
"last_update": datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%f"),
"body": body,
}
)
except Exception as e:
_LOGGER.exception("Error received while caching API data")
raise e
signals = json.loads(body)["signals"]
# additional parsing
for day_data in signals:
parsed_time = datetime.strptime(day_data["jour"], "%Y-%m-%dT%H:%M:%S%z")
day_data["date"] = parsed_time.date()
day_data["datetime"] = parsed_time

_LOGGER.debug(f"data parsed: {signals}")
return signals

async def _real_update_method(self):
try:
_LOGGER.debug(
f"Calling update method, {len(self._listeners)} listeners subscribed"
Expand All @@ -170,6 +210,7 @@ async def update_method(self):
"Failing update on purpose to test state restoration"
)
_LOGGER.debug("Starting collecting data")

if self.skip_refresh():
_LOGGER.warning(f"Skipping data refresh because: {self.skip_refresh()}")
return self.data
Expand Down Expand Up @@ -212,14 +253,7 @@ async def update_method(self):
body = await api_result.text()
await client.close() # we won't need the client anymore
_LOGGER.debug(f"api response body: {body}")
signals = json.loads(body)["signals"]
for day_data in signals:
parsed_time = datetime.strptime(day_data["jour"], "%Y-%m-%dT%H:%M:%S%z")
day_data["date"] = parsed_time.date()
day_data["datetime"] = parsed_time

_LOGGER.debug(f"data parsed: {signals}")
return signals
return body
except Exception as err:
raise UpdateFailed(f"Error communicating with API: {err}")

Expand Down

0 comments on commit c5af1b1

Please sign in to comment.