Skip to content

Commit

Permalink
Fix #35 #36: Planned dispatch source is None (PR #37)
Browse files Browse the repository at this point in the history
  • Loading branch information
pdcastro committed Mar 15, 2024
1 parent 13d548f commit 4b32bdb
Show file tree
Hide file tree
Showing 3 changed files with 200 additions and 3 deletions.
10 changes: 10 additions & 0 deletions custom_components/octopus_intelligent/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -65,3 +65,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
_LOGGER.debug("Octopus Intelligent System component setup finished")
return True


async def async_remove_entry(hass: HomeAssistant, entry: ConfigEntry):
    """Called when the config entry is removed (the integration is deleted)."""
    # Look up the system object that async_setup_entry stored for this entry.
    entry_data = hass.data[DOMAIN][entry.entry_id]
    octopus_system: OctopusIntelligentSystem = entry_data[OCTOPUS_SYSTEM]
    try:
        await octopus_system.async_remove_entry()
    except Exception as ex:  # pylint: disable=broad-exception-caught
        # Removal is best-effort: log and carry on so the entry can still be deleted.
        _LOGGER.error(ex)
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
"""Support for Octopus Intelligent Tariff in the UK."""
from datetime import timedelta, datetime, timezone
from typing import Any, override
import asyncio
import logging

Expand All @@ -12,6 +13,7 @@

from .graphql_client import OctopusEnergyGraphQLClient
from .graphql_util import validate_octopus_account
from .persistent_data import PersistentData, PersistentDataStore
from .util import *

_LOGGER = logging.getLogger(__name__)
Expand All @@ -34,30 +36,100 @@ def __init__(self, hass, *, api_key, account_id, off_peak_start, off_peak_end):
self._off_peak_end = off_peak_end

self.client = OctopusEnergyGraphQLClient(self._api_key)

self._persistent_data = PersistentData()
self._store = PersistentDataStore(self._persistent_data, hass, account_id)

@property
def account_id(self):
    """The Octopus Energy account ID this system was configured with (read-only)."""
    return self._account_id

@override
async def _async_update_data(self) -> dict[str, Any]:
    """Fetch data from API endpoint.

    This is the place to pre-process the data to lookup tables
    so entities can quickly look up their data.

    Returns:
        dict: The data received from the Octopus API, for example:
        {
            'completedDispatches': [{
                'chargeKwh': '-0.58',
                'startDtUtc': '2024-02-25 02:00:00+00:00',
                'endDtUtc': '2024-02-25 02:30:00+00:00',
                'meta': {'location': 'AT_HOME', 'source': None},
            }, {
                'chargeKwh': '-0.58',
                'startDtUtc': '2024-02-25 03:30:00+00:00',
                'endDtUtc': '2024-02-25 04:00:00+00:00',
                'meta': {'location': 'AT_HOME', 'source': None},
            }],
            'plannedDispatches': [{
                'chargeKwh': '-0.67',
                'startDtUtc': '2024-02-25 23:30:00+00:00',
                'endDtUtc': '2024-02-26 00:00:00+00:00',
                'meta': {'location': None, 'source': 'smart-charge'},
            }, {
                'chargeKwh': '-1.12',
                'startDtUtc': '2024-02-26 03:00:00+00:00',
                'endDtUtc': '2024-02-26 04:00:00+00:00',
                'meta': {'location': None, 'source': 'smart-charge'},
            }],
            'vehicleChargingPreferences': {
                'weekdayTargetSoc': 80,
                'weekdayTargetTime': '08:00',
                'weekendTargetSoc': 80,
                'weekendTargetTime': '08:00',
            },
            'registeredKrakenflexDevice': { ... },
        }

    Raises:
        UpdateFailed: if the API call fails for any reason (the coordinator
            catches this and schedules a retry).
    """
    try:
        # Note: asyncio.TimeoutError and aiohttp.ClientError are already
        # handled by the data update coordinator.
        async with asyncio.timeout(90):
            data = await self.client.async_get_combined_state(self._account_id)
        # Workaround for issue #35: fill in missing planned dispatch sources.
        self._update_planned_dispatch_sources(data)
        return data
    # NOTE: if the client ever distinguishes auth errors, raising
    # ConfigEntryAuthFailed here would cancel future updates and start a
    # config flow with SOURCE_REAUTH (async_step_reauth).
    except Exception as err:
        # Chain the cause so the original traceback is preserved in the logs.
        raise UpdateFailed(f"Error communicating with Octopus GraphQL API: {err}") from err

def _update_planned_dispatch_sources(self, data):
"""Workaround for issue #35: missing dispatch sources in Octopus API response."""
dispatches = (data or {}).get("plannedDispatches", [])
all_sources = [disp.get("meta", {}).get("source", "") for disp in dispatches]
good_sources: set[str] = {src for src in all_sources if src}
if good_sources:
if len(good_sources) > 1:
_LOGGER.warning(
"Unexpected mix of planned dispatch sources: %s", good_sources
)
# We don't expect to see a mix of non-None sources like 'bump-charge'
# and 'smart-charge' in the same planned dispatch list, but if that
# happens, play safe and avoid assuming the wrong source.
self._persistent_data.last_seen_planned_dispatch_source = ""
else:
self._persistent_data.last_seen_planned_dispatch_source = next(
iter(good_sources)
)

# Fill in any missing (None) source attribute in the planned dispatch list.
if any(not src for src in all_sources):
source = self._persistent_data.last_seen_planned_dispatch_source
_LOGGER.debug(
"Missing planned dispatch source in Octopus API response%s",
f", assuming '{source}'" if source else "",
)
if source:
for dispatch in dispatches:
meta = dispatch.get("meta", {})
if meta:
meta["source"] = meta.get("source") or source

def is_smart_charging_enabled(self):
    """Return True unless the registered Krakenflex device is marked suspended."""
    device = self.data.get('registeredKrakenflexDevice', {})
    return not device.get('suspended', False)
async def async_suspend_smart_charging(self):
Expand Down Expand Up @@ -172,9 +244,15 @@ async def async_start_boost_charge(self):
async def async_cancel_boost_charge(self):
    """Cancel a previously requested boost charge via the Octopus GraphQL client."""
    await self.client.async_cancel_boost_charge(self._account_id)

async def async_remove_entry(self):
    """Called when the integration (config entry) is removed from Home Assistant."""
    # Delete this account's persistent data from storage so nothing is left behind.
    await self._store.remove()

async def start(self):
    """Validate the Octopus account and load persisted data before first use."""
    _LOGGER.debug("Starting OctopusIntelligentSystem")
    # Assumes validate_octopus_account raises on an invalid account/API key —
    # see graphql_util for the actual contract.
    await validate_octopus_account(self.client, self._account_id)

    # Restore persisted state (e.g. the last seen planned dispatch source).
    await self._store.load()

async def stop(self):
_LOGGER.debug("Stopping OctopusIntelligentSystem")
109 changes: 109 additions & 0 deletions custom_components/octopus_intelligent/persistent_data.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,109 @@
"""Persistent data storage for the integration, based on the HASS helpers.storage.Store class."""
import logging
from dataclasses import asdict, dataclass
from typing import Any

from homeassistant.const import EVENT_HOMEASSISTANT_STOP
from homeassistant.core import CALLBACK_TYPE, Event, HomeAssistant
from homeassistant.exceptions import IntegrationError
from homeassistant.helpers.storage import Store

from .const import DOMAIN


_LOGGER = logging.getLogger(__name__)


@dataclass
class PersistentData:
    """JSON-serialisable persistent data."""

    last_seen_planned_dispatch_source: str = "smart-charge"

    def set_values(self, data: dict[str, Any]):
        """Assign values from the given dict to this dataclass."""
        # Copy each field by name rather than unpacking '**data' wholesale so
        # that unknown keys from an older or newer schema are silently ignored.
        current = self.last_seen_planned_dispatch_source
        self.last_seen_planned_dispatch_source = data.get(
            "last_seen_planned_dispatch_source", current
        )


class PersistentDataStore:
    """Wrapper around hass.helpers.storage.Store, with a lazy saving feature.

    Home Assistant may be hosted on an edge device like the Raspberry Pi, with data
    stored on an SD Card that physically "wears" when data is written. To mitigate this
    issue, the lazy save feature delays writing data to "disk" until the HASS STOP event
    is fired, indicating that Home Assistant is about to quit or restart. This includes
    the frontend web UI 'Restart' command, the "docker container stop" command, CTRL-C on
    the command line, and generally when the HASS process receives the SIGTERM signal.
    """

    def __init__(
        self,
        data: PersistentData,
        hass: HomeAssistant,
        account_id: str,
        lazy_save: bool = True,
    ):
        """Create a store for the given data, keyed by the Octopus account ID.

        Args:
            data: The dataclass instance to load values into and save values from.
            hass: The Home Assistant instance (provides the storage helper and bus).
            account_id: Octopus account ID, used to namespace the storage key.
            lazy_save: Whether to defer saving until the HASS STOP event fires.
        """
        self.data = data
        self._hass = hass
        # Keyed as '<domain>.<account_id>' so each account gets its own entry.
        self._store = Store[dict[str, Any]](
            hass=hass,
            key=f"{DOMAIN}.{account_id}",
            version=1,
            minor_version=1,
        )
        # Unsubscribe handle for the STOP-event listener; None while disabled.
        self._stop_event_listener: CALLBACK_TYPE | None = None
        self.lazy_save = lazy_save

    @property
    def lazy_save(self) -> bool:
        """Return whether lazy data saving is enabled."""
        return bool(self._stop_event_listener)

    @lazy_save.setter
    def lazy_save(self, enable: bool):
        """Enable/disable automatically calling self.save() on the HASS STOP event."""

        async def _on_hass_stop(_: Event):
            # Best effort during shutdown: log save errors instead of raising.
            await self.save(raise_on_error=False)

        # NOTE(review): enabling twice overwrites the previous unsubscribe handle
        # without calling it, leaving a dangling listener — confirm callers only
        # toggle the value.
        if enable:
            self._stop_event_listener = self._hass.bus.async_listen(
                EVENT_HOMEASSISTANT_STOP, _on_hass_stop
            )
        elif self._stop_event_listener:
            # Calling the handle unsubscribes the listener; then drop it.
            self._stop_event_listener()
            self._stop_event_listener = None

    async def load(self):
        """Load the data from persistent storage."""
        # 'data' stays None when loading fails (or nothing was stored in a dict
        # shape), in which case the dataclass keeps its current/default values.
        data = None
        try:
            data = await self._store.async_load()
        except Exception as ex:  # pylint: disable=broad-exception-caught
            _LOGGER.error(ex)
        if isinstance(data, dict):
            self.data.set_values(data)

    async def save(self, raise_on_error: bool = False):
        """Save the data to persistent storage.

        Raises:
            IntegrationError: if saving fails and raise_on_error is True.
        """
        try:
            await self._store.async_save(asdict(self.data))
        except Exception as ex:  # pylint: disable=broad-exception-caught
            msg = f"Error saving persistent data: {ex}"
            if raise_on_error:
                raise IntegrationError(msg) from ex
            _LOGGER.error(msg)

    async def remove(self, disable_lazy_save: bool = True):
        """Remove the data from persistent storage (delete the JSON file on disk)."""
        if disable_lazy_save:
            # Stop listening for the STOP event so the file is not re-created
            # by a lazy save when Home Assistant shuts down.
            self.lazy_save = False
        try:
            await self._store.async_remove()
        except Exception as ex:  # pylint: disable=broad-exception-caught
            _LOGGER.error("Error removing persistent data: %s", ex)

0 comments on commit 4b32bdb

Please sign in to comment.