
Commit

use stored last measurement, some fixes
duhow committed Nov 17, 2024

Verified: This commit was created on GitHub.com and signed with GitHub’s verified signature.
1 parent 076f40c commit e552c37
Showing 1 changed file with 23 additions and 8 deletions.
31 changes: 23 additions & 8 deletions custom_components/aigues_barcelona/sensor.py
@@ -42,6 +42,8 @@
 from .const import DEFAULT_SCAN_PERIOD
 from .const import DOMAIN
 
+from typing import Optional
+
 _LOGGER = logging.getLogger(__name__)
 
 
@@ -135,16 +137,21 @@ async def _async_update_data(self):
         _LOGGER.info(f"Updating coordinator data for {self.contract}")
         TODAY = datetime.now()
         LAST_WEEK = TODAY - timedelta(days=7)
+        LAST_TIME_DAYS = None
+
         #last_measurement = await self.get_last_measurement_stored()
         #_LOGGER.info("Last stored measurement: %s", last_measurement)
 
         try:
             previous = datetime.fromisoformat(self._data.get(CONF_STATE, ""))
+            # FIX: TypeError: can't subtract offset-naive and offset-aware datetimes
+            previous = previous.replace(tzinfo=None)
+            LAST_TIME_DAYS = (TODAY - previous).days
         except ValueError:
             previous = None
 
         if previous and (TODAY - previous) <= timedelta(minutes=60):
-            _LOGGER.warn("Skipping request update data - too early")
+            _LOGGER.warning("Skipping request update data - too early")
             return
 
         consumptions = None
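
The tzinfo fix in the hunk above works around Python's refusal to mix offset-aware and offset-naive datetimes in arithmetic: `datetime.fromisoformat` returns an aware value when the stored state string carries a UTC offset, while `datetime.now()` is naive. A minimal standalone sketch of the failure and the workaround (the timestamp is made up):

from datetime import datetime

stored = datetime.fromisoformat("2024-11-17T10:00:00+01:00")  # offset-aware
now = datetime.now()                                          # offset-naive

# now - stored  ->  TypeError: can't subtract offset-naive and offset-aware datetimes
stored_naive = stored.replace(tzinfo=None)  # drop the offset, as the hunk does
print((now - stored_naive).days)

Note that `replace(tzinfo=None)` only drops the offset without converting to local time, which is acceptable here because the result is only used at day granularity.
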
@@ -181,6 +188,9 @@ async def _async_update_data(self):
         except:
             pass
 
+        if LAST_TIME_DAYS is not None and LAST_TIME_DAYS >= 7:
+            await self.import_old_consumptions(days=LAST_TIME_DAYS)
+
         return True
 
     async def _clear_statistics(self) -> None:
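
`LAST_TIME_DAYS` stays `None` whenever no previous state could be parsed, and Python 3 refuses to order `None` against an integer, so the backfill check needs an explicit `is not None` guard. A quick illustration:

days_since_last = None

# days_since_last >= 7  ->  TypeError: '>=' not supported between instances of 'NoneType' and 'int'

if days_since_last is not None and days_since_last >= 7:
    print("backfill needed")
else:
    print("no backfill")
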
@@ -202,21 +212,23 @@ async def _clear_statistics(self) -> None:
             clear_statistics, self.hass.data[RECORDER_DATA_INSTANCE], to_clear
         )
 
-    async def get_last_measurement_stored(self):
-        last_stored_value = 0.0
-        last_stored_date = None
+    async def get_last_measurement_stored(self) -> Optional[datetime]:
+        last_stored = None
 
         all_ids = await get_db_instance(self.hass).async_add_executor_job(
             list_statistic_ids, self.hass
         )
 
         for stat_id in all_ids:
             if stat_id["statistic_id"] == self.internal_sensor_id:
-                if stat_id.get("sum") and stat_id["sum"] > last_stored_value:
-                    last_stored_value = stat_id["sum"]
-                    last_stored_date = stat_id["start"]
+                if stat_id.get("sum") and (last_stored is None or stat_id["sum"] > last_stored["sum"]):
+                    last_stored = stat_id
 
-        return last_stored_date
+        if last_stored:
+            _LOGGER.debug(f"Found last stored value: {last_stored}")
+            return datetime.fromtimestamp(last_stored.get("start_ts"))
+
+        return None
 
     async def _async_import_statistics(self, consumptions) -> None:
         # force sort by datetime
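
The rewritten helper now returns the start of the newest statistics entry as a naive local `datetime` (via `datetime.fromtimestamp`) instead of a bare float. A standalone sketch of the same selection logic over made-up rows; the keys `statistic_id`, `sum` and `start_ts` mirror the ones used in the hunk, everything else is hypothetical:

from datetime import datetime
from typing import Optional

rows = [  # hypothetical statistics rows
    {"statistic_id": "sensor.water_meter", "sum": 10.0, "start_ts": 1731798000},
    {"statistic_id": "sensor.water_meter", "sum": 12.5, "start_ts": 1731801600},
    {"statistic_id": "sensor.other_meter", "sum": 99.0, "start_ts": 1731801600},
]

def last_measurement(rows: list[dict], sensor_id: str) -> Optional[datetime]:
    mine = [r for r in rows if r["statistic_id"] == sensor_id and r.get("sum")]
    if not mine:
        return None
    newest = max(mine, key=lambda r: r["sum"])  # highest accumulated sum wins
    return datetime.fromtimestamp(newest["start_ts"])  # epoch seconds -> naive local datetime

print(last_measurement(rows, "sensor.water_meter"))
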
@@ -259,6 +271,9 @@ async def import_old_consumptions(self, days: int = 365) -> None:
         today = datetime.now()
         one_year_ago = today - timedelta(days=days)
 
+        if self._api.is_token_expired():
+            raise ConfigEntryAuthFailed
+
         current_date = one_year_ago
         while current_date < today:
             consumptions = await self.hass.async_add_executor_job(
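
The loop that follows the new token check walks from `one_year_ago` up to `today`, fetching consumptions window by window. A minimal sketch of that windowed iteration, assuming a weekly step and a placeholder fetch function (the real step size and API call sit outside this excerpt):

from datetime import datetime, timedelta

def fetch_consumptions(start: datetime, end: datetime) -> list:
    # stand-in for the real API call made via async_add_executor_job
    return []

today = datetime.now()
one_year_ago = today - timedelta(days=30)  # shortened range for the example
step = timedelta(days=7)                   # assumed window size

current_date = one_year_ago
while current_date < today:
    window_end = min(current_date + step, today)
    consumptions = fetch_consumptions(current_date, window_end)
    current_date = window_end
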
