Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

fix: store new measurements with total amount instead of calculating it #35

Merged
merged 4 commits into from
Nov 17, 2024
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
59 changes: 36 additions & 23 deletions custom_components/aigues_barcelona/sensor.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,8 @@
from .const import DEFAULT_SCAN_PERIOD
from .const import DOMAIN

from typing import Optional

_LOGGER = logging.getLogger(__name__)


Expand Down Expand Up @@ -135,16 +137,21 @@ async def _async_update_data(self):
_LOGGER.info(f"Updating coordinator data for {self.contract}")
TODAY = datetime.now()
LAST_WEEK = TODAY - timedelta(days=7)
LAST_TIME_DAYS = None

# last_measurement = await self.get_last_measurement_stored()
# _LOGGER.info("Last stored measurement: %s", last_measurement)

try:
previous = datetime.fromisoformat(self._data.get(CONF_STATE, ""))
# FIX: TypeError: can't subtract offset-naive and offset-aware datetimes
previous = previous.replace(tzinfo=None)
LAST_TIME_DAYS = (TODAY - previous).days
except ValueError:
previous = None

if previous and (TODAY - previous) <= timedelta(minutes=60):
_LOGGER.warn("Skipping request update data - too early")
_LOGGER.warning("Skipping request update data - too early")
return

consumptions = None
Expand Down Expand Up @@ -181,6 +188,9 @@ async def _async_update_data(self):
except:
pass

if LAST_TIME_DAYS >= 7:
await self.import_old_consumptions(days=LAST_TIME_DAYS)

return True

async def _clear_statistics(self) -> None:
Expand All @@ -202,47 +212,47 @@ async def _clear_statistics(self) -> None:
clear_statistics, self.hass.data[RECORDER_DATA_INSTANCE], to_clear
)

async def _async_import_statistics(self, consumptions) -> None:
# force sort by datetime
consumptions = sorted(
consumptions, key=lambda x: datetime.fromisoformat(x["datetime"])
)
async def get_last_measurement_stored(self) -> Optional[datetime]:
    """Return the start time of the newest statistic stored for this sensor.

    Scans the recorder's registered statistic ids, keeps the entry for
    ``self.internal_sensor_id`` with the highest accumulated ``sum``, and
    returns its ``start_ts`` converted to a datetime.

    Returns:
        The start timestamp of the last stored statistic, or ``None`` when
        no statistics exist yet for this sensor.
    """
    last_stored = None

    # list_statistic_ids touches the recorder DB, so it must run in the
    # recorder's executor thread rather than the event loop.
    all_ids = await get_db_instance(self.hass).async_add_executor_job(
        list_statistic_ids, self.hass
    )

    for stat_id in all_ids:
        if stat_id["statistic_id"] != self.internal_sensor_id:
            continue
        # FIX: guard the first match — last_stored starts as None, so
        # last_stored["sum"] would raise TypeError before anything was kept.
        if stat_id.get("sum") and (
            last_stored is None or stat_id["sum"] > last_stored["sum"]
        ):
            last_stored = stat_id

    if last_stored:
        _LOGGER.debug(f"Found last stored value: {last_stored}")
        # NOTE(review): fromtimestamp() yields a naive local-time datetime;
        # callers comparing against datetime.now() appear to rely on that —
        # confirm timezone handling if the recorder stores UTC epochs.
        return datetime.fromtimestamp(last_stored.get("start_ts"))

    return None

async def _async_import_statistics(self, consumptions) -> None:
# force sort by datetime
consumptions = sorted(
consumptions, key=lambda x: datetime.fromisoformat(x["datetime"])
)

stats = list()
sum_total = last_stored_value
for metric in consumptions:
start_ts = datetime.fromisoformat(metric["datetime"])
start_ts = start_ts.replace(minute=0, second=0, microsecond=0) # required
# Calculate deltaConsumption
deltaConsumption = metric["accumulatedConsumption"] - last_stored_value
# Ensure deltaConsumption is positive before adding to sum_total
if deltaConsumption < 0:
_LOGGER.warn(f"Negative deltaConsumption detected: {deltaConsumption}")
deltaConsumption = 0

# round: fixes decimal with 20 digits precision
sum_total = round(sum_total + deltaConsumption, 4)
state = round(metric["accumulatedConsumption"], 4)
stats.append(
{
"start": start_ts,
"state": metric["accumulatedConsumption"],
"state": state,
# -- required to show in historic/recorder
"sum": sum_total,
# -- incremental sum = current total value, so we don't show negative values in HA
"sum": state,
# "last_reset": start_ts,
}
)
last_stored_value = metric["accumulatedConsumption"]
metadata = {
"has_mean": False,
"has_sum": True,
Expand All @@ -261,6 +271,9 @@ async def import_old_consumptions(self, days: int = 365) -> None:
today = datetime.now()
one_year_ago = today - timedelta(days=days)

if self._api.is_token_expired():
raise ConfigEntryAuthFailed

current_date = one_year_ago
while current_date < today:
consumptions = await self.hass.async_add_executor_job(
Expand Down
Loading