Skip to content

Commit

Permalink
Added routine to update PVOutput daily stats
Browse files Browse the repository at this point in the history
Peak & off-Peak periods in PVOutput summary are based on time of use by default. Added daily recalculation to capture IO Smart Target usage as "Shoulder" tariff instead of Peak.
  • Loading branch information
salewis38 authored Jan 1, 2024
1 parent ee4f357 commit 665d22d
Showing 1 changed file with 156 additions and 3 deletions.
159 changes: 156 additions & 3 deletions palm.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@

import sys
import time
import csv
import threading
import json
from typing import List
Expand Down Expand Up @@ -51,8 +52,9 @@
# v1.1.0 06/Aug/23 Split out generic functions as palm_utils.py
# v1.1.1 19/Nov/23 Updated to Shelly Gen 2 switch, improved readability
# v1.1.2 03/Dec/23 Added Shelly switch to load balancing, updated Events logic for robustness
# v1.1.3 01/Jan/24 Added routine to update PVOutput daily stats with IO Smart Charge periods

PALM_VERSION = "v1.1.2"
PALM_VERSION = "v1.1.3"
# -*- coding: utf-8 -*-
# pylint: disable=logging-not-lazy
# pylint: disable=consider-using-f-string
Expand Down Expand Up @@ -487,6 +489,150 @@ def put_pv_output():
# End of put_pv_output()


def resummarise_pv_output(post_date: str):
    """Recalculate and upload the PVOutput.org daily tariff summary.

    Downloads the existing daily summary and the day's 5-minute status
    history from PVOutput.org, finds half-hour peak-rate periods where the
    EV was charging (IO Smart Charge slots), re-classifies the energy
    imported in those periods from "Peak" to "Shoulder", and uploads the
    revised daily totals.

    Args:
        post_date: day to process, formatted "YYYYMMDD"

    Returns nothing; all failures are logged and abort the routine.
    """

    # Step 1. Download existing daily summary data
    url = stgs.PVOutput.url + "getstatistic.jsp"
    key = stgs.PVOutput.key
    sid = stgs.PVOutput.sid

    payload = {
        "c"  : 1,
        "key": key,
        "sid": sid,
        "df" : post_date,
        "dt" : post_date
    }

    # Identity quote_via lambda: pass values through unescaped, as PVOutput expects
    payload = urlencode(payload, doseq=True, quote_via=lambda x, y, z, w: x)

    try:
        resp = requests.get(url, params=payload, timeout=10)
        resp.raise_for_status()
    except requests.exceptions.RequestException as error:
        logger.warning("PVOutput Read Error "+ stgs.pg.long_t_now)
        logger.warning(error)
        return

    stats = resp.content.decode('utf-8').split(',')
    if len(stats) < 15:
        # Response lacks the consumption/tariff fields; can't continue safely
        logger.warning("PVOutput statistic response incomplete: "+ str(stats))
        return
    e_gen = int(stats[0])        # daily generation (Wh)
    e_pk = int(stats[12])        # peak-rate consumption (Wh)
    e_off_pk = int(stats[13])    # off-peak consumption (Wh)
    e_shldr = int(stats[14])     # shoulder consumption (Wh)

    print("Stats:", e_gen, e_pk, e_off_pk, e_shldr)

    if e_shldr > 0:
        print("Shoulder values already computed for ", post_date, ". Exiting")
        return

    # Step 2. Download 5-minute usage data for analysis
    url = stgs.PVOutput.url + "getstatus.jsp"

    payload = {
        "h"    : 1,
        "limit": 288,
        "ext"  : 1,
        "key"  : key,
        "sid"  : sid,
        "d"    : post_date
    }

    payload = urlencode(payload, doseq=True, quote_via=lambda x, y, z, w: x)

    try:
        resp = requests.get(url, params=payload, timeout=10)
        resp.raise_for_status()
    except requests.exceptions.RequestException as error:
        logger.warning("PVOutput Read Error "+ stgs.pg.long_t_now)
        logger.warning(error)
        return

    pv_data = resp.content.decode('utf-8')

    # Rows are ';'-separated; prepend a header row so csv.DictReader can name fields
    headings = "Date,Time,EGen,EEff,PInst,PAvg,NormOp,ECons,PCons,Temp,Volts,v7 - PEV,v8 - PBattOut,v9 - CO2 Intens,v10 CO2 Usage,v11 - PBattIn,v12 - SoC;"

    measured_data = headings + pv_data

    # 288 five-minute slots per day; oversize buffers avoid index errors
    d_time: List[str] = [""] * 300
    d_pwr_ev: List[int] = [0] * 300
    d_pwr_import: List[int] = [0] * 300

    csv_data = csv.DictReader(measured_data.split(';'))

    # Step 3. Extract fields from received data. Data is received in
    # reverse chronological order (latest slot first)
    i = 0
    for row in csv_data:
        d_time[i] = str(row['Time'])
        d_pwr_ev[i] = int(float(row['v7 - PEV']))
        # Cumulative net import so far today (Wh): consumption minus generation
        d_pwr_import[i] = int(row['ECons']) - int(row['EGen'])
        i += 1

    # Step 4. Work back through data (earliest slot first) to identify peak
    # periods where the EV is active, and sum energy imported in those periods
    e_shldr = 0
    i -= 1  # the loop above leaves i one past the last populated row
    while i > 4:
        t_row_mins = t_to_mins(d_time[i])
        if t_row_mins % 30 == 0:
            # 05:30-23:30 window, >1kW total EV draw over the half-hour
            if 330 < t_row_mins < 1410 and sum(d_pwr_ev[i:i+5]) > 1000:
                # Energy imported in this half-hour = cumulative delta over 6 slots
                e_shldr += d_pwr_import[i-5] - d_pwr_import[i]
        i -= 1

    if e_shldr == 0:
        print("No shoulder generation identified")
        return

    e_pk_new = e_pk - e_shldr

    logger.warning("Daily adjustments for "+ str(post_date)+ ":")
    logger.warning("Off Peak:"+ str(e_off_pk))
    logger.warning("Peak: "+ str(e_pk)+ " now: "+ str(e_pk_new))
    logger.warning("Shoulder: "+ str(e_shldr))

    # Step 5. Upload revised values to PVOutput.org
    url = stgs.PVOutput.url + "addoutput.jsp"

    payload = {
        "sid" : sid,
        "key" : key
    }

    part_payload = {
        "d" : post_date,
        "g" : e_gen,
        "ip" : e_pk_new,
        "io" : e_off_pk,
        "is" : e_shldr
    }

    payload.update(part_payload)  # Concatenate the data, don't escape ":"
    payload = urlencode(payload, doseq=True, quote_via=lambda x, y, z, w: x)

    time.sleep(2)  # PVOutput has a 1 second rate limit. Avoid any clashes

    if not stgs.pg.test_mode:
        resp = None  # guard: requests.get may raise before resp is bound
        try:
            resp = requests.get(url, params=payload, timeout=10)
            resp.raise_for_status()
        except requests.exceptions.RequestException as error:
            logger.warning("PVOutput Write Error "+ stgs.pg.long_t_now)
            logger.warning(error)
            if resp is not None:
                print(resp.content)
            return

    logger.info("Data; Write to pvoutput.org; "+ str(part_payload))
    return

# End of resummarise_pv_output


def balance_loads():
"""control loads, based on schedule, generation, temp, etc."""

Expand Down Expand Up @@ -595,7 +741,7 @@ def update(self):

# Publish data to PVOutput.org every 5 minutes (or 5 cycles as a catch-all)
self.post_pvoutput = stgs.PVOutput.enable is True and \
(stgs.pg.test_mode or t_now % 5 == 3 or \
(stgs.pg.test_mode or t_now % 5 == 2 or \
stgs.pg.loop_counter > stgs.pg.pvo_tstamp + 4)

# Update carbon intensity and weather every 15 mins
Expand Down Expand Up @@ -809,7 +955,14 @@ def update(self):
do_put_pv_output = threading.Thread(target=put_pv_output)
do_put_pv_output.daemon = True
do_put_pv_output.start()


# Update PVOutput daily summary to reflect any IO Smart charging
if stgs.pg.t_now_mins == 1438:
    # Pass the callable and its argument separately. The previous form
    # target=resummarise_pv_output(...) called the routine synchronously in
    # this loop and handed Thread its None return value as the target.
    do_resumm_pv_output = threading.Thread(
        target=resummarise_pv_output,
        args=(time.strftime("%Y%m%d", time.localtime()),))
    do_resumm_pv_output.daemon = True
    do_resumm_pv_output.start()

stgs.pg.loop_counter += 1

if stgs.pg.t_now_mins == 0: # Reset frame counter every 24 hours
Expand Down

0 comments on commit 665d22d

Please sign in to comment.