Skip to content

Commit

Permalink
VTO buffer and keepalive fix (#301)
Browse files Browse the repository at this point in the history
* Buffer asyncio data_received

* Remove keep_alive handler after response is received

* Added DHI-VTO3311Q-WP to supported devices
  • Loading branch information
urbim authored Sep 9, 2023
1 parent 321a576 commit b71573d
Show file tree
Hide file tree
Showing 2 changed files with 27 additions and 9 deletions.
1 change: 1 addition & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -118,6 +118,7 @@ Brand | 2 Megapixels | 4 Megapixels | 5 Megapixels | 8 Megapixels
| *Dahua* |
| | DHI-VTO2202F-P |
| | DHI-VTO2211G-P |
| | DHI-VTO3311Q-WP |
| *IMOU* |
| | IMOU C26EP-V2 |

Expand Down
35 changes: 26 additions & 9 deletions custom_components/dahua/vto.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,7 @@ class DahuaVTOClient(asyncio.Protocol):
lock_status: {}
auth: HTTPDigestAuth
data_handlers: {}
buffer: bytearray

def __init__(self, host: str, username: str, password: str, is_ssl: bool, on_receive_vto_event):
self.dahua_details = {}
Expand All @@ -71,6 +72,7 @@ def __init__(self, host: str, username: str, password: str, is_ssl: bool, on_rec
self.hold_time = 0
self.lock_status = {}
self.data_handlers = {}
self.buffer = bytearray()

# This is the hook back into HA
self.on_receive_vto_event = on_receive_vto_event
Expand All @@ -90,17 +92,26 @@ def connection_made(self, transport):

def data_received(self, data):
    """asyncio.Protocol callback: accumulate raw bytes and dispatch complete messages.

    TCP gives no message framing, so a single callback may deliver a partial
    message or several messages at once.  Bytes are appended to ``self.buffer``
    and only newline-terminated packets are parsed; any trailing partial
    packet stays in the buffer for the next call.

    :param data: raw bytes received from the transport (``bytes``).
    """
    _LOGGER.debug(f"Event data {self.host}: '{data}'")

    self.buffer += data

    # Drain every complete (newline-terminated) packet currently buffered.
    while b'\n' in self.buffer:
        # Include the newline in the packet; keep the remainder buffered.
        newline_index = self.buffer.find(b'\n') + 1
        packet = self.buffer[:newline_index]
        self.buffer = self.buffer[newline_index:]

        try:
            messages = self.parse_response(packet)
            for message in messages:
                message_id = message.get("id")

                # One-shot handlers are registered per request id; anything
                # unrecognized falls through to the default handler.
                handler: Callable = self.data_handlers.get(message_id, self.handle_default)
                handler(message)
        except Exception as ex:
            # Broad catch: a malformed packet must not kill the protocol
            # connection; log the failing line number and keep receiving.
            _, _, exc_tb = sys.exc_info()

            _LOGGER.error(f"Failed to handle message, error: {ex}, Line: {exc_tb.tb_lineno}")

def handle_notify_event_stream(self, params):
try:
Expand Down Expand Up @@ -326,6 +337,12 @@ def keep_alive(self):
def handle_keep_alive(message):
    """Handle the keep-alive response: re-arm the timer, then drop this one-shot handler."""
    # Schedule the next keep-alive round before any cleanup.
    Timer(self.keep_alive_interval, self.keep_alive).start()

    message_id = message.get('id')
    if message_id is None or message_id not in self.data_handlers:
        # Nothing registered under this id (or no id at all) — note it and move on.
        _LOGGER.warning(f'Could not delete keep alive handler with message ID {message_id}.')
        return
    # Response consumed; the handler must not linger for unrelated messages.
    self.data_handlers.pop(message_id)

request_data = {
"timeout": self.keep_alive_interval,
"action": True
Expand Down

0 comments on commit b71573d

Please sign in to comment.