From 3f14f5cb9e9c13dae13055c601bd7309594a35c9 Mon Sep 17 00:00:00 2001
From: Stefan Allius
Date: Thu, 5 Oct 2023 18:47:59 +0200
Subject: [PATCH 01/12] make class Proxy a derivation of class AsyncStream

---
 app/src/async_stream.py | 59 +++++++++-----------------------
 app/src/proxy.py        | 74 +++++++++++++++++++++++++++++++----------
 2 files changed, 73 insertions(+), 60 deletions(-)

diff --git a/app/src/async_stream.py b/app/src/async_stream.py
index 9407948..1d0c7ea 100644
--- a/app/src/async_stream.py
+++ b/app/src/async_stream.py
@@ -1,22 +1,18 @@
-import logging, traceback, aiomqtt, json
+import logging, traceback
 from config import Config
 from messages import Message, hex_dump_memory
-from mqtt import Mqtt

 logger = logging.getLogger('conn')
-logger_mqtt = logging.getLogger('mqtt')


 class AsyncStream(Message):

-    def __init__(self, proxy, reader, writer, addr, stream=None, server_side=True):
+    def __init__(self, reader, writer, addr, remote_stream, server_side: bool) -> None:
         super().__init__()
-        self.proxy = proxy
         self.reader = reader
         self.writer = writer
-        self.remoteStream = stream
-        self.addr = addr
+        self.remoteStream = remote_stream
         self.server_side = server_side
-        self.mqtt = Mqtt()
+        self.addr = addr
         self.unique_id = 0
         self.node_id = ''

@@ -24,26 +20,26 @@ class AsyncStream(Message):
     '''
     Our puplic methods
     '''
     def set_serial_no(self, serial_no : str):
-        logger_mqtt.info(f'SerialNo: {serial_no}')
+        logger.info(f'SerialNo: {serial_no}')

         if self.unique_id != serial_no:
             inverters = Config.get('inverters')
-            #logger_mqtt.debug(f'Inverters: {inverters}')
+            #logger.debug(f'Inverters: {inverters}')

             if serial_no in inverters:
-                logger_mqtt.debug(f'SerialNo {serial_no} allowed!')
+                logger.debug(f'SerialNo {serial_no} allowed!')
                 inv = inverters[serial_no]
                 self.node_id = inv['node_id']
                 self.sug_area = inv['suggested_area']
             else:
-                logger_mqtt.debug(f'SerialNo {serial_no} not known!')
+                logger.debug(f'SerialNo {serial_no} not known!')
                 self.node_id = ''
                 self.sug_area = ''

                 if not inverters['allow_all']:
                     self.unique_id = None
-                    logger_mqtt.error('ignore message from unknow inverter!')
+                    logger.error('ignore message from unknow inverter!')
                     return

             self.unique_id = serial_no

@@ -53,18 +49,6 @@ class AsyncStream(Message):
             ha = Config.get('ha')
             self.entitiy_prfx = ha['entity_prefix'] + '/'
             self.discovery_prfx = ha['discovery_prefix'] + '/'

-    async def register_home_assistant(self):
-
-        if self.server_side:
-            try:
-                for data_json, component, id in self.db.ha_confs(self.entitiy_prfx + self.node_id, self.unique_id, self.sug_area):
-                    logger_mqtt.debug(f'Register: {data_json}')
-                    await self.mqtt.publish(f"{self.discovery_prfx}{component}/{self.node_id}{id}/config", data_json)
-
-            except Exception:
-                logging.error(
-                    f"Proxy: Exception:\n"
-                    f"{traceback.format_exc()}")

     async def loop(self) -> None:
@@ -79,7 +63,7 @@
                 if self.unique_id:
                     await self.__async_write()
                     await self.__async_forward()
-                    await self.__async_publ_mqtt()
+                    await self.async_publ_mqtt()

             except (ConnectionResetError,
@@ -104,7 +88,6 @@
         logger.debug(f'in AsyncStream.close() {self.addr}')
         self.writer.close()
         super().close()  # call close handler in the parent class
-        self.proxy = None  # clear our refernce to the proxy, to avoid memory leaks

         if self.remoteStream:  # if we have knowledge about a remote stream, we del the references between the two streams
             self.remoteStream.remoteStream = None
@@ -132,8 +115,7 @@
     async def __async_forward(self) -> None:
         if self._forward_buffer:
             if not self.remoteStream:
-                tsun = Config.get('tsun')
-                self.remoteStream = await self.proxy.CreateClientStream (self, tsun['host'], tsun['port'])
+                await self.async_create_remote()  # only implmeneted for server side => syncServerStream

             if self.remoteStream:
                 hex_dump_memory(logging.DEBUG, f'Forward to {self.remoteStream.addr}:', self._forward_buffer, len(self._forward_buffer))
                 self.remoteStream.writer.write(self._forward_buffer)
                 await self.remoteStream.writer.drain()
                 self._forward_buffer = bytearray(0)

-    async def __async_publ_mqtt(self) -> None:
-        if self.server_side:
-            db = self.db.db
+    async def async_create_remote(self) -> None:
+        pass

-            # check if new inverter or collector infos are available or when the home assistant has changed the status back to online
-            if (self.new_data.keys() & {'inverter', 'collector'}) or self.mqtt.home_assistant_restarted:
-                await self.register_home_assistant()
-                self.mqtt.home_assistant_restarted = False  # clear flag
+    async def async_publ_mqtt(self) -> None:
+        pass

-            for key in self.new_data:
-                if self.new_data[key] and key in db:
-                    data_json = json.dumps(db[key])
-                    logger_mqtt.info(f'{key}: {data_json}')
-                    await self.mqtt.publish(f"{self.entitiy_prfx}{self.node_id}{key}", data_json)
-                    self.new_data[key] = False

     def __del__ (self):
         logger.debug ("AsyncStream __del__")
         super().__del__()
-
+
diff --git a/app/src/proxy.py b/app/src/proxy.py
index 41b81ea..a1d165e 100644
--- a/app/src/proxy.py
+++ b/app/src/proxy.py
@@ -1,42 +1,52 @@
-import asyncio, logging, traceback
+import asyncio, logging, traceback, json
+from config import Config
 from async_stream import AsyncStream
+from mqtt import Mqtt

-class Proxy:
-    def __init__ (proxy, reader, writer, addr):
-        proxy.ServerStream = AsyncStream(proxy, reader, writer, addr)
-        proxy.ClientStream = None
+logger = logging.getLogger('conn')

-    async def server_loop(proxy, addr):
+
+
+class Proxy(AsyncStream):
+
+    def __init__ (self, reader, writer, addr):
+        super().__init__(reader, writer, addr, None, True)
+        self.mqtt = Mqtt()
+
+    async def server_loop(self, addr):
         '''Loop for receiving messages from the inverter (server-side)'''
         logging.info(f'Accept connection from {addr}')
-        await proxy.ServerStream.loop()
+        await self.loop()
         logging.info(f'Server loop stopped for {addr}')

         # if the server connection closes, we also disconnect the connection to te TSUN cloud
-        if proxy.ClientStream:
+        if self.remoteStream:
             logging.debug ("disconnect client connection")
-            proxy.ClientStream.disc()
+            self.remoteStream.disc()

-    async def client_loop(proxy, addr):
+    async def client_loop(self, addr):
         '''Loop for receiving messages from the TSUN cloud (client-side)'''
-        await proxy.ClientStream.loop()
+        await self.remoteStream.loop()
         logging.info(f'Client loop stopped for {addr}')

         # if the client connection closes, we don't touch the server connection. Instead we erase the client
         # connection stream, thus on the next received packet from the inverter, we can establish a new connection
         # to the TSUN cloud
-        proxy.ClientStream = None
+        self.remoteStream = None

-    async def CreateClientStream (proxy, stream, host, port):
+    async def async_create_remote(self) -> None:
         '''Establish a client connection to the TSUN cloud'''
+        tsun = Config.get('tsun')
+        host = tsun['host']
+        port = tsun['port']
         addr = (host, port)

         try:
             logging.info(f'Connected to {addr}')
             connect = asyncio.open_connection(host, port)
             reader, writer = await connect
-            proxy.ClientStream = AsyncStream(proxy, reader, writer, addr, stream, server_side=False)
-            asyncio.create_task(proxy.client_loop(addr))
+            self.remoteStream = AsyncStream(reader, writer, addr, self, False)
+            asyncio.create_task(self.client_loop(addr))
         except ConnectionRefusedError as error:
             logging.info(f'{error}')
@@ -44,7 +54,37 @@ class Proxy:
             logging.error(
                 f"Proxy: Exception for {addr}:\n"
                 f"{traceback.format_exc()}")
-        return proxy.ClientStream
+
+
+    async def async_publ_mqtt(self) -> None:
+        db = self.db.db
+        # check if new inverter or collector infos are available or when the home assistant has changed the status back to online
+        if (self.new_data.keys() & {'inverter', 'collector'}): #or self.mqtt.home_assistant_restarted:
+            await self.__register_home_assistant()
+            #self.mqtt.home_assistant_restarted = False  # clear flag
+        for key in self.new_data:
+            if self.new_data[key] and key in db:
+                data_json = json.dumps(db[key])
+                logger.info(f'{key}: {data_json}')
+                await self.mqtt.publish(f"{self.entitiy_prfx}{self.node_id}{key}", data_json)
+                self.new_data[key] = False
+
+    async def __register_home_assistant(self):
+        try:
+            for data_json, component, id in self.db.ha_confs(self.entitiy_prfx + self.node_id, self.unique_id, self.sug_area):
+                logger.debug(f'MQTT Register: {data_json}')
+                await self.mqtt.publish(f"{self.discovery_prfx}{component}/{self.node_id}{id}/config", data_json)
+        except Exception:
+            logging.error(
+                f"Proxy: Exception:\n"
+                f"{traceback.format_exc()}")
+
+    def close(self):
+        logger.debug(f'in AsyncServerStream.close() {self.addr}')
+        super().close()  # call close handler in the parent class
+
+
     def __del__ (proxy):
-        logging.debug ("Proxy __del__")
\ No newline at end of file
+        logging.debug ("Proxy __del__")
+        super().__del__()

From 066459f14e623eedeba521e5e5fc6653f44d8c5b Mon Sep 17 00:00:00 2001
From: Stefan Allius
Date: Thu, 5 Oct 2023 19:34:10 +0200
Subject: [PATCH 02/12] rename class Proxy to Inverter

---
 app/src/async_stream.py           |  3 ---
 app/src/{proxy.py => inverter.py} | 25 ++++++++++++++++---------
 app/src/mqtt.py                   | 16 ++++++++++++----
 app/src/server.py                 |  5 ++---
 4 files changed, 30 insertions(+), 19 deletions(-)
 rename app/src/{proxy.py => inverter.py} (82%)

diff --git a/app/src/async_stream.py b/app/src/async_stream.py
index 1d0c7ea..e741a75 100644
--- a/app/src/async_stream.py
+++ b/app/src/async_stream.py
@@ -44,9 +44,6 @@ class AsyncStream(Message):

             self.unique_id = serial_no

-            ha = Config.get('ha')
-            self.entitiy_prfx = ha['entity_prefix'] + '/'
-            self.discovery_prfx = ha['discovery_prefix'] + '/'


diff --git a/app/src/proxy.py b/app/src/inverter.py
similarity index 82%
rename from app/src/proxy.py
rename to app/src/inverter.py
index a1d165e..c2a7f80 100644
--- a/app/src/proxy.py
+++ b/app/src/inverter.py
@@ -7,11 +7,16 @@
 logger = logging.getLogger('conn')

-class Proxy(AsyncStream):
+class Inverter(AsyncStream):

     def __init__ (self, reader, writer, addr):
         super().__init__(reader, writer, addr, None, True)
         self.mqtt = Mqtt()
+        self.ha_restarts = 0
+        ha = Config.get('ha')
+        self.entitiy_prfx = ha['entity_prefix'] + '/'
+        self.discovery_prfx = ha['discovery_prefix'] + '/'
+
     async def server_loop(self, addr):
         '''Loop for receiving messages from the inverter (server-side)'''
@@ -19,7 +24,7 @@ class Proxy(AsyncStream):
         await self.loop()
         logging.info(f'Server loop stopped for {addr}')

-        # if the server connection closes, we also disconnect the connection to te TSUN cloud
+        # if the server connection closes, we also have to disconnect the connection to te TSUN cloud
         if self.remoteStream:
             logging.debug ("disconnect client connection")
             self.remoteStream.disc()
@@ -52,7 +57,7 @@ class Proxy(AsyncStream):
             logging.info(f'{error}')
         except Exception:
             logging.error(
-                f"Proxy: Exception for {addr}:\n"
+                f"Inverter: Exception for {addr}:\n"
                 f"{traceback.format_exc()}")

@@ -60,9 +65,10 @@ class Proxy(AsyncStream):
     async def async_publ_mqtt(self) -> None:
         db = self.db.db
         # check if new inverter or collector infos are available or when the home assistant has changed the status back to online
-        if (self.new_data.keys() & {'inverter', 'collector'}): #or self.mqtt.home_assistant_restarted:
+        if (self.new_data.keys() & {'inverter', 'collector'}) or self.mqtt.ha_restarts != self.ha_restarts:
             await self.__register_home_assistant()
-            #self.mqtt.home_assistant_restarted = False  # clear flag
+            self.ha_restarts = self.mqtt.ha_restarts
+
         for key in self.new_data:
             if self.new_data[key] and key in db:
                 data_json = json.dumps(db[key])
@@ -70,14 +76,15 @@ class Proxy(AsyncStream):
                 await self.mqtt.publish(f"{self.entitiy_prfx}{self.node_id}{key}", data_json)
                 self.new_data[key] = False

-    async def __register_home_assistant(self):
+    async def __register_home_assistant(self):
+
         try:
             for data_json, component, id in self.db.ha_confs(self.entitiy_prfx + self.node_id, self.unique_id, self.sug_area):
                 logger.debug(f'MQTT Register: {data_json}')
                 await self.mqtt.publish(f"{self.discovery_prfx}{component}/{self.node_id}{id}/config", data_json)
         except Exception:
             logging.error(
-                f"Proxy: Exception:\n"
+                f"Inverter: Exception:\n"
                 f"{traceback.format_exc()}")

     def close(self):
@@ -85,6 +92,6 @@ class Proxy(AsyncStream):
         super().close()  # call close handler in the parent class


-    def __del__ (proxy):
-        logging.debug ("Proxy __del__")
+    def __del__ (self):
+        logging.debug ("Inverter __del__")
         super().__del__()
diff --git a/app/src/mqtt.py b/app/src/mqtt.py
index 0387f50..b136411 100644
--- a/app/src/mqtt.py
+++ b/app/src/mqtt.py
@@ -16,14 +16,22 @@ class Singleton(type):

 class Mqtt(metaclass=Singleton):
     client = None
-
+
     def __init__(self):
         logger_mqtt.debug(f'MQTT: __init__')
         loop = asyncio.get_event_loop()
         self.task = loop.create_task(self.__loop())
-        self.home_assistant_restarted = False
-
+        self.ha_restarts = 0
+
+
+    @property
+    def ha_restarts(self):
+        return self._ha_restarts

+    @ha_restarts.setter
+    def ha_restarts(self, value):
+        self._ha_restarts = value
+
     def __del__(self):
         logger_mqtt.debug(f'MQTT: __del__')
@@ -59,7 +67,7 @@ class Mqtt(metaclass=Singleton):
                         status = message.payload.decode("UTF-8")
                         logger_mqtt.info(f'Home-Assistant Status: {status}')
                         if status == 'online':
-                            self.home_assistant_restarted = True  # set flag to force MQTT registering
+                            self.restarts += 1

             except aiomqtt.MqttError:
                 logger_mqtt.info(f"Connection lost; Reconnecting in {interval} seconds ...")
diff --git a/app/src/server.py b/app/src/server.py
index 99abc53..01a1577 100644
--- a/app/src/server.py
+++ b/app/src/server.py
@@ -1,8 +1,7 @@
 import logging, asyncio, signal, functools, os
-#from logging.handlers import TimedRotatingFileHandler
 from logging import config
 from async_stream import AsyncStream
-from proxy import Proxy
+from inverter import Inverter
 from config import Config
 from mqtt import Mqtt

@@ -11,7 +10,7 @@
 async def handle_client(reader, writer):
     '''Handles a new incoming connection and starts an async loop'''
     addr = writer.get_extra_info('peername')
-    await Proxy(reader, writer, addr).server_loop(addr)
+    await Inverter(reader, writer, addr).server_loop(addr)


 def handle_SIGTERM(loop):

From 1871f6c8d2af48c31bd533e67a0e208d89f0f71a Mon Sep 17 00:00:00 2001
From: Stefan Allius
Date: Thu, 5 Oct 2023 21:25:02 +0200
Subject: [PATCH 03/12] change owner id during startup

---
 app/Dockerfile    |  6 ------
 app/entrypoint.sh | 14 ++++++++++----
 2 files changed, 10 insertions(+), 10 deletions(-)

diff --git a/app/Dockerfile b/app/Dockerfile
index ddcfc42..f90177f 100644
--- a/app/Dockerfile
+++ b/app/Dockerfile
@@ -37,12 +37,6 @@ ENV SERVICE_NAME=$SERVICE_NAME
 ENV UID=$UID
 ENV GID=$GID

-
-
-RUN addgroup --gid $GID $SERVICE_NAME && \
-    adduser --ingroup $SERVICE_NAME --shell /bin/false --disabled-password --uid $UID $SERVICE_NAME && \
-    mkdir -p /home/$SERVICE_NAME/log /home/$SERVICE_NAME/config && \
-    chown -R $SERVICE_NAME:$SERVICE_NAME /home/$SERVICE_NAME

 # set the working directory in the container
 WORKDIR /home/$SERVICE_NAME
diff --git a/app/entrypoint.sh b/app/entrypoint.sh
index 6c51ae2..8360c44 100644
--- a/app/entrypoint.sh
+++ b/app/entrypoint.sh
@@ -3,12 +3,18 @@ set -e

 user="$(id -u)"
 echo "######################################################"
-echo "# start: '$SERVICE_NAME' Version:$VERSION"
-echo "# with UserID:$UID, GroupID:$GID"
-echo "######################################################"
+echo "# prepare: '$SERVICE_NAME' Version:$VERSION"
+echo "# for running with UserID:$UID, GroupID:$GID"
+echo "#"

 if [ "$user" = '0' ]; then
-    [ -d "/home/$SERVICE_NAME" ] && chown -R $SERVICE_NAME:$SERVICE_NAME /home/$SERVICE_NAME || true
+    mkdir -p /home/$SERVICE_NAME/log /home/$SERVICE_NAME/config
+    addgroup --gid $GID $SERVICE_NAME 2> /dev/null
+    adduser --ingroup $SERVICE_NAME --shell /bin/false --disabled-password --no-create-home --comment "" --uid $UID $SERVICE_NAME
+    chown -R $SERVICE_NAME:$SERVICE_NAME /home/$SERVICE_NAME || true
+    echo "######################################################"
+    echo "#"
+
     exec gosu $SERVICE_NAME "$@"
 else
     exec "$@"

From a48394d057b6892737eaa9ea477fc3b15a71c0a9 Mon Sep 17 00:00:00 2001
From: Stefan Allius
Date: Thu, 5 Oct 2023 21:52:26 +0200
Subject: [PATCH 04/12] fix crash on container restart

---
 app/entrypoint.sh | 9 +++++++--
 1 file changed, 7 insertions(+), 2 deletions(-)

diff --git a/app/entrypoint.sh b/app/entrypoint.sh
index 8360c44..7935f3b 100644
--- a/app/entrypoint.sh
+++ b/app/entrypoint.sh
@@ -9,8 +9,13 @@ echo "#"
 if [ "$user" = '0' ]; then
     mkdir -p /home/$SERVICE_NAME/log /home/$SERVICE_NAME/config
-    addgroup --gid $GID $SERVICE_NAME 2> /dev/null
-    adduser --ingroup $SERVICE_NAME --shell /bin/false --disabled-password --no-create-home --comment "" --uid $UID $SERVICE_NAME
+
+    if id $SERVICE_NAME ; then
+        echo "user still exists"
+    else
+        addgroup --gid $GID $SERVICE_NAME 2> /dev/null
+        adduser --ingroup $SERVICE_NAME --shell /bin/false --disabled-password --no-create-home --comment "" --uid $UID $SERVICE_NAME
+    fi
     chown -R $SERVICE_NAME:$SERVICE_NAME /home/$SERVICE_NAME || true
     echo "######################################################"
     echo "#"
     exec gosu $SERVICE_NAME "$@"
 else
     exec "$@"
From 19cbd5a041e242671bbe084ee610f0696425e4cf Mon Sep 17 00:00:00 2001
From: Stefan Allius
Date: Thu, 5 Oct 2023 22:48:20 +0200
Subject: [PATCH 05/12] fix memory leak, improve traces

---
 app/src/async_stream.py |  7 +++----
 app/src/inverter.py     | 18 +++++++++++-------
 2 files changed, 14 insertions(+), 11 deletions(-)

diff --git a/app/src/async_stream.py b/app/src/async_stream.py
index e741a75..4d3cb04 100644
--- a/app/src/async_stream.py
+++ b/app/src/async_stream.py
@@ -1,5 +1,6 @@
 import logging, traceback
 from config import Config
+#import gc
 from messages import Message, hex_dump_memory

 logger = logging.getLogger('conn')
@@ -86,9 +87,7 @@ class AsyncStream(Message):
         self.writer.close()
         super().close()  # call close handler in the parent class

-        if self.remoteStream:  # if we have knowledge about a remote stream, we del the references between the two streams
-            self.remoteStream.remoteStream = None
-            self.remoteStream = None
+#        logger.info (f'AsyncStream refs: {gc.get_referrers(self)}')

     '''
@@ -128,7 +127,7 @@ class AsyncStream(Message):

     def __del__ (self):
-        logger.debug ("AsyncStream __del__")
+        logging.debug (f"AsyncStream.__del__ {self.addr}")
         super().__del__()

diff --git a/app/src/inverter.py b/app/src/inverter.py
index c2a7f80..ce6a59d 100644
--- a/app/src/inverter.py
+++ b/app/src/inverter.py
@@ -2,6 +2,7 @@
 import asyncio, logging, traceback, json
 from config import Config
 from async_stream import AsyncStream
 from mqtt import Mqtt
+#import gc

 logger = logging.getLogger('conn')
@@ -20,7 +21,7 @@ class Inverter(AsyncStream):

     async def server_loop(self, addr):
         '''Loop for receiving messages from the inverter (server-side)'''
-        logging.info(f'Accept connection from {addr}')
+        logger.info(f'Accept connection from {addr}')
         await self.loop()
         logging.info(f'Server loop stopped for {addr}')

@@ -37,7 +38,8 @@ class Inverter(AsyncStream):
         # if the client connection closes, we don't touch the server connection. Instead we erase the client
         # connection stream, thus on the next received packet from the inverter, we can establish a new connection
         # to the TSUN cloud
-        self.remoteStream = None
+        self.remoteStream.remoteStream = None  # erase backlink to inverter instance
+        self.remoteStream = None                # than erase client connection

     async def async_create_remote(self) -> None:
         '''Establish a client connection to the TSUN cloud'''
@@ -63,6 +65,7 @@ class Inverter(AsyncStream):

     async def async_publ_mqtt(self) -> None:
+        '''puplish data to MQTT broker'''
         db = self.db.db
         # check if new inverter or collector infos are available or when the home assistant has changed the status back to online
         if (self.new_data.keys() & {'inverter', 'collector'}) or self.mqtt.ha_restarts != self.ha_restarts:
@@ -76,8 +79,8 @@ class Inverter(AsyncStream):
                 await self.mqtt.publish(f"{self.entitiy_prfx}{self.node_id}{key}", data_json)
                 self.new_data[key] = False

-    async def __register_home_assistant(self):
-
+    async def __register_home_assistant(self) -> None:
+        '''register all our topics at home assistant'''
         try:
             for data_json, component, id in self.db.ha_confs(self.entitiy_prfx + self.node_id, self.unique_id, self.sug_area):
                 logger.debug(f'MQTT Register: {data_json}')
@@ -87,11 +90,12 @@ class Inverter(AsyncStream):
                 f"Inverter: Exception:\n"
                 f"{traceback.format_exc()}")

-    def close(self):
-        logger.debug(f'in AsyncServerStream.close() {self.addr}')
+    def close(self) -> None:
+        logging.debug(f'Inverter.close() {self.addr}')
         super().close()  # call close handler in the parent class
+#        logger.debug (f'Inverter refs: {gc.get_referrers(self)}')

     def __del__ (self):
-        logging.debug ("Inverter __del__")
+        logging.debug ("Inverter.__del__")
         super().__del__()

From f38047c9315199a9feb85c199421acfd2f494e3d Mon Sep 17 00:00:00 2001
From: Stefan Allius
Date: Thu, 5 Oct 2023 22:51:52 +0200
Subject: [PATCH 06/12] update changelog

---
 CHANGELOG.md | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 269ff2c..485014a 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -7,6 +7,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

 ## [Unreleased]

+- refactoring of the connection classes
+- change user id on startup
+- register MQTT topics to home assistant, even if we have multiple inverters
+
 ## [0.0.6] - 2023-10-03

 - Bump aiomqtt to version 1.2.1

From 0043e4c147e6a5fa6fca1705be9b2e57bcecefca Mon Sep 17 00:00:00 2001
From: Stefan Allius
Date: Fri, 6 Oct 2023 19:35:41 +0200
Subject: [PATCH 07/12] fix typo

---
 app/src/mqtt.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/app/src/mqtt.py b/app/src/mqtt.py
index b136411..a0da909 100644
--- a/app/src/mqtt.py
+++ b/app/src/mqtt.py
@@ -67,7 +67,7 @@ class Mqtt(metaclass=Singleton):
                         status = message.payload.decode("UTF-8")
                         logger_mqtt.info(f'Home-Assistant Status: {status}')
                         if status == 'online':
-                            self.restarts += 1
+                            self.ha_restarts += 1

             except aiomqtt.MqttError:
                 logger_mqtt.info(f"Connection lost; Reconnecting in {interval} seconds ...")

From c791395e0e22f75aecf065477b96c69e8c2fb16b Mon Sep 17 00:00:00 2001
From: Stefan Allius
Date: Fri, 6 Oct 2023 19:59:47 +0200
Subject: [PATCH 08/12] remove obsolete logging messages

---
 app/src/messages.py | 5 +----
 1 file changed, 1 insertion(+), 4 deletions(-)

diff --git a/app/src/messages.py b/app/src/messages.py
index 16f0d14..e9a5838 100644
--- a/app/src/messages.py
+++ b/app/src/messages.py
@@ -101,7 +101,6 @@ class Message(metaclass=IterRegistry):
     '''
     Our puplic methods
     '''
     def close(self) -> None:
-        logger.debug(f'in Message.close()')
         # we have refernces to methods of this class in self.switch
         # so we have to erase self.switch, otherwise this instance can't be
         # deallocated by the garbage collector ==> we get a memory leak
@@ -295,11 +294,9 @@ class Message(metaclass=IterRegistry):

     def msg_unknown(self):
+        logger.error (f"Unknow Msg: ID:{self.msg_id}")
         self.forward(self._recv_buffer, self.header_len+self.data_len)
-
-
-    def __del__ (self):
-        logger.debug ("Messages __del__")

From 3db643cb87acd1714044796b9492023530d60e08 Mon Sep 17 00:00:00 2001
From: Stefan Allius
Date: Fri, 6 Oct 2023 20:02:38 +0200
Subject: [PATCH 09/12] send ha registrations only if values have changed

---
 app/src/inverter.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/app/src/inverter.py b/app/src/inverter.py
index ce6a59d..cdea435 100644
--- a/app/src/inverter.py
+++ b/app/src/inverter.py
@@ -68,7 +68,9 @@ class Inverter(AsyncStream):
         '''puplish data to MQTT broker'''
         db = self.db.db
         # check if new inverter or collector infos are available or when the home assistant has changed the status back to online
-        if (self.new_data.keys() & {'inverter', 'collector'}) or self.mqtt.ha_restarts != self.ha_restarts:
+        if (('inverter' in self.new_data and self.new_data['inverter']) or
+            ('collector' in self.new_data and self.new_data['collector']) or
+                self.mqtt.ha_restarts != self.ha_restarts):
             await self.__register_home_assistant()
             self.ha_restarts = self.mqtt.ha_restarts

@@ -85,7 +87,7 @@ class Inverter(AsyncStream):
         '''register all our topics at home assistant'''
         try:
             for data_json, component, id in self.db.ha_confs(self.entitiy_prfx + self.node_id, self.unique_id, self.sug_area):
-                logger.debug(f'MQTT Register: {data_json}')
+                #logger.debug(f'MQTT Register: {data_json}')
                 await self.mqtt.publish(f"{self.discovery_prfx}{component}/{self.node_id}{id}/config", data_json)
         except Exception:
             logging.error(

From bee25a5f13351eab4b47ed68d83f5a7499d5e3d5 Mon Sep 17 00:00:00 2001
From: Stefan Allius
Date: Fri, 6 Oct 2023 22:59:57 +0200
Subject: [PATCH 10/12] disable DEBUG logging for optimized images

---
 app/src/server.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/app/src/server.py b/app/src/server.py
index 01a1577..727004f 100644
--- a/app/src/server.py
+++ b/app/src/server.py
@@ -45,6 +45,7 @@

     logging.config.fileConfig('logging.ini')
     logging.info(f'Server "{serv_name} - {version}" will be started')
+    logging.getLogger().setLevel(logging.DEBUG if __debug__ else logging.INFO)

     # read config file
     Config.read()

From 6816a3e027f23f7aa158e9ad4a25afd07e6ec6b2 Mon Sep 17 00:00:00 2001
From: Stefan Allius
Date: Fri, 6 Oct 2023 23:11:12 +0200
Subject: [PATCH 11/12] remove empty method Messages.__del__

---
 app/src/async_stream.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/app/src/async_stream.py b/app/src/async_stream.py
index 4d3cb04..d216550 100644
--- a/app/src/async_stream.py
+++ b/app/src/async_stream.py
@@ -128,6 +128,5 @@ class AsyncStream(Message):

     def __del__ (self):
         logging.debug (f"AsyncStream.__del__ {self.addr}")
-        super().__del__()

From 271b4f876e8b9f94196e0f0bcb8c76a39eab3ad8 Mon Sep 17 00:00:00 2001
From: Stefan Allius
Date: Fri, 6 Oct 2023 23:12:06 +0200
Subject: [PATCH 12/12] Version 0.1.0

---
 CHANGELOG.md | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 485014a..717d75b 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -7,6 +7,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

 ## [Unreleased]

+## [0.1.0] - 2023-10-06
+
 - refactoring of the connection classes
 - change user id on startup
 - register MQTT topics to home assistant, even if we have multiple inverters