diff --git a/CHANGELOG.md b/CHANGELOG.md
index 269ff2c..717d75b 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -7,6 +7,12 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 
 ## [Unreleased]
 
+## [0.1.0] - 2023-10-06
+
+- Refactor the connection classes
+- Change the user ID on startup
+- Register MQTT topics with Home Assistant even when multiple inverters are connected
+
 ## [0.0.6] - 2023-10-03
 
 - Bump aiomqtt to version 1.2.1
diff --git a/app/Dockerfile b/app/Dockerfile
index ddcfc42..f90177f 100644
--- a/app/Dockerfile
+++ b/app/Dockerfile
@@ -37,12 +37,6 @@ ENV SERVICE_NAME=$SERVICE_NAME
 ENV UID=$UID
 ENV GID=$GID
 
-
-
-RUN addgroup --gid $GID $SERVICE_NAME && \
-    adduser --ingroup $SERVICE_NAME --shell /bin/false --disabled-password --uid $UID $SERVICE_NAME && \
-    mkdir -p /home/$SERVICE_NAME/log /home/$SERVICE_NAME/config && \
-    chown -R $SERVICE_NAME:$SERVICE_NAME /home/$SERVICE_NAME
 
 # set the working directory in the container
 WORKDIR /home/$SERVICE_NAME
diff --git a/app/entrypoint.sh b/app/entrypoint.sh
index 6c51ae2..7935f3b 100644
--- a/app/entrypoint.sh
+++ b/app/entrypoint.sh
@@ -3,12 +3,23 @@
 set -e
 
 user="$(id -u)"
 echo "######################################################"
-echo "# start: '$SERVICE_NAME' Version:$VERSION"
-echo "# with UserID:$UID, GroupID:$GID"
-echo "######################################################"
+echo "# prepare: '$SERVICE_NAME' Version:$VERSION"
+echo "# for running with UserID:$UID, GroupID:$GID"
+echo "#"
 
 if [ "$user" = '0' ]; then
-    [ -d "/home/$SERVICE_NAME" ] && chown -R $SERVICE_NAME:$SERVICE_NAME /home/$SERVICE_NAME || true
+    mkdir -p /home/$SERVICE_NAME/log /home/$SERVICE_NAME/config
+
+    if id $SERVICE_NAME ; then
+        echo "user already exists"
+    else
+        addgroup --gid $GID $SERVICE_NAME 2> /dev/null
+        adduser --ingroup $SERVICE_NAME --shell /bin/false --disabled-password --no-create-home --comment "" --uid $UID $SERVICE_NAME
+    fi
+    chown -R $SERVICE_NAME:$SERVICE_NAME /home/$SERVICE_NAME || true
+    echo "######################################################"
+    echo "#"
+    exec gosu $SERVICE_NAME "$@"
 else
     exec "$@"
diff --git a/app/src/async_stream.py b/app/src/async_stream.py
index 9407948..d216550 100644
--- a/app/src/async_stream.py
+++ b/app/src/async_stream.py
@@ -1,22 +1,19 @@
-import logging, traceback, aiomqtt, json
+import logging, traceback
 from config import Config
+#import gc
 from messages import Message, hex_dump_memory
-from mqtt import Mqtt
 
 logger = logging.getLogger('conn')
-logger_mqtt = logging.getLogger('mqtt')
 
 class AsyncStream(Message):
 
-    def __init__(self, proxy, reader, writer, addr, stream=None, server_side=True):
+    def __init__(self, reader, writer, addr, remote_stream, server_side: bool) -> None:
         super().__init__()
-        self.proxy = proxy
         self.reader = reader
         self.writer = writer
-        self.remoteStream = stream
-        self.addr = addr
+        self.remoteStream = remote_stream
         self.server_side = server_side
-        self.mqtt = Mqtt()
+        self.addr = addr
 
         self.unique_id = 0
         self.node_id = ''
@@ -24,47 +21,32 @@ class AsyncStream(Message):
     Our puplic methods
     '''
     def set_serial_no(self, serial_no : str):
-        logger_mqtt.info(f'SerialNo: {serial_no}')
+        logger.info(f'SerialNo: {serial_no}')
 
         if self.unique_id != serial_no:
             inverters = Config.get('inverters')
-            #logger_mqtt.debug(f'Inverters: {inverters}')
+            #logger.debug(f'Inverters: {inverters}')
 
             if serial_no in inverters:
-                logger_mqtt.debug(f'SerialNo {serial_no} allowed!')
+                logger.debug(f'SerialNo {serial_no} allowed!')
                 inv = inverters[serial_no]
                 self.node_id = inv['node_id']
                 self.sug_area = inv['suggested_area']
             else:
-                logger_mqtt.debug(f'SerialNo {serial_no} not known!')
+                logger.debug(f'SerialNo {serial_no} not known!')
                 self.node_id = ''
                 self.sug_area = ''
                 if not inverters['allow_all']:
                     self.unique_id = None
-                    logger_mqtt.error('ignore message from unknow inverter!')
+                    logger.error('ignore message from unknow inverter!')
                     return
 
             self.unique_id = serial_no
-            ha = Config.get('ha')
-            self.entitiy_prfx = ha['entity_prefix'] + '/'
-            self.discovery_prfx = ha['discovery_prefix'] + '/'
 
-    async def register_home_assistant(self):
-
-        if self.server_side:
-            try:
-                for data_json, component, id in self.db.ha_confs(self.entitiy_prfx + self.node_id, self.unique_id, self.sug_area):
-                    logger_mqtt.debug(f'Register: {data_json}')
-                    await self.mqtt.publish(f"{self.discovery_prfx}{component}/{self.node_id}{id}/config", data_json)
-
-            except Exception:
-                logging.error(
-                    f"Proxy: Exception:\n"
-                    f"{traceback.format_exc()}")
 
     async def loop(self) -> None:
@@ -79,7 +61,7 @@ class AsyncStream(Message):
             if self.unique_id:
                 await self.__async_write()
                 await self.__async_forward()
-                await self.__async_publ_mqtt()
+                await self.async_publ_mqtt()
 
         except (
             ConnectionResetError,
@@ -104,11 +86,8 @@ class AsyncStream(Message):
         logger.debug(f'in AsyncStream.close() {self.addr}')
         self.writer.close()
         super().close()         # call close handler in the parent class
-        self.proxy = None       # clear our refernce to the proxy, to avoid memory leaks
-        if self.remoteStream:   # if we have knowledge about a remote stream, we del the references between the two streams
-            self.remoteStream.remoteStream = None
-            self.remoteStream = None
+#        logger.info (f'AsyncStream refs: {gc.get_referrers(self)}')
 
 
     '''
@@ -132,8 +111,7 @@ class AsyncStream(Message):
     async def __async_forward(self) -> None:
         if self._forward_buffer:
             if not self.remoteStream:
-                tsun = Config.get('tsun')
-                self.remoteStream = await self.proxy.CreateClientStream (self, tsun['host'], tsun['port'])
+                await self.async_create_remote()     # only implemented for server side => syncServerStream
             if self.remoteStream:
                 hex_dump_memory(logging.DEBUG, f'Forward to {self.remoteStream.addr}:',
                                 self._forward_buffer, len(self._forward_buffer))
@@ -141,24 +119,14 @@ class AsyncStream(Message):
                 await self.remoteStream.writer.drain()
                 self._forward_buffer = bytearray(0)
 
-    async def __async_publ_mqtt(self) -> None:
-        if self.server_side:
-            db = self.db.db
+    async def async_create_remote(self) -> None:
+        pass
 
-            # check if new inverter or collector infos are available or when the home assistant has changed the status back to online
-            if (self.new_data.keys() & {'inverter', 'collector'}) or self.mqtt.home_assistant_restarted:
-                await self.register_home_assistant()
-                self.mqtt.home_assistant_restarted = False    # clear flag
+    async def async_publ_mqtt(self) -> None:
+        pass
 
-            for key in self.new_data:
-                if self.new_data[key] and key in db:
-                    data_json = json.dumps(db[key])
-                    logger_mqtt.info(f'{key}: {data_json}')
-                    await self.mqtt.publish(f"{self.entitiy_prfx}{self.node_id}{key}", data_json)
-                    self.new_data[key] = False
 
     def __del__ (self):
-        logger.debug ("AsyncStream __del__")
-        super().__del__()
-
+        logging.debug (f"AsyncStream.__del__ {self.addr}")
+
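The async_stream.py hunks above turn the former MQTT and proxy duties of `AsyncStream` into two overridable no-op hooks, `async_create_remote()` and `async_publ_mqtt()`, which only the server-side subclass fills in. The following is a minimal sketch of that split with made-up class names (`BaseStream`, `ServerStream`), not the project's real classes:

```python
# Minimal sketch (not the project code): the base stream calls overridable
# no-op hooks, and only the server-side subclass gives them behaviour.
import asyncio


class BaseStream:
    """Transport-only base class, analogous to AsyncStream after the refactor."""

    def __init__(self, server_side: bool) -> None:
        self.server_side = server_side
        self.remote = None

    async def handle_packet(self, data: bytes) -> None:
        if self.remote is None:
            await self.async_create_remote()   # no-op here, overridden by subclass
        await self.async_publ_mqtt()           # no-op here, overridden by subclass

    async def async_create_remote(self) -> None:
        pass                                   # base class knows nothing about TSUN

    async def async_publ_mqtt(self) -> None:
        pass                                   # base class knows nothing about MQTT


class ServerStream(BaseStream):
    """Server-side subclass, analogous to Inverter: supplies the hook bodies."""

    async def async_create_remote(self) -> None:
        self.remote = object()                 # stand-in for the client connection

    async def async_publ_mqtt(self) -> None:
        print("publishing decoded values to MQTT")


asyncio.run(ServerStream(server_side=True).handle_packet(b"\x00"))
```

The benefit of this shape is that the base class no longer needs a back-reference to a `Proxy` object or an `Mqtt` instance, which is what allowed the reference-cycle cleanup elsewhere in this diff.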
diff --git a/app/src/inverter.py b/app/src/inverter.py
new file mode 100644
index 0000000..cdea435
--- /dev/null
+++ b/app/src/inverter.py
@@ -0,0 +1,103 @@
+import asyncio, logging, traceback, json
+from config import Config
+from async_stream import AsyncStream
+from mqtt import Mqtt
+#import gc
+
+logger = logging.getLogger('conn')
+
+
+class Inverter(AsyncStream):
+
+    def __init__ (self, reader, writer, addr):
+        super().__init__(reader, writer, addr, None, True)
+        self.mqtt = Mqtt()
+        self.ha_restarts = 0
+        ha = Config.get('ha')
+        self.entitiy_prfx = ha['entity_prefix'] + '/'
+        self.discovery_prfx = ha['discovery_prefix'] + '/'
+
+
+    async def server_loop(self, addr):
+        '''Loop for receiving messages from the inverter (server-side)'''
+        logger.info(f'Accept connection from {addr}')
+        await self.loop()
+        logging.info(f'Server loop stopped for {addr}')
+
+        # if the server connection closes, we also have to disconnect the connection to the TSUN cloud
+        if self.remoteStream:
+            logging.debug ("disconnect client connection")
+            self.remoteStream.disc()
+
+    async def client_loop(self, addr):
+        '''Loop for receiving messages from the TSUN cloud (client-side)'''
+        await self.remoteStream.loop()
+        logging.info(f'Client loop stopped for {addr}')
+
+        # if the client connection closes, we don't touch the server connection. Instead we erase the client
+        # connection stream, thus on the next received packet from the inverter, we can establish a new connection
+        # to the TSUN cloud
+        self.remoteStream.remoteStream = None    # erase backlink to inverter instance
+        self.remoteStream = None                 # then erase client connection
+
+    async def async_create_remote(self) -> None:
+        '''Establish a client connection to the TSUN cloud'''
+        tsun = Config.get('tsun')
+        host = tsun['host']
+        port = tsun['port']
+        addr = (host, port)
+
+        try:
+            logging.info(f'Connected to {addr}')
+            connect = asyncio.open_connection(host, port)
+            reader, writer = await connect
+            self.remoteStream = AsyncStream(reader, writer, addr, self, False)
+            asyncio.create_task(self.client_loop(addr))
+
+        except ConnectionRefusedError as error:
+            logging.info(f'{error}')
+        except Exception:
+            logging.error(
+                f"Inverter: Exception for {addr}:\n"
+                f"{traceback.format_exc()}")
+
+
+    async def async_publ_mqtt(self) -> None:
+        '''publish data to MQTT broker'''
+        db = self.db.db
+        # check if new inverter or collector infos are available or when the home assistant has changed the status back to online
+        if (('inverter' in self.new_data and self.new_data['inverter']) or
+            ('collector' in self.new_data and self.new_data['collector']) or
+            self.mqtt.ha_restarts != self.ha_restarts):
+            await self.__register_home_assistant()
+            self.ha_restarts = self.mqtt.ha_restarts
+
+        for key in self.new_data:
+            if self.new_data[key] and key in db:
+                data_json = json.dumps(db[key])
+                logger.info(f'{key}: {data_json}')
+                await self.mqtt.publish(f"{self.entitiy_prfx}{self.node_id}{key}", data_json)
+                self.new_data[key] = False
+
+    async def __register_home_assistant(self) -> None:
+        '''register all our topics at home assistant'''
+        try:
+            for data_json, component, id in self.db.ha_confs(self.entitiy_prfx + self.node_id, self.unique_id, self.sug_area):
+                #logger.debug(f'MQTT Register: {data_json}')
+                await self.mqtt.publish(f"{self.discovery_prfx}{component}/{self.node_id}{id}/config", data_json)
+        except Exception:
+            logging.error(
+                f"Inverter: Exception:\n"
+                f"{traceback.format_exc()}")
+
+    def close(self) -> None:
+        logging.debug(f'Inverter.close() {self.addr}')
+        super().close()         # call close handler in the parent class
+#        logger.debug (f'Inverter refs: {gc.get_referrers(self)}')
+
+
+    def __del__ (self):
+        logging.debug ("Inverter.__del__")
+        super().__del__()
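`__register_home_assistant()` publishes one MQTT discovery message per entity to `<discovery_prefix>/<component>/<node_id><id>/config`, which is what lets Home Assistant create the entities automatically. The snippet below only illustrates the topic and payload shape; the prefixes, ids, and attributes are invented examples, not the values `ha_confs()` actually yields:

```python
# Illustrative only: builds a Home Assistant discovery topic/payload pair in
# the same shape the diff publishes to; all values here are made up.
import json

discovery_prfx = 'homeassistant/'   # stand-in for ha['discovery_prefix'] + '/'
entity_prfx = 'tsun/'               # stand-in for ha['entity_prefix'] + '/'
node_id = 'inv_1/'                  # hypothetical node_id from the config file
component = 'sensor'
obj_id = 'out_power'                # hypothetical entity id

config_topic = f"{discovery_prfx}{component}/{node_id}{obj_id}/config"
payload = json.dumps({
    "name": "Output Power",                          # illustrative attributes
    "state_topic": f"{entity_prfx}{node_id}grid",    # hypothetical data topic
    "unique_id": f"serial123_{obj_id}",
    "device_class": "power",
    "unit_of_measurement": "W",
})
print(config_topic)
print(payload)
```

Because the retained config message is keyed by `node_id` and entity id, each `Inverter` instance registers its own set of topics, independent of how many inverters are connected.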
diff --git a/app/src/messages.py b/app/src/messages.py
index 16f0d14..e9a5838 100644
--- a/app/src/messages.py
+++ b/app/src/messages.py
@@ -101,7 +101,6 @@ class Message(metaclass=IterRegistry):
     Our puplic methods
     '''
     def close(self) -> None:
-        logger.debug(f'in Message.close()')
         # we have refernces to methods of this class in self.switch
         # so we have to erase self.switch, otherwise this instance can't be
         # deallocated by the garbage collector ==> we get a memory leak
@@ -295,11 +294,9 @@ class Message(metaclass=IterRegistry):
 
     def msg_unknown(self):
+        logger.error (f"Unknown Msg: ID:{self.msg_id}")
         self.forward(self._recv_buffer, self.header_len+self.data_len)
-
-
-    def __del__ (self):
-        logger.debug ("Messages __del__")
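The `close()` hunk relates to the comment kept in its context lines: `self.switch` stores bound methods, which point back to the instance and keep it alive. A tiny, hypothetical demo (not the project's `Message` class) of why clearing such a dict lets plain reference counting free the object:

```python
# Small demo of the reference cycle mentioned in the close() comment above:
# a dict of bound methods (like self.switch) points back to the instance, so
# the instance is only freed by the cycle collector unless the dict is cleared.
import weakref


class Handler:
    def __init__(self):
        # bound methods hold a reference back to self -> reference cycle
        self.switch = {0x22: self.msg_response, 0x71: self.msg_data}

    def msg_response(self): ...
    def msg_data(self): ...

    def close(self):
        self.switch = {}   # break the cycle; refcounting alone can now free us


h = Handler()
ref = weakref.ref(h)
h.close()
del h                      # freed immediately, no gc.collect() needed
print(ref() is None)       # True
```

Without `close()`, the instance would linger until the cyclic garbage collector happens to run, which is the leak the original comment warns about.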
diff --git a/app/src/mqtt.py b/app/src/mqtt.py
index 0387f50..a0da909 100644
--- a/app/src/mqtt.py
+++ b/app/src/mqtt.py
@@ -16,14 +16,22 @@ class Singleton(type):
 
 class Mqtt(metaclass=Singleton):
     client = None
-
+
     def __init__(self):
        logger_mqtt.debug(f'MQTT: __init__')
        loop = asyncio.get_event_loop()
        self.task = loop.create_task(self.__loop())
-       self.home_assistant_restarted = False
-
+       self.ha_restarts = 0
+
+
+    @property
+    def ha_restarts(self):
+        return self._ha_restarts
+
+    @ha_restarts.setter
+    def ha_restarts(self, value):
+        self._ha_restarts = value
+
     def __del__(self):
         logger_mqtt.debug(f'MQTT: __del__')
@@ -59,7 +67,7 @@ class Mqtt(metaclass=Singleton):
                             status = message.payload.decode("UTF-8")
                             logger_mqtt.info(f'Home-Assistant Status: {status}')
                             if status == 'online':
-                                self.home_assistant_restarted = True    # set flag to force MQTT registering
+                                self.ha_restarts += 1
 
         except aiomqtt.MqttError:
             logger_mqtt.info(f"Connection lost; Reconnecting in {interval} seconds ...")
diff --git a/app/src/proxy.py b/app/src/proxy.py
deleted file mode 100644
index 41b81ea..0000000
--- a/app/src/proxy.py
+++ /dev/null
@@ -1,50 +0,0 @@
-import asyncio, logging, traceback
-from async_stream import AsyncStream
-
-class Proxy:
-    def __init__ (proxy, reader, writer, addr):
-        proxy.ServerStream = AsyncStream(proxy, reader, writer, addr)
-        proxy.ClientStream = None
-
-    async def server_loop(proxy, addr):
-        '''Loop for receiving messages from the inverter (server-side)'''
-        logging.info(f'Accept connection from {addr}')
-        await proxy.ServerStream.loop()
-        logging.info(f'Server loop stopped for {addr}')
-
-        # if the server connection closes, we also disconnect the connection to te TSUN cloud
-        if proxy.ClientStream:
-            logging.debug ("disconnect client connection")
-            proxy.ClientStream.disc()
-
-    async def client_loop(proxy, addr):
-        '''Loop for receiving messages from the TSUN cloud (client-side)'''
-        await proxy.ClientStream.loop()
-        logging.info(f'Client loop stopped for {addr}')
-
-        # if the client connection closes, we don't touch the server connection. Instead we erase the client
-        # connection stream, thus on the next received packet from the inverter, we can establish a new connection
-        # to the TSUN cloud
-        proxy.ClientStream = None
-
-    async def CreateClientStream (proxy, stream, host, port):
-        '''Establish a client connection to the TSUN cloud'''
-        addr = (host, port)
-
-        try:
-            logging.info(f'Connected to {addr}')
-            connect = asyncio.open_connection(host, port)
-            reader, writer = await connect
-            proxy.ClientStream = AsyncStream(proxy, reader, writer, addr, stream, server_side=False)
-            asyncio.create_task(proxy.client_loop(addr))
-
-        except ConnectionRefusedError as error:
-            logging.info(f'{error}')
-        except Exception:
-            logging.error(
-                f"Proxy: Exception for {addr}:\n"
-                f"{traceback.format_exc()}")
-        return proxy.ClientStream
-
-    def __del__ (proxy):
-        logging.debug ("Proxy __del__")
\ No newline at end of file
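The switch from the `home_assistant_restarted` flag to an `ha_restarts` counter is what makes re-registration work with several inverters: each connection remembers the counter value it last registered for instead of clearing a shared flag. A simplified stand-in for that handshake (not the real `Mqtt` and `Inverter` classes):

```python
# Sketch of the restart-counter handshake between the Mqtt singleton and an
# inverter connection (simplified stand-ins, not the project classes).
class MqttStub:
    def __init__(self):
        self.ha_restarts = 0

    def on_ha_status(self, status: str) -> None:
        if status == 'online':          # Home Assistant birth message
            self.ha_restarts += 1       # bump the counter; never reset it


class InverterStub:
    def __init__(self, mqtt: MqttStub):
        self.mqtt = mqtt
        self.ha_restarts = mqtt.ha_restarts   # counter value we registered for

    def maybe_register(self) -> bool:
        if self.mqtt.ha_restarts != self.ha_restarts:
            self.ha_restarts = self.mqtt.ha_restarts
            return True                 # re-send the MQTT discovery topics
        return False


mqtt = MqttStub()
inv = InverterStub(mqtt)
print(inv.maybe_register())   # False - nothing changed
mqtt.on_ha_status('online')   # Home Assistant (re)started
print(inv.maybe_register())   # True  - registration is repeated once
print(inv.maybe_register())   # False - already registered for this restart
```

With the removed boolean flag, the first stream to re-register cleared the flag and any further inverter connections never re-announced their topics; a monotonic counter lets every connection detect the restart independently.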
diff --git a/app/src/server.py b/app/src/server.py
index 99abc53..727004f 100644
--- a/app/src/server.py
+++ b/app/src/server.py
@@ -1,8 +1,7 @@
 import logging, asyncio, signal, functools, os
-#from logging.handlers import TimedRotatingFileHandler
 from logging import config
 from async_stream import AsyncStream
-from proxy import Proxy
+from inverter import Inverter
 from config import Config
 from mqtt import Mqtt
 
@@ -11,7 +10,7 @@ async def handle_client(reader, writer):
     '''Handles a new incoming connection and starts an async loop'''
     addr = writer.get_extra_info('peername')
 
-    await Proxy(reader, writer, addr).server_loop(addr)
+    await Inverter(reader, writer, addr).server_loop(addr)
 
 
 def handle_SIGTERM(loop):
@@ -46,6 +45,7 @@ if __name__ == "__main__":
     logging.config.fileConfig('logging.ini')
     logging.info(f'Server "{serv_name} - {version}" will be started')
+    logging.getLogger().setLevel(logging.DEBUG if __debug__ else logging.INFO)
 
     # read config file
     Config.read()
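For context, `handle_client()` is the connection callback of an `asyncio` TCP server, so every accepted inverter connection gets its own `Inverter` instance and server loop. The sketch below shows that wiring only; the listen address and port are placeholders, and `EchoInverter` just echoes bytes instead of speaking the TSUN protocol:

```python
# Hedged sketch of how a handle_client() callback plugs into an asyncio
# server; address, port and the EchoInverter class are illustrative only.
import asyncio


class EchoInverter:
    '''Stand-in for Inverter: one instance per accepted connection.'''
    def __init__(self, reader, writer, addr):
        self.reader, self.writer, self.addr = reader, writer, addr

    async def server_loop(self, addr):
        data = await self.reader.read(1024)
        self.writer.write(data)              # real code would parse and forward
        await self.writer.drain()
        self.writer.close()


async def handle_client(reader, writer):
    addr = writer.get_extra_info('peername')
    await EchoInverter(reader, writer, addr).server_loop(addr)


async def main():
    server = await asyncio.start_server(handle_client, host='0.0.0.0', port=5005)
    async with server:
        await server.serve_forever()

# asyncio.run(main())   # left commented so the sketch can be imported safely
```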